hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
ce1f5da27eae9110975f89eccf71a878f1e04dd6
300
py
Python
kaybee/plugins/resources/resource.py
pauleveritt/kaybee
a00a718aaaa23b2d12db30dfacb6b2b6ec84459c
[ "Apache-2.0" ]
2
2017-11-08T19:55:57.000Z
2018-12-21T12:41:41.000Z
kaybee/plugins/resources/resource.py
pauleveritt/kaybee
a00a718aaaa23b2d12db30dfacb6b2b6ec84459c
[ "Apache-2.0" ]
null
null
null
kaybee/plugins/resources/resource.py
pauleveritt/kaybee
a00a718aaaa23b2d12db30dfacb6b2b6ec84459c
[ "Apache-2.0" ]
1
2018-10-13T08:59:29.000Z
2018-10-13T08:59:29.000Z
""" An out-of-the-box resource type. This can be used in RST with something like:: .. resource:: template: sometemplate.html """ from kaybee.app import kb from kaybee.plugins.resources.base_resource import BaseResource @kb.resource('resource') class Resource(BaseResource): pass
15
63
0.723333
40
300
5.4
0.75
0.092593
0
0
0
0
0
0
0
0
0
0
0.173333
300
19
64
15.789474
0.870968
0.436667
0
0
0
0
0.050314
0
0
0
0
0
0
1
0
true
0.2
0.4
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
cbf1eec94cbcb3b85b8c5b94e2f045bdd386473f
177
py
Python
cvmchain/crypto/__init__.py
dakk/cvmchain
2e3721ead9a018f7236d26f2206b29823d489168
[ "MIT" ]
null
null
null
cvmchain/crypto/__init__.py
dakk/cvmchain
2e3721ead9a018f7236d26f2206b29823d489168
[ "MIT" ]
3
2016-12-30T09:59:05.000Z
2017-01-02T17:11:01.000Z
cvmchain/crypto/__init__.py
dakk/cvmchain
2e3721ead9a018f7236d26f2206b29823d489168
[ "MIT" ]
null
null
null
# Copyright (c) 2016-2017 Davide Gessa # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.
35.4
69
0.779661
26
177
5.307692
0.884615
0
0
0
0
0
0
0
0
0
0
0.051613
0.124294
177
4
70
44.25
0.83871
0.954802
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
022f84e1024b8b7af546e0c701c8675ab6ea4a68
51
py
Python
cornflow-server/cornflow/external_app/schemas/__init__.py
ggsdc/corn
4c17c46a70f95b8882bcb6a55ef7daa1f69e0456
[ "MIT" ]
2
2020-07-09T20:58:47.000Z
2020-07-20T20:40:46.000Z
cornflow-server/cornflow/external_app/schemas/__init__.py
baobabsoluciones/cornflow
bd7cae22107e5fe148704d5f41d4f58f9c410b40
[ "Apache-2.0" ]
2
2022-03-31T08:42:10.000Z
2022-03-31T12:05:23.000Z
cornflow-server/cornflow/external_app/schemas/__init__.py
ggsdc/corn
4c17c46a70f95b8882bcb6a55ef7daa1f69e0456
[ "MIT" ]
null
null
null
""" Initialization file for the schemas module """
12.75
42
0.72549
6
51
6.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.156863
51
3
43
17
0.860465
0.823529
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
02302c5046da6a53b5c2af55428d6a99543a2a67
128,385
py
Python
spacy/lang/tr/morph_rules.py
forgetso/spaCy
28256522c8a6311e6a20d66927ef2bd230755464
[ "BSD-3-Clause", "MIT" ]
10
2021-05-31T07:18:08.000Z
2022-03-19T09:20:11.000Z
spacy/lang/tr/morph_rules.py
forgetso/spaCy
28256522c8a6311e6a20d66927ef2bd230755464
[ "BSD-3-Clause", "MIT" ]
1
2021-08-03T12:23:01.000Z
2021-08-10T08:35:22.000Z
spacy/lang/tr/morph_rules.py
forgetso/spaCy
28256522c8a6311e6a20d66927ef2bd230755464
[ "BSD-3-Clause", "MIT" ]
2
2021-12-09T07:23:21.000Z
2022-03-31T06:13:10.000Z
# coding: utf8 from __future__ import unicode_literals from ...symbols import LEMMA, PRON_LEMMA _adverbs = [ "apansızın", "aslen", "aynen", "ayrıyeten", "basbayağı", "başaşağı", "belki", "çatkapı", "demin", "derhal", "doyasıya", "düpedüz", "ebediyen", "elbet", "elbette", "enikonu", "epey", "epeyce", "epeydir", "esasen", "evvela", "galiba", "gayet", "genellikle", "gerçekten", "gerisingeri", "giderayak", "gitgide", "gıyaben", "gözgöze", "güçbela", "gündüzleyin", "güya", "habire", "hakikaten", "hakkaten", "halen", "halihazırda", "harfiyen", "haricen", "hasbelkader", "hemen", "henüz", "hep", "hepten", "herhalde", "hiç", "hükmen", "ihtiyaten", "illaki", "ismen", "iştiraken", "izafeten", "kalben", "kargatulumba", "kasten", "katiyen", "katiyyen", "kazara", "kefaleten", "kendiliğinden", "kerhen", "kesinkes", "kesinlikle", "keşke", "kimileyin", "külliyen", "layıkıyla", "maalesef", "mahsusçuktan", "masumane", "malulen", "mealen", "mecazen", "mecburen", "muhakkak", "muhtemelen", "mutlaka", "müstacelen", "müştereken", "müteakiben", "naçizane", "nadiren", "nakden", "naklen", "nazikane", "nerdeyse", "neredeyse", "nispeten", "nöbetleşe", "olabildiğince", "olduğunca", "ortaklaşa", "otomatikman", "öğlenleyin", "öğleyin", "öldüresiye", "ölesiye", "örfen", "öyle", "öylesine", "özellikle", "peşinen", "peşpeşe", "peyderpey", "ruhen", "sadece", "sahi", "sahiden", "salt", "salimen", "sanırım", "sanki", "sehven", "senlibenli", "sereserpe", "sırf", "sözgelimi", "sözgelişi", "şahsen", "şakacıktan", "şeklen", "şıppadak", "şimdilik", "şipşak", "tahminen", "takdiren", "takiben", "tamamen", "tamamiyle", "tedbiren", "temsilen", "tepetaklak", "tercihen", "tesadüfen", "tevekkeli", "tezelden", "tıbben", "tıkabasa", "tıpatıp", "toptan", "tümüyle", "uluorta", "usulcacık", "usulen", "üstünkörü", "vekaleten", "vicdanen", "yalancıktan", "yavaşçacık", "yekten", "yeniden", "yeterince", "yine", "yüzükoyun", "yüzüstü", "yüzyüze", "zaten", "zımmen", "zihnen", "zilzurna" ] _postpositions = [ "geçe", "gibi", 
"göre", "ilişkin", "kadar", "kala", "karşın", "nazaran" "rağmen", "üzere" ] _subordinating_conjunctions = [ "eğer", "madem", "mademki", "şayet" ] _coordinating_conjunctions = [ "ama", "hem", "fakat", "ila", "lakin", "ve", "veya", "veyahut" ] MORPH_RULES = { "ADP": {word: {"POS": "ADP"} for word in _postpositions}, "ADV": {word: {"POS": "ADV"} for word in _adverbs}, "SCONJ": {word: {"POS": "SCONJ"} for word in _subordinating_conjunctions}, "CCONJ": {word: {"POS": "CCONJ"} for word in _coordinating_conjunctions}, "PRON": { "bana": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Dat" }, "benden": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Abl" }, "bende": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Loc" }, "beni": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Acc" }, "benle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Ins" }, "ben": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Nom" }, "benim": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Gen" }, "benimle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Sing", "Case": "Ins" }, "sana": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Dat" }, "senden": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Abl" }, "sende": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Loc" }, "seni": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Acc" }, "senle": { "LEMMA": "PRON_LEMMA", 
"POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Ins" }, "sen": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Nom" }, "senin": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Gen" }, "seninle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Sing", "Case": "Ins" }, "ona": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Dat" }, "ondan": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Abl" }, "onda": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "", "Number": "", "Case": "Loc" }, "onu": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Acc" }, "onla": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Ins" }, "o": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Nom" }, "onun": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Gen" }, "onunla": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Sing", "Case": "Ins" }, "bize": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Dat" }, "bizden": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Abl" }, "bizde": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Loc" }, "bizi": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Acc" }, "bizle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Ins" }, "biz": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Nom" }, "bizim": { "LEMMA": 
"PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Gen" }, "bizimle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "One", "Number": "Plur", "Case": "Ins" }, "size": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Dat" }, "sizden": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Abl" }, "sizde": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Loc" }, "sizi": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Acc" }, "sizle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Ins" }, "siz": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Nom" }, "sizin": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Gen" }, "sizinle": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "PronType": "Prs", "Person": "Two", "Number": "Plur", "Case": "Ins" }, "onlara": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Dat" }, "onlardan": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Abl" }, "onlarda": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Loc" }, "onları": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Acc" }, "onlarla": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Ins" }, "onlar": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Nom" }, "onların": { "LEMMA": "PRON_LEMMA", "POS": "PRON", "Person": "Three", "Number": "Plur", "Case": "Gen" }, "buna": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": 
"Dat" }, "bundan": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Abl" }, "bunda": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Loc" }, "bunu": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Acc" }, "bunla": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "bu": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Nom" }, "bunun": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Gen" }, "bununla": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "şuna": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Dat" }, "şundan": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Abl" }, "şunda": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Loc" }, "şunu": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Acc" }, "şunla": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "şu": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Nom" }, "şunun": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Gen" }, "şununla": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "bunlara": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Dat" }, "bunlardan": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Abl" }, "bunlarda": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Loc" }, "bunları": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Acc" }, "bunlarla": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Ins" }, "bunlar": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", 
"Case": "Nom" }, "bunların": { "LEMMA": "bu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Gen" }, "şunlara": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Dat" }, "şunlardan": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Abl" }, "şunlarda": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Loc" }, "şunları": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Acc" }, "şunlarla": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Ins" }, "şunlar": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Nom" }, "şunların": { "LEMMA": "şu", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Gen" }, "buraya": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Dat" }, "buradan": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Abl" }, "burada": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "loc.sg" }, "burayı": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Acc" }, "burayla": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "bura": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Nom" }, "buranın": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Gen" }, "şuraya": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Dat" }, "şuradan": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Abl" }, "şurada": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "loc.sg" }, "şurayı": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Acc" }, "şurayla": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "şura": { 
"LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Nom" }, "şuranın": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Gen" }, "oraya": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Dat" }, "oradan": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Abl" }, "orada": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "loc.sg" }, "orayı": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Acc" }, "orayla": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Ins" }, "ora": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Nom" }, "oranın": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Sing", "Case": "Gen" }, "buralarına": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Dat" }, "buralarından": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Abl" }, "buralarında": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Loc" }, "buralarını": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Acc" }, "buralarıyla": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Ins" }, "buraları": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Nom" }, "buralarının": { "LEMMA": "bura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Gen" }, "şuralarına": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Dat" }, "şuralarından": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Abl" }, "şuralarında": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Loc" }, "şuralarını": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Acc" }, 
"şuralarıyla": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Ins" }, "şuraları": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Nom" }, "şuralarının": { "LEMMA": "şura", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Gen" }, "oralarına": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Dat" }, "oralarından": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Abl" }, "oralarında": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Loc" }, "oralarını": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Acc" }, "oralarıyla": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Ins" }, "oraları": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Nom" }, "oralarının": { "LEMMA": "ora", "POS": "PRON", "PronType": "Dem", "Number": "Plur", "Case": "Gen" }, "kendime": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Dat", "Number": "Sing" }, "kendimden": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Abl", "Number": "Sing" }, "kendimde": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Loc", "Number": "Sing" }, "kendimi": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Acc", "Number": "Sing" }, "kendimle": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Ins", "Number": "Sing" }, "kendim": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Nom", "Number": "Sing" }, "kendimin": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Gen", "Number": "Sing" }, "kendine": { "LEMMA": "kendi", "POS": "PRON", 
"PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Dat", "Number": "Sing" }, "kendinden": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Abl", "Number": "Sing" }, "kendinde": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Loc", "Number": "Sing" }, "kendini": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Acc", "Number": "Sing" }, "kendiyle": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Ins", "Number": "Sing" }, "kendi": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Nom", "Number": "Sing" }, "kendinin": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Gen", "Number": "Sing" }, "kendisine": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Dat", "Number": "Sing" }, "kendisinden": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Abl", "Number": "Sing" }, "kendisinde": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Loc", "Number": "Sing" }, "kendisini": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Acc", "Number": "Sing" }, "kendisiyle": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Ins", "Number": "Sing" }, "kendisi": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Nom", "Number": "Sing" }, "kendisinin": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Gen", "Number": "Sing" }, "kendimize": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Dat", "Number": "Sing" }, 
"kendimizden": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Abl", "Number": "Sing" }, "kendimizde": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Loc", "Number": "Sing" }, "kendimizi": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Acc", "Number": "Sing" }, "kendimizle": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Ins", "Number": "Sing" }, "kendimiz": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Nom", "Number": "Sing" }, "kendimizin": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "One", "Case": "Gen", "Number": "Sing" }, "kendinize": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Dat", "Number": "Sing" }, "kendinizden": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Abl", "Number": "Sing" }, "kendinizde": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Loc", "Number": "Sing" }, "kendinizi": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Acc", "Number": "Sing" }, "kendinizle": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Ins", "Number": "Sing" }, "kendiniz": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Nom", "Number": "Sing" }, "kendinizin": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Two", "Case": "Gen", "Number": "Sing" }, "kendilerine": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Dat", "Number": "Sing" }, "kendilerinden": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": 
"Yes", "Person": "Three", "Case": "Abl", "Number": "Sing" }, "kendilerinde": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Loc", "Number": "Sing" }, "kendilerini": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Acc", "Number": "Sing" }, "kendileriyle": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Ins", "Number": "Sing" }, "kendileriyken": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Nom", "Number": "Sing" }, "kendilerinin": { "LEMMA": "kendi", "POS": "PRON", "PronType": "Prs", "Reflex": "Yes", "Person": "Three", "Case": "Gen", "Number": "Sing" }, "hangilerine": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "hangilerinden": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "hangilerinde": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "hangilerini": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "hangileriyle": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Sing" }, "hangileri": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "hangilerinin": { "LEMMA": "hangileri", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "hangisine": { "LEMMA": "hangi", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "hangisinden": { "LEMMA": "hangi", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "hangisinde": { "LEMMA": "hangi", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "hangisini": { "LEMMA": "hangi", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "hangisiyle": { "LEMMA": "hangi", "POS": "PRON", "PronType": 
"Int", "Case": "Ins", "Number": "Sing" }, "hangisi": { "LEMMA": "hangi", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "hangisinin": { "LEMMA": "hangi", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "kime": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "kimden": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "kimde": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "kimi": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "kimle": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Sing" }, "kim": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "kimin": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "kimlere": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Plur" }, "kimlerden": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Plur" }, "kimlerde": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Plur" }, "kimleri": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Plur" }, "kimlerle": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Plur" }, "kimler": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Plur" }, "kimlerin": { "LEMMA": "kim", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Plur" }, "neye": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "neden": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "nede": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "neyi": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "neyle": { 
"LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Sing" }, "ne": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "neyin": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "nelere": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Plur" }, "nelerden": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Plur" }, "nelerde": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Plur" }, "neleri": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Plur" }, "nelerle": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Plur" }, "neler": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Plur" }, "nelerin": { "LEMMA": "ne", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Plur" }, "nereye": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "nereden": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "nerede": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "nereyi": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "nereyle": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Sing" }, "nere": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "nerenin": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "nerelere": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Plur" }, "nerelerden": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Plur" }, "nerelerde": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Plur" }, "nereleri": { "LEMMA": "nere", "POS": "PRON", "PronType": 
"Int", "Case": "Acc", "Number": "Plur" }, "nerelerle": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Plur" }, "nereler": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Plur" }, "nerelerin": { "LEMMA": "nere", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Plur" }, "kaçlarına": { "LEMMA": "kaçları", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "kaçlarından": { "LEMMA": "kaçları", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "kaçlarında": { "LEMMA": "kaçları", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "kaçlarını": { "LEMMA": "kaçları", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "kaçlarıyla": { "LEMMA": "kaçları", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Sing" }, "kaçları": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "kaçlarının": { "LEMMA": "kaçları", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "kaçına": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Dat", "Number": "Sing" }, "kaçından": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Abl", "Number": "Sing" }, "kaçında": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Loc", "Number": "Sing" }, "kaçını": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Acc", "Number": "Sing" }, "kaçıyla": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Ins", "Number": "Sing" }, "kaçı": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Nom", "Number": "Sing" }, "kaçının": { "LEMMA": "kaçı", "POS": "PRON", "PronType": "Int", "Case": "Gen", "Number": "Sing" }, "başkasına": { "LEMMA": "başkası", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "başkasından": { "LEMMA": "başkası", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "başkasında": { "LEMMA": 
"başkası", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "başkasını": { "LEMMA": "başkası", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "başkasıyla": { "LEMMA": "başkası", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "başkası": { "LEMMA": "başkası", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "başkasının": { "LEMMA": "başkası", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "başkalarına": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "başkalarından": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "başkalarında": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "başkalarını": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "başkalarıyla": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "başkaları": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "başkalarının": { "LEMMA": "başkaları", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "bazısına": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "bazısından": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "bazısında": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "bazısını": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "bazısıyla": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "bazısı": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "bazısının": { "LEMMA": "bazısı", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "bazılarına": { "LEMMA": 
"bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "bazılarından": { "LEMMA": "bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "bazılarında": { "LEMMA": "bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "bazılarını": { "LEMMA": "bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "bazılarıyla": { "LEMMA": "bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "bazıları": { "LEMMA": "bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "bazılarının": { "LEMMA": "bazıları", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "birbirine": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Dat", "Number": "Sing" }, "birbirinden": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Abl", "Number": "Sing" }, "birbirinde": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Loc", "Number": "Sing" }, "birbirini": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Acc", "Number": "Sing" }, "birbiriyle": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Ins", "Number": "Sing" }, "birbiri": { "LEMMA": "birbiri", "POS": "PRON", "PronType": "Rcp", "Case": "Nom", "Number": "Sing" }, "birbirinin": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Gen", "Number": "Sing" }, "birbirlerine": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Dat", "Number": "Sing" }, "birbirlerinden": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Abl", "Number": "Sing" }, "birbirlerinde": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Loc", "Number": "Sing" }, "birbirlerini": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Acc", "Number": "Sing" }, "birbirleriyle": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Ins", "Number": "Sing" }, "birbirleri": { 
"LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Nom", "Number": "Sing" }, "birbirlerinin": { "LEMMA": "birbir", "POS": "PRON", "PronType": "Rcp", "Case": "Gen", "Number": "Sing" }, "birçoğuna": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "birçoğundan": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "birçoğunda": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "birçoğunu": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "birçoğuyla": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "birçoğu": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "birçoğunun": { "LEMMA": "birçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "birçoklarına": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "birçoklarından": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "birçoklarında": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "birçoklarını": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "birçoklarıyla": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "birçokları": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "birçoklarının": { "LEMMA": "birçokları", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "birilerine": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "birilerinden": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "birilerinde": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", 
"Number": "Sing" }, "birilerini": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "birileriyle": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "birileri": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "birilerinin": { "LEMMA": "birileri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "birisine": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "birisinden": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "birisinde": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "birisini": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "birisiyle": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "birisi": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "birisinin": { "LEMMA": "biri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "birkaçına": { "LEMMA": "birkaçı", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "birkaçından": { "LEMMA": "birkaçı", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "birkaçında": { "LEMMA": "birkaçı", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "birkaçını": { "LEMMA": "birkaçı", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "birkaçıyla": { "LEMMA": "birkaçı", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "birkaçı": { "LEMMA": "birkaç", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "birkaçının": { "LEMMA": "birkaçı", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "birtakımına": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, 
"birtakımından": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "birtakımında": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "birtakımını": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "birtakımıyla": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "birtakımı": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "birtakımının": { "LEMMA": "birtakımı", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "böylesine": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "böylesinden": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "böylesinde": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "böylesini": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "böylesiyle": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "böylesi": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "böylesinin": { "LEMMA": "böylesi", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "şöylesine": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "şöylesinden": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "şöylesinde": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "şöylesini": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "şöylesiyle": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "şöylesi": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": 
"Sing" }, "şöylesinin": { "LEMMA": "şöylesi", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "öylesine": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "öylesinden": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "öylesinde": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "öylesini": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "öylesiyle": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "öylesi": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "öylesinin": { "LEMMA": "öylesi", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "böylelerine": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "böylelerinden": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "böylelerinde": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "böylelerini": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "böyleleriyle": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "böyleleri": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "böylelerinin": { "LEMMA": "böyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "şöylelerine": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "şöylelerinden": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "şöylelerinde": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "şöylelerini": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": 
"Acc", "Number": "Sing" }, "şöyleleriyle": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "şöyleleri": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "şöylelerinin": { "LEMMA": "şöyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "öylelerine": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "öylelerinden": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "öylelerinde": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "öylelerini": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "öyleleriyle": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "öyleleri": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "öylelerinin": { "LEMMA": "öyleleri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "çoklarına": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "çoklarından": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "çoklarında": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "çoklarını": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "çoklarıyla": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "çokları": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "çoklarının": { "LEMMA": "çokları", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "çoğuna": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "çoğundan": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": 
"Abl", "Number": "Sing" }, "çoğunda": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "çoğunu": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "çoğuyla": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "çoğu": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "çoğunun": { "LEMMA": "çoğu", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "diğerine": { "LEMMA": "diğeri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "diğerinden": { "LEMMA": "diğeri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "diğerinde": { "LEMMA": "diğeri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "diğerini": { "LEMMA": "diğeri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "diğeriyle": { "LEMMA": "diğeri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "diğeri": { "LEMMA": "diğer", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "diğerinin": { "LEMMA": "diğeri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "diğerlerine": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "diğerlerinden": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "diğerlerinde": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "diğerlerini": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "diğerleriyle": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "diğerleri": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "diğerlerinin": { "LEMMA": "diğerleri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, 
"hepinize": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "hepinizden": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "hepinizde": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "hepinizi": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "hepinizle": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "hepiniz": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "hepinizin": { "LEMMA": "hepiniz", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "hepimize": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "hepimizden": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "hepimizde": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "hepimizi": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "hepimizle": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "hepimiz": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "hepimizin": { "LEMMA": "hepimiz", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "hepsine": { "LEMMA": "hepsi", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "hepsinden": { "LEMMA": "hepsi", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "hepsinde": { "LEMMA": "hepsi", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "hepsini": { "LEMMA": "hepsi", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "hepsiyle": { "LEMMA": "hepsi", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "hepsi": { "LEMMA": "hepsi", "POS": "PRON", 
"PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "hepsinin": { "LEMMA": "hepsi", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "herbirine": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "herbirinden": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "herbirinde": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "herbirini": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "herbiriyle": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "herbiri": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "herbirinin": { "LEMMA": "herbiri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "herbirlerine": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "herbirlerinden": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "herbirlerinde": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "herbirlerini": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "herbirleriyle": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "herbirleri": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "herbirlerinin": { "LEMMA": "herbirleri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "herhangisine": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "herhangisinden": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "herhangisinde": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, 
"herhangisini": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "herhangisiyle": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "herhangisi": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "herhangisinin": { "LEMMA": "herhangisi", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "herhangilerine": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "herhangilerinden": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "herhangilerinde": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "herhangilerini": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "herhangileriyle": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "herhangileri": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "herhangilerinin": { "LEMMA": "herhangileri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "herkese": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "herkesten": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "herkeste": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "herkesi": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "herkesle": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "herkes": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "herkesin": { "LEMMA": "herkes", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "hiçbirisine": { "LEMMA": "hiçbiri", "POS": 
"PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "hiçbirisinden": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "hiçbirisinde": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "hiçbirisini": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "hiçbirisiyle": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "hiçbirisi": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "hiçbirisinin": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "hiçbirine": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "hiçbirinden": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "hiçbirinde": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "hiçbirini": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "hiçbiriyle": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "hiçbiri": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "hiçbirinin": { "LEMMA": "hiçbiri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "kimisine": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "kimisinden": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "kimisinde": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "kimisini": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "kimisiyle": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "kimisi": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", 
"Case": "Nom", "Number": "Sing" }, "kimisinin": { "LEMMA": "kimi", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "kimilerine": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "kimilerinden": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "kimilerinde": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "kimilerini": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "kimileriyle": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "kimileri": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "kimilerinin": { "LEMMA": "kimileri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "kimseye": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "kimseden": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "kimsede": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "kimseyi": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "kimseyle": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "kimse": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "kimsenin": { "LEMMA": "kimse", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "öbürüne": { "LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "öbüründen": { "LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "öbüründe": { "LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "öbürünü": { "LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "öbürüyle": { 
"LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "öbürü": { "LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "öbürünün": { "LEMMA": "öbürü", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "öbürlerine": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "öbürlerinden": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "öbürlerinde": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "öbürlerini": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "öbürleriyle": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "öbürleri": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "öbürlerinin": { "LEMMA": "öbürleri", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "ötekisine": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "ötekisinden": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "ötekisinde": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "ötekisini": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "ötekisiyle": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "ötekisi": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "ötekisinin": { "LEMMA": "öteki", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "pekçoğuna": { "LEMMA": "pekçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "pekçoğundan": { "LEMMA": "pekçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "pekçoğunda": { "LEMMA": "pekçoğu", 
"POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "pekçoğunu": { "LEMMA": "pekçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "pekçoğuyla": { "LEMMA": "pekçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "pekçoğu": { "LEMMA": "pekçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "pekçoğunun": { "LEMMA": "pekçoğu", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" }, "pekçoklarına": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Dat", "Number": "Sing" }, "pekçoklarından": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Abl", "Number": "Sing" }, "pekçoklarında": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Loc", "Number": "Sing" }, "pekçoklarını": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Acc", "Number": "Sing" }, "pekçoklarıyla": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Ins", "Number": "Sing" }, "pekçokları": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Nom", "Number": "Sing" }, "pekçoklarının": { "LEMMA": "pekçokları", "POS": "PRON", "PronType": "Ind", "Case": "Gen", "Number": "Sing" } } } for tag, rules in MORPH_RULES.items(): for key, attrs in dict(rules).items(): rules[key.title()] = attrs
32.868664
82
0.240534
6,158
128,385
5.003897
0.122442
0.116084
0.241449
0.155384
0.795807
0.795807
0.794931
0.685143
0.355326
0.213345
0
0.00002
0.609705
128,385
3,905
83
32.877081
0.614932
0.000093
0
0.684116
0
0
0.237741
0
0
0
0
0
0
1
0
false
0
0.000513
0
0.000513
0
0
0
0
null
0
1
0
0
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0243b01fa7f10a622ac9c88feae38b2c0ab703d1
98
py
Python
part3/ex04/birthday.py
abasu1007/intro-to-python
f6978816c3020860b74219a1bfe191aea8e4e75f
[ "CC-BY-4.0" ]
13
2015-05-11T06:20:24.000Z
2017-04-13T19:47:54.000Z
part3/ex04/birthday.py
PythonWorkshop/intro-to-python
9f8ad86e42e40d059877fd234fb160602e8907ac
[ "CC-BY-4.0" ]
null
null
null
part3/ex04/birthday.py
PythonWorkshop/intro-to-python
9f8ad86e42e40d059877fd234fb160602e8907ac
[ "CC-BY-4.0" ]
10
2016-04-16T19:28:22.000Z
2018-06-15T14:56:57.000Z
def happy_birthday(name):
    """Print a birthday greeting addressed to *name*."""
    print(f"Happy Birthday, dear {name}!")


happy_birthday("Trey")
19.6
47
0.663265
12
98
5.25
0.583333
0.619048
0
0
0
0
0
0
0
0
0
0
0.163265
98
4
48
24.5
0.768293
0
0
0
0
0
0.265306
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
024c5f09291942e829949bdfbedb654db16aa357
50
py
Python
tests/__init__.py
JulianPitney/data-management-demo
ba0aa152711dea393f71ec2bca04e85644d30205
[ "MIT" ]
null
null
null
tests/__init__.py
JulianPitney/data-management-demo
ba0aa152711dea393f71ec2bca04e85644d30205
[ "MIT" ]
null
null
null
tests/__init__.py
JulianPitney/data-management-demo
ba0aa152711dea393f71ec2bca04e85644d30205
[ "MIT" ]
null
null
null
"""Unit test package for data_management_demo."""
25
49
0.76
7
50
5.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.1
50
1
50
50
0.8
0.86
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
0254ee12c57dd76ebb84123a48e49eb0a8f8e356
223
py
Python
ProvisionScripts/_CommissionDevice2AWS.py
MicrochipTech/aws-iot-winc1500-secure-wifi-board-included-source-files
880492d226b93b8195ad3f5e4ec25961e2dd470f
[ "MIT" ]
2
2018-10-01T06:40:38.000Z
2020-08-26T19:28:13.000Z
ProvisionScripts/_CommissionDevice2AWS.py
MicrochipTech/aws-iot-winc1500-secure-wifi-board-included-source-files
880492d226b93b8195ad3f5e4ec25961e2dd470f
[ "MIT" ]
null
null
null
ProvisionScripts/_CommissionDevice2AWS.py
MicrochipTech/aws-iot-winc1500-secure-wifi-board-included-source-files
880492d226b93b8195ad3f5e4ec25961e2dd470f
[ "MIT" ]
2
2019-04-17T09:19:38.000Z
2020-06-04T15:37:12.000Z
#import kit_set_wifi_new #print('\nWiFi setup') #kit_set_wifi_new.main() import kit_provision print('\nProvisioning Kit') kit_provision.main() #import kit_set_wifi__kit_provision #kit_set_wifi__kit_provision.main()
13.117647
35
0.798206
34
223
4.705882
0.323529
0.15
0.25
0.2
0.275
0
0
0
0
0
0
0
0.09417
223
16
36
13.9375
0.792079
0.605381
0
0
0
0
0.219512
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.333333
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
025aeb7ab23a7f87779f48c6185c42d6af665263
61
py
Python
roost_backend/__init__.py
dehnert/roost-ng
a640e64592e64d8d318dc83bff3b47189a35c1b6
[ "MIT" ]
1
2020-11-19T21:04:32.000Z
2020-11-19T21:04:32.000Z
roost_backend/__init__.py
dehnert/roost-ng
a640e64592e64d8d318dc83bff3b47189a35c1b6
[ "MIT" ]
9
2020-11-29T18:11:41.000Z
2022-03-06T13:54:20.000Z
roost_backend/__init__.py
dehnert/roost-ng
a640e64592e64d8d318dc83bff3b47189a35c1b6
[ "MIT" ]
1
2020-11-27T01:26:18.000Z
2020-11-27T01:26:18.000Z
# Tell Django which AppConfig to use when the app is listed bare in
# INSTALLED_APPS.
# NOTE(review): ``default_app_config`` is deprecated since Django 3.2 --
# confirm the project's Django version before removing this line.
default_app_config = 'roost_backend.apps.RoostBackendConfig'
30.5
60
0.868852
7
61
7.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.04918
61
1
61
61
0.862069
0
0
0
0
0
0.606557
0.606557
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
02667ab7957ab1c4a64383a8be8bf2977cbb79a8
251
py
Python
orchester_cms_integration/admin.py
GeniALE/SiteWebGeniALE
39488a89887d6855590ab39a59dbc2b6dca21892
[ "MIT" ]
4
2018-03-17T15:38:17.000Z
2020-10-03T12:06:19.000Z
orchester_cms_integration/admin.py
GeniALE/SiteWebGeniALE
39488a89887d6855590ab39a59dbc2b6dca21892
[ "MIT" ]
80
2018-02-01T02:55:01.000Z
2022-03-11T23:17:17.000Z
orchester_cms_integration/admin.py
GeniALE/SiteWebGeniALE
39488a89887d6855590ab39a59dbc2b6dca21892
[ "MIT" ]
2
2018-01-30T09:27:31.000Z
2019-07-25T23:50:49.000Z
from orchester_cms_integration.models import ServiceInstructions
from django.contrib import admin


class ServiceInstructionsAdmin(admin.ModelAdmin):
    """Django admin configuration for ServiceInstructions records."""
    model = ServiceInstructions


# Make ServiceInstructions editable on the default admin site.
admin.site.register(ServiceInstructions, ServiceInstructionsAdmin)
25.1
66
0.860558
23
251
9.304348
0.695652
0
0
0
0
0
0
0
0
0
0
0
0.091633
251
9
67
27.888889
0.938596
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
0278e37eb9938a8866a8d9e8e2cde4db871d6d54
10,412
py
Python
medusa/libcloud/storage/drivers/ibm.py
rjshrjndrn/cassandra-medusa
c8b22cdc2ded191a02f99321cf4196ca199d6256
[ "Apache-2.0" ]
1
2021-06-04T16:54:09.000Z
2021-06-04T16:54:09.000Z
medusa/libcloud/storage/drivers/ibm.py
rjshrjndrn/cassandra-medusa
c8b22cdc2ded191a02f99321cf4196ca199d6256
[ "Apache-2.0" ]
null
null
null
medusa/libcloud/storage/drivers/ibm.py
rjshrjndrn/cassandra-medusa
c8b22cdc2ded191a02f99321cf4196ca199d6256
[ "Apache-2.0" ]
null
null
null
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""libcloud storage driver for IBM Cloud Object Storage (S3-compatible)."""

from libcloud.common.types import LibcloudError
from libcloud.common.aws import SignedAWSConnection
from libcloud.storage.drivers.s3 import BaseS3Connection
from libcloud.storage.drivers.s3 import BaseS3StorageDriver, API_VERSION

__all__ = [
    'IBMCloudStorageDriver',
]

# Endpoint host prefix for each IBM COS location.  Every location exposes the
# same host for all four of its storage tiers, so the full region->host table
# is generated below instead of being written out entry by entry.
_HOST_PREFIX_BY_LOCATION = {
    'us': 's3.us', 'us-east': 's3.us', 'us-south': 's3.us',
    'eu': 's3.eu', 'eu-gb': 's3.eu', 'eu-de': 's3.eu',
    'ap': 's3.ap', 'jp-tok': 's3.tok.ap', 'jp-osa': 's3.jp-osa',
    'au-syd': 's3.au-syd',
    'ams03': 's3.ams.eu', 'che01': 's3.che01', 'hkg02': 's3.hkg.ap',
    'mel01': 's3.mel01', 'mex01': 's3.mex01', 'mil01': 's3.mil01',
    'mon01': 's3.mon01', 'par01': 's3.par01', 'osl01': 's3.osl01',
    'sjc04': 's3.sjc04', 'sao01': 's3.sao01', 'seo01': 's3.seo01',
    'sng01': 's3.sng01', 'tor01': 's3.tor01',
}

# The four storage classes offered in every location.
_STORAGE_TIERS = ('standard', 'vault', 'cold', 'smart')

# Mapping of "<location>-<tier>" region names to endpoint hostnames
# (24 locations x 4 tiers = 96 entries, identical to the original literal).
IBM_CLOUD_HOSTS_BY_REGION = {
    '%s-%s' % (location, tier):
        '%s.cloud-object-storage.appdomain.cloud' % prefix
    for location, prefix in _HOST_PREFIX_BY_LOCATION.items()
    for tier in _STORAGE_TIERS
}

# NOTE: the original module first assigned a raw hostname to this constant and
# then shadowed it with a region name a few lines later; only the region name
# was ever observable, so the dead first assignment has been removed.
IBM_CLOUD_DEFAULT_REGION = 'eu-smart'


class IBMCloudConnectionAWS(SignedAWSConnection, BaseS3Connection):
    """AWS-style signed connection to IBM Cloud Object Storage."""
    service_name = 's3'
    version = API_VERSION

    def __init__(self, user_id, key, secure=True, host=None, port=None,
                 url=None, timeout=None, proxy_url=None, token=None,
                 retry_delay=None, backoff=None, **kwargs):
        # The trailing 4 forces AWS signature version 4 in the base class.
        super(IBMCloudConnectionAWS, self).__init__(
            user_id, key, secure, host, port, url, timeout, proxy_url,
            token, retry_delay, backoff, 4)


class IBMStorageDriver(BaseS3StorageDriver):
    """S3-compatible storage driver pointed at an explicit IBM COS host.

    :raises LibcloudError: if no ``host`` is supplied.
    """
    name = 'IBM Cloud Storage'
    website = 'http://cloud.ibm.com/'

    def __init__(self, key, secret=None, secure=True, host=None, port=None,
                 api_version=None, region=IBM_CLOUD_DEFAULT_REGION, **kwargs):
        if host is None:
            raise LibcloudError('host required', driver=self)
        # An explicit display name may be passed via kwargs['name'].
        self.name = kwargs.pop('name', None)
        if self.name is None:
            self.name = 'IBM Cloud Object Storage (%s)' % (region)
        self.ex_location_name = region
        self.region_name = region
        self.connectionCls = IBMCloudConnectionAWS
        # NOTE(review): this assigns to a *class* attribute of the shared
        # connection class, so the host leaks across driver instances --
        # confirm only one region is used per process before relying on this.
        self.connectionCls.host = host
        super(IBMStorageDriver, self).__init__(key, secret, secure, host,
                                               port, api_version, region,
                                               **kwargs)


class IBMCloudStorageDriver(IBMStorageDriver):
    """Driver that resolves the endpoint host from an IBM COS region name.

    :raises LibcloudError: if ``region`` is not a known IBM COS region.
    """
    name = 'IBM Cloud Storage'
    website = 'http://cloud.ibm.com/'

    def __init__(self, key, secret=None, secure=True, host=None, port=None,
                 api_version=None, region=IBM_CLOUD_DEFAULT_REGION, **kwargs):
        if region not in IBM_CLOUD_HOSTS_BY_REGION:
            raise LibcloudError('Unknown region (%s)' % (region), driver=self)
        if host is None:
            host = IBM_CLOUD_HOSTS_BY_REGION[region]
        kwargs['name'] = 'IBM Cloud Object Storage region (%s)' % region
        super(IBMCloudStorageDriver, self).__init__(key, secret, secure,
                                                    host, port, api_version,
                                                    region, **kwargs)
55.679144
78
0.660008
1,316
10,412
5.170213
0.1269
0.160053
0.261905
0.384921
0.71649
0.701058
0.676367
0.598471
0.308201
0.114932
0
0.037602
0.187764
10,412
186
79
55.978495
0.766939
0.073281
0
0.091503
0
0
0.582373
0.448355
0
0
0
0
0
1
0.019608
false
0
0.026144
0
0.104575
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5a01f498b33d3aacaaafb37f08a6bad71002b1d6
262
py
Python
api_fetch.py
rohdalvi/newsbot
2794eb1073a12fd884f272d8aee3cc92744b5de7
[ "MIT" ]
null
null
null
api_fetch.py
rohdalvi/newsbot
2794eb1073a12fd884f272d8aee3cc92744b5de7
[ "MIT" ]
null
null
null
api_fetch.py
rohdalvi/newsbot
2794eb1073a12fd884f272d8aee3cc92744b5de7
[ "MIT" ]
null
null
null
# Path of the key file, relative to the working directory; one key per line:
# line 0 = IEX production, line 1 = IEX test, line 2 = NYT.
API_LIST_FILE = "api_list.txt"


def getIEX_API_KEY():
    """Return the production IEX API key (line 0 of the key file)."""
    return readAPI(0)


def getIEX_API_KEY_TEST():
    """Return the sandbox/test IEX API key (line 1 of the key file)."""
    return readAPI(1)


def getNYT_API_KEY():
    """Return the New York Times API key (line 2 of the key file)."""
    return readAPI(2)


def readAPI(x):
    """Return line *x* of the key file with its trailing newline removed.

    The file is opened with a context manager so the handle is always
    closed -- the original leaked an open file object.

    :raises IndexError: if the file has fewer than ``x + 1`` lines.
    """
    with open(API_LIST_FILE, "r") as f:
        lines = f.readlines()
    # readlines() leaves at most one '\n' at the end of each element, so
    # rstrip('\n') is equivalent to the original translate-based removal.
    return lines[x].rstrip("\n")
23.818182
48
0.648855
41
262
3.95122
0.560976
0.111111
0.148148
0.185185
0
0
0
0
0
0
0
0.014019
0.183206
262
10
49
26.2
0.742991
0
0
0
0
0
0.057252
0
0
0
0
0
0
1
0.4
false
0
0
0.3
0.8
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
5a365cf4aff2fa6086a69aaae404bca56b8f9c20
37
py
Python
python/pyspark_hyperloglog/__init__.py
mozilla/spark-hyperloglog
f58b859d5390a79042b189a4204b0d98695a1c8c
[ "Apache-2.0" ]
7
2018-07-10T15:35:29.000Z
2022-03-20T02:21:16.000Z
python/pyspark_hyperloglog/__init__.py
mozilla/spark-hyperloglog
f58b859d5390a79042b189a4204b0d98695a1c8c
[ "Apache-2.0" ]
8
2017-07-14T03:58:18.000Z
2018-07-24T13:32:05.000Z
python/pyspark_hyperloglog/__init__.py
mozilla/spark-hyperloglog
f58b859d5390a79042b189a4204b0d98695a1c8c
[ "Apache-2.0" ]
3
2017-09-15T23:45:01.000Z
2018-06-26T18:06:08.000Z
# Expose the hll submodule as the package's only public name.
from . import hll

__all__ = ['hll']
9.25
17
0.621622
5
37
3.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.216216
37
3
18
12.333333
0.655172
0
0
0
0
0
0.081081
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
5a60e11b5b2f445297583f3d28ed0a7fb6f72c89
162
py
Python
articlestudy/resources/word/__init__.py
Jay184/articlestudy
48a2ffb8d72baaa1ee28ece1e91fc7b45d8158b9
[ "MIT" ]
null
null
null
articlestudy/resources/word/__init__.py
Jay184/articlestudy
48a2ffb8d72baaa1ee28ece1e91fc7b45d8158b9
[ "MIT" ]
1
2021-07-03T14:11:54.000Z
2021-07-03T17:39:06.000Z
articlestudy/resources/word/__init__.py
Jay184/articlestudy
48a2ffb8d72baaa1ee28ece1e91fc7b45d8158b9
[ "MIT" ]
null
null
null
from .model import Word
from .schema import WordSchema


def register_namespace(api):
    """Attach this resource's namespace to the given *api* object.

    The controller is imported lazily inside the function -- presumably to
    avoid a circular import at package load time (the controller likely
    imports from this package); confirm against the controller module.
    """
    from .controller import api as word_api
    api.add_namespace(word_api)
20.25
43
0.777778
24
162
5.083333
0.541667
0.114754
0
0
0
0
0
0
0
0
0
0
0.166667
162
7
44
23.142857
0.903704
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.6
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5a7c310447e0d08d23714119ec1552416fde0fdf
176
py
Python
src/mist/api/poller/schedulers.py
cc-daveloper/mist.io_mist.api
d3f9b8d478f23bf811c0bc6d3078e512aa975f86
[ "Apache-2.0" ]
1
2019-04-10T11:37:25.000Z
2019-04-10T11:37:25.000Z
src/mist/api/poller/schedulers.py
d-mo/mist.api
d3f9b8d478f23bf811c0bc6d3078e512aa975f86
[ "Apache-2.0" ]
3
2021-04-07T23:15:17.000Z
2021-09-23T23:21:45.000Z
src/mist/api/poller/schedulers.py
cc-daveloper/mist.io_mist.api
d3f9b8d478f23bf811c0bc6d3078e512aa975f86
[ "Apache-2.0" ]
null
null
null
from celerybeatmongo.schedulers import MongoScheduler

from mist.api.poller.models import PollingSchedule


class PollingScheduler(MongoScheduler):
    """Celery-beat scheduler whose entries come from PollingSchedule
    documents (the ``Model`` consumed by MongoScheduler)."""
    Model = PollingSchedule
22
53
0.840909
17
176
8.705882
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.113636
176
7
54
25.142857
0.948718
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
ce600bdcaaaee8afb35919d5f54dd1b4f27b89a4
81
py
Python
tests/wasp1/AllAnswerSets/choice_14.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
19
2015-12-03T08:53:45.000Z
2022-03-31T02:09:43.000Z
tests/wasp1/AllAnswerSets/choice_14.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
80
2017-11-25T07:57:32.000Z
2018-06-10T19:03:30.000Z
tests/wasp1/AllAnswerSets/choice_14.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
6
2015-01-15T07:51:48.000Z
2020-06-18T14:47:48.000Z
input = """ a:-b. b:-a. a v b. :- not a. :- not b. """ output = """ {a, b} """
6.230769
12
0.345679
15
81
1.866667
0.4
0.142857
0
0
0
0
0
0
0
0
0
0
0.283951
81
12
13
6.75
0.482759
0
0
0.2
0
0
0.604938
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
cea6ecced5a62dd06bdaa535604268d1fe249220
219
py
Python
accounts/serializers.py
meysam81/product-hunt-clone
8ff97e8248196c41c927a2c8118e51e553faa152
[ "MIT" ]
null
null
null
accounts/serializers.py
meysam81/product-hunt-clone
8ff97e8248196c41c927a2c8118e51e553faa152
[ "MIT" ]
4
2021-06-08T19:30:01.000Z
2022-03-11T23:37:49.000Z
accounts/serializers.py
meysam81/product-hunt-clone
8ff97e8248196c41c927a2c8118e51e553faa152
[ "MIT" ]
null
null
null
from django.contrib.auth.models import User
from rest_framework import serializers as sz


class UserSerializer(sz.HyperlinkedModelSerializer):
    """DRF serializer exposing username and email of Django auth users."""

    class Meta:
        model = User
        fields = ('username', 'email')
27.375
52
0.730594
25
219
6.36
0.8
0
0
0
0
0
0
0
0
0
0
0
0.191781
219
8
53
27.375
0.898305
0
0
0
0
0
0.059091
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
0c80599878a5ec423621d0f1db201f47f90efa3e
77
py
Python
Python/CodeForces Solutions/501-1000/617A.py
7namansharma/Comp-Prog
b760ef9b4173e6d5851dc63cc92a8e935baf60ed
[ "MIT" ]
null
null
null
Python/CodeForces Solutions/501-1000/617A.py
7namansharma/Comp-Prog
b760ef9b4173e6d5851dc63cc92a8e935baf60ed
[ "MIT" ]
null
null
null
Python/CodeForces Solutions/501-1000/617A.py
7namansharma/Comp-Prog
b760ef9b4173e6d5851dc63cc92a8e935baf60ed
[ "MIT" ]
null
null
null
n = int(input()) if n%5 == 0: print(int(n/5)) else: print(int(n/5)+1)
15.4
21
0.506494
17
77
2.294118
0.529412
0.153846
0.461538
0.512821
0
0
0
0
0
0
0
0.083333
0.220779
77
5
21
15.4
0.566667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0c83180986358443a73a7250f090c9e0de2d83c0
132
py
Python
hello.py
TK5QK/cs3240-labdemo
3b41990e21c9c03ba3026a5fd6e2d86e04b8ac01
[ "MIT" ]
null
null
null
hello.py
TK5QK/cs3240-labdemo
3b41990e21c9c03ba3026a5fd6e2d86e04b8ac01
[ "MIT" ]
null
null
null
hello.py
TK5QK/cs3240-labdemo
3b41990e21c9c03ba3026a5fd6e2d86e04b8ac01
[ "MIT" ]
null
null
null
# Resolved an unresolved git merge conflict that made this file a syntax
# error.  HEAD delegated to helper.greeting("hello"); the other side was a
# plain print("hello").  Both print "hello"; the HEAD (refactored) side is
# kept -- confirm helper.py exists in the repo before merging.
from helper import greeting

greeting("hello")
16.5
48
0.689394
10
132
9.1
0.8
0
0
0
0
0
0
0
0
0
0
0.216667
0.090909
132
7
49
18.857143
0.541667
0
0
0
0
0
0.075758
0
0
0
0
0
0
0
null
null
0
0.166667
null
null
0.166667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
0c9a2da29c03b97978ba0f0afc6af9a7853c35db
90
py
Python
测试/pytest-codes/test_plugin_rerun.py
hhyluor/Code_Repository-
0b692bcd5b00312b5c9fd19d55b50c495d27de2b
[ "MIT" ]
null
null
null
测试/pytest-codes/test_plugin_rerun.py
hhyluor/Code_Repository-
0b692bcd5b00312b5c9fd19d55b50c495d27de2b
[ "MIT" ]
null
null
null
测试/pytest-codes/test_plugin_rerun.py
hhyluor/Code_Repository-
0b692bcd5b00312b5c9fd19d55b50c495d27de2b
[ "MIT" ]
null
null
null
# pytest --reruns n n是重新测试的次数 def test_1(): print('test 1') assert False, '故意失败'
15
29
0.622222
13
90
4.230769
0.846154
0.181818
0
0
0
0
0
0
0
0
0
0.028986
0.233333
90
5
30
18
0.768116
0.3
0
0
0
0
0.163934
0
0
0
0
0
0.333333
1
0.333333
true
0
0
0
0.333333
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
0ca402baa580fadc4f7e20e335a1ae9bfcc6cc39
94
py
Python
rreader_src/config.py
rainygirl/rreader
b669d6999c6085f958051404d4260982738ababd
[ "MIT" ]
10
2022-01-16T14:15:05.000Z
2022-01-30T13:49:27.000Z
rreader_src/config.py
rainygirl/rreader
b669d6999c6085f958051404d4260982738ababd
[ "MIT" ]
1
2022-01-16T14:08:39.000Z
2022-01-16T14:09:51.000Z
rreader_src/config.py
rainygirl/rreader
b669d6999c6085f958051404d4260982738ababd
[ "MIT" ]
null
null
null
import datetime

# Korea Standard Time (Asia/Seoul): a fixed UTC+9 offset with no DST.
_KST_OFFSET = datetime.timedelta(hours=9)
TIMEZONE = datetime.timezone(_KST_OFFSET)
15.666667
57
0.776596
13
94
5.615385
0.692308
0.438356
0
0
0
0
0
0
0
0
0
0.024096
0.117021
94
5
58
18.8
0.855422
0.159574
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
0cb32544f9e48351371163c8834f20f4286aeafb
114
py
Python
pyqtgraph/flowchart/__init__.py
hishizuka/pyqtgraph
4820625d93ffb41f324431d0d29b395cf91f339e
[ "MIT" ]
2,762
2015-01-02T14:34:10.000Z
2022-03-30T14:06:07.000Z
pyqtgraph/flowchart/__init__.py
hishizuka/pyqtgraph
4820625d93ffb41f324431d0d29b395cf91f339e
[ "MIT" ]
1,901
2015-01-12T03:20:30.000Z
2022-03-31T16:33:36.000Z
pyqtgraph/flowchart/__init__.py
hishizuka/pyqtgraph
4820625d93ffb41f324431d0d29b395cf91f339e
[ "MIT" ]
1,038
2015-01-01T04:05:49.000Z
2022-03-31T11:57:51.000Z
# -*- coding: utf-8 -*- from .Flowchart import * from .library import getNodeType, registerNodeType, getNodeTree
22.8
63
0.736842
12
114
7
0.833333
0
0
0
0
0
0
0
0
0
0
0.010204
0.140351
114
4
64
28.5
0.846939
0.184211
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
0cbde566203f3b77c4a4cccf367928aeb96b5d55
187
py
Python
app/rest_api/ideas_core_api/serializer.py
MarcosAIM/Ideas
fd53fd03bf99fcd96a5f6719f8864f98f93f142e
[ "MIT" ]
null
null
null
app/rest_api/ideas_core_api/serializer.py
MarcosAIM/Ideas
fd53fd03bf99fcd96a5f6719f8864f98f93f142e
[ "MIT" ]
null
null
null
app/rest_api/ideas_core_api/serializer.py
MarcosAIM/Ideas
fd53fd03bf99fcd96a5f6719f8864f98f93f142e
[ "MIT" ]
null
null
null
from rest_framework import serializers

from .models import BaseIdea


class IdeaSerializer(serializers.ModelSerializer):
    """DRF serializer exposing every field of the BaseIdea model."""

    class Meta:
        model = BaseIdea
        fields = '__all__'
26.714286
50
0.743316
19
187
7.052632
0.736842
0
0
0
0
0
0
0
0
0
0
0
0.203209
187
7
51
26.714286
0.899329
0
0
0
0
0
0.037234
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
0cc3cfe38eeb0eb15f7d8e5a0c2281884b79f15b
126
py
Python
ai1wm/exception.py
sanderwang/ai1wm
88aa3b471a51b4d4edde7096c383c9a14f232180
[ "MIT" ]
null
null
null
ai1wm/exception.py
sanderwang/ai1wm
88aa3b471a51b4d4edde7096c383c9a14f232180
[ "MIT" ]
null
null
null
ai1wm/exception.py
sanderwang/ai1wm
88aa3b471a51b4d4edde7096c383c9a14f232180
[ "MIT" ]
1
2020-01-31T16:40:58.000Z
2020-01-31T16:40:58.000Z
""" Package specific exceptions. """ class Ai1wmError(Exception): """ Exceptions raised from this package. """ pass
18
48
0.666667
12
126
7
0.833333
0
0
0
0
0
0
0
0
0
0
0.009901
0.198413
126
6
49
21
0.821782
0.52381
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
0cf7f4cfa42e0068b60bf9df6a4648e7fc146fbb
105
py
Python
ginjinn/__main__.py
AGOberprieler/GinJinn_development
e2424ccad4a1ab05f9c21c26cd177aaa8d69b2c7
[ "Apache-2.0" ]
17
2021-07-24T20:55:58.000Z
2022-02-09T05:15:04.000Z
ginjinn/__main__.py
AGOberprieler/GinJinn_development
e2424ccad4a1ab05f9c21c26cd177aaa8d69b2c7
[ "Apache-2.0" ]
1
2021-12-21T06:33:56.000Z
2022-02-05T13:57:53.000Z
ginjinn/__main__.py
AGOberprieler/GinJinn2
527feac125f476165e332277823c11016565f99d
[ "Apache-2.0" ]
null
null
null
'''
GinJinn main: module entry point that delegates to the command-line interface.
'''
from ginjinn import commandline

# Allow execution via ``python -m ginjinn``.
if __name__ == "__main__":
    commandline.main()
13.125
31
0.685714
11
105
5.818182
0.636364
0
0
0
0
0
0
0
0
0
0
0
0.180952
105
7
32
15
0.744186
0.114286
0
0
0
0
0.094118
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
0b2587c678a376bc3d24443183a3ee38fcce91f3
19
py
Python
testsuite/simple-package/package/__init__.py
xoviat/modulegraph2
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
[ "MIT" ]
9
2020-03-22T14:48:01.000Z
2021-05-30T12:18:12.000Z
testsuite/simple-package/package/__init__.py
xoviat/modulegraph2
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
[ "MIT" ]
15
2020-01-06T10:02:32.000Z
2021-05-28T12:22:44.000Z
testsuite/simple-package/package/__init__.py
ronaldoussoren/modulegraph2
b6ab1766b0098651b51083235ff8a18a5639128b
[ "MIT" ]
4
2020-05-10T18:51:41.000Z
2021-04-07T14:03:12.000Z
""" Basic init """
9.5
18
0.473684
2
19
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.210526
19
1
19
19
0.6
0.526316
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
0b55a243e869fab9acefed647cf8dbe41461fafd
768
py
Python
source/utils/math_.py
TungE/complex-fourier-series-animator
bdc0e391ee10055ee3ce38014e1bf51fd7b7b56c
[ "MIT" ]
null
null
null
source/utils/math_.py
TungE/complex-fourier-series-animator
bdc0e391ee10055ee3ce38014e1bf51fd7b7b56c
[ "MIT" ]
null
null
null
source/utils/math_.py
TungE/complex-fourier-series-animator
bdc0e391ee10055ee3ce38014e1bf51fd7b7b56c
[ "MIT" ]
null
null
null
import numpy as np
from utils.defaults import numpy_ as np_

# One full turn in radians (2*pi), the math.tau constant.
tau = 2 * np.pi


def sin(angle):
    """Sine of *angle* in radians.

    ``np_('sin')`` presumably looks up the sine function from a configurable
    numpy-like backend in utils.defaults -- confirm against that module.
    """
    return np_('sin')(angle)


def cos(angle):
    """Cosine of *angle* in radians, via the same ``np_`` lookup as sin()."""
    return np_('cos')(angle)


def array(*args, **kwargs):
    """Array constructor obtained through the ``np_`` backend lookup."""
    return np_('array')(*args, **kwargs)


def get_distance(point0, point1):
    """Euclidean distance between two point-like coordinate sequences."""
    return np.linalg.norm(array(point0) - array(point1))


def to_polar(complex_number):
    """Return the (magnitude, direction-in-radians) polar form of
    *complex_number*."""
    magnitude = np.abs(complex_number)
    direction = np.angle(complex_number)
    return magnitude, direction


def to_complex_number(magnitude, direction):
    """Complex number with the given polar *magnitude* and *direction*
    (radians): magnitude * e**(i*direction)."""
    complex_number = magnitude * np.exp(direction * 1j)
    return complex_number


def to_cartesian(magnitude, direction):
    """Return the (x, y) Cartesian coordinates of the given polar form as an
    array (real and imaginary parts of the corresponding complex number)."""
    complex_number = to_complex_number(magnitude, direction)
    return array((complex_number.real, complex_number.imag))
23.272727
60
0.722656
105
768
5.095238
0.342857
0.242991
0.164486
0.056075
0.123364
0
0
0
0
0
0
0.009317
0.161458
768
32
61
24
0.821429
0
0
0
0
0
0.014323
0
0
0
0
0
0
1
0.333333
false
0
0.095238
0.190476
0.761905
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
0b7cf6e2ab0873dd353f698f4f5f5e22e31b4917
54
py
Python
fluiddb/schema/scripts/tests/__init__.py
fluidinfo/fluiddb
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
[ "Apache-2.0" ]
3
2021-05-10T14:41:30.000Z
2021-12-16T05:53:30.000Z
fluiddb/schema/scripts/tests/__init__.py
fluidinfo/fluiddb
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
[ "Apache-2.0" ]
null
null
null
fluiddb/schema/scripts/tests/__init__.py
fluidinfo/fluiddb
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
[ "Apache-2.0" ]
2
2018-01-24T09:03:21.000Z
2021-06-25T08:34:54.000Z
"""Logic to create and manage the database schema."""
27
53
0.722222
8
54
4.875
1
0
0
0
0
0
0
0
0
0
0
0
0.148148
54
1
54
54
0.847826
0.87037
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
0b9d006e0178c26a1c0e30ae5ba2f7b356afd636
183
py
Python
src/mp_api/robocrys/client.py
jmmshn/api
5254a453f6ec749793639e4ec08bea14628c7dc3
[ "BSD-3-Clause-LBNL" ]
null
null
null
src/mp_api/robocrys/client.py
jmmshn/api
5254a453f6ec749793639e4ec08bea14628c7dc3
[ "BSD-3-Clause-LBNL" ]
159
2020-11-16T16:02:31.000Z
2022-03-28T15:03:38.000Z
src/mp_api/robocrys/client.py
jmmshn/api
5254a453f6ec749793639e4ec08bea14628c7dc3
[ "BSD-3-Clause-LBNL" ]
null
null
null
from mp_api.core.client import BaseRester
from mp_api.robocrys.models import RobocrysDoc


class RobocrysRester(BaseRester):
    """Rester serving ``RobocrysDoc`` documents under the ``robocrys`` suffix."""

    # Document model used to deserialize responses.
    document_model = RobocrysDoc
    # Endpoint suffix appended to the base URL by ``BaseRester``
    # — presumably; confirm against BaseRester's implementation.
    suffix = "robocrys"
20.333333
46
0.79235
22
183
6.454545
0.681818
0.084507
0.126761
0
0
0
0
0
0
0
0
0
0.147541
183
8
47
22.875
0.910256
0
0
0
0
0
0.043716
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
0bb15ecc08ad9e34f1df1d79a2804b71566ec0bb
57,479
py
Python
code/34b_parcel-based_LiveabilityCI_ULI_v2_i15.py
carlhiggs/urban_liveability_index
61decb632e0b0db28c181fe62c548f2b338cc47c
[ "MIT" ]
null
null
null
code/34b_parcel-based_LiveabilityCI_ULI_v2_i15.py
carlhiggs/urban_liveability_index
61decb632e0b0db28c181fe62c548f2b338cc47c
[ "MIT" ]
null
null
null
code/34b_parcel-based_LiveabilityCI_ULI_v2_i15.py
carlhiggs/urban_liveability_index
61decb632e0b0db28c181fe62c548f2b338cc47c
[ "MIT" ]
null
null
null
# Purpose: create parcel-based liveability composite indicator # In particular, 'hard' and 'soft'-cutoff versions of ULI with 15 indicators (grouped destinations) # Author: Carl Higgs # Date: 20180411 # # Submitted for publication as: # Higgs, C., Badland, H., Simons, K. and Giles-Corti, B. Urban liveability and adult cardiometabolic health: policy-relevant evidence from a cross-sectional Australian built environment data linkage study. npj Urban Sustainability (2021; submitted for review). # Postgresql MPI implementation steps for i indicators across j parcels # De Muro P., Mazziotta M., Pareto A. (2011), "Composite Indices of Development and Poverty: An Application to MDGs", Social Indicators Research, Volume 104, Number 1, pp. 1-18. # Vidoli, F., Fusco, E. Compind: Composite Indicators Functions, Version 1.1.2, 2016 # Adapted for postgresql by Carl Higgs, 4/4/2017 import os import sys import time import psycopg2 # for database communication and management import subprocess as sp # for executing external commands (e.g. pgsql2shp) from script_running_log import script_running_log from ConfigParser import SafeConfigParser # ULI schema to which this script pertains # -- created tables should be nested within this schema for tidiness and organisation # detail: versions 2, with 15 indicators (grouped destinations) # NOTE: original ULI's excluded parcels script was considered general enough to apply equally for ULI_v2_*, # However, if future ULI versions are devised and prepared, the generation of schema specific # parcel exclusion tables should be created and drawn upon, catering to schema-specific indicator sets. 
uli_schema = 'uli_v2_i15' parser = SafeConfigParser() parser.read(os.path.join(sys.path[0],'config.ini')) # simple timer for log file start = time.time() script = os.path.basename(sys.argv[0]) task = 'create parcel-based liveability composite indicator for ULI schema {0}'.format(uli_schema) A_pointsID = parser.get('parcels', 'parcel_id') exclusion_criteria = 'WHERE {0} NOT IN (SELECT DISTINCT({0}) FROM excluded_parcels)'.format(A_pointsID.lower()) parcelmb_exclusion_criteria = 'WHERE parcelmb.{0} NOT IN (SELECT DISTINCT({0}) FROM excluded_parcels)'.format(A_pointsID.lower()) # SQL Settings - storing passwords in plain text is obviously not ideal sqlDBName = parser.get('postgresql', 'database') sqlDBHost = parser.get('postgresql', 'host') sqlUserName = parser.get('postgresql', 'user') sqlPWD = parser.get('postgresql', 'password') conn = psycopg2.connect(database=sqlDBName, user=sqlUserName, password=sqlPWD) curs = conn.cursor() # output folder for shape files outpath = parser.get('data','folderPath') # Create schema for this Urban Liveability Index version; createSchema = ''' DROP SCHEMA IF EXISTS {0} CASCADE; CREATE SCHEMA {0}; '''.format(uli_schema) curs.execute(createSchema) conn.commit() # Define function to shape if variable is outlying createFunction = ''' -- outlier limiting/compressing function -- if x < -2SD(x), scale up (hard knee upwards compression) to reach minimum by -3SD. -- if x > 2SD(x), scale up (hard knee downwards compression) to reach maximum by 3SD(x). 
CREATE OR REPLACE FUNCTION clean(var double precision,min_val double precision, max_val double precision, mean double precision, sd double precision) RETURNS double precision AS $$ DECLARE ll double precision := mean - 2*sd; ul double precision := mean + 2*sd; c double precision := 1*sd; BEGIN IF (min_val < ll-c) AND (var < ll) THEN RETURN ll - c + c*(var - min_val)/(ll-min_val); ELSIF (max_val > ul+c) AND (var > ul) THEN RETURN ul + c*(var - ul)/( max_val - ul ); ELSE RETURN var; END IF; END; $$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; ''' curs.execute(createFunction) conn.commit() print("Created custom function.") # create destination group based indicators specific to this liveability schema for i in ['hard','soft']: createTable = ''' DROP TABLE IF EXISTS {3}.ind_groups_{1} ; CREATE TABLE {3}.ind_groups_{1} AS SELECT {0}, (COALESCE(communitycentre_1000m , 0) + COALESCE(museumartgallery_3200m , 0) + COALESCE(cinematheatre_3200m , 0) + COALESCE(libraries_2014_1000m , 0)) / 4.0 AS community_culture_leisure, (COALESCE(childcareoutofschool_1600m , 0) + COALESCE(childcare_800m , 0)) / 2.0 AS early_years, (COALESCE(statesecondaryschools_1600m , 0) + COALESCE(stateprimaryschools_1600m , 0)) / 2.0 AS education, (COALESCE(agedcare_2012_1000m , 0) + COALESCE(communityhealthcentres_1000m, 0) + COALESCE(dentists_1000m , 0) + COALESCE(gp_clinics_1000m , 0) + COALESCE(maternalchildhealth_1000m , 0) + COALESCE(pharmacy_1000m , 0)) / 6.0 AS health_services, (COALESCE(swimmingpools_1200m , 0) + COALESCE(sport_1200m , 0)) / 2.0 AS sport_rec, (COALESCE(supermarkets_1000m , 0) + COALESCE(fishmeatpoultryshops_1600m , 0) + COALESCE(fruitvegeshops_1600m , 0)) / 3.0 AS food, (COALESCE(conveniencestores_1000m , 0) + COALESCE(petrolstations_1000m , 0) + COALESCE(newsagents_1000m , 0)) / 3.0 AS convenience FROM ind_dest_{1} {2}; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created grouped indicator table 
'{1}.ind_groups_{0}'.".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.ind_summary_means_li_{1} ; CREATE TABLE {3}.ind_summary_means_li_{1} AS SELECT (SELECT AVG(dd_nh1600m ) FROM dwelling_density {2}) AS dd_nh1600m , (SELECT AVG(sc_nh1600m ) FROM street_connectivity {2}) AS sc_nh1600m , (SELECT AVG(pos_greq15000m2_in_400m_{1} ) FROM ind_pos {2}) AS pos15000_access , (SELECT AVG(sa1_prop_affordablehous_30_40) FROM ind_abs {2}) AS sa1_prop_affordablehous_30_40, (SELECT AVG(sa2_prop_live_work_sa3 ) FROM ind_abs {2}) AS sa2_prop_live_work_sa3 , (SELECT AVG(community_culture_leisure ) FROM {3}.ind_groups_{1} {2}) AS community_culture_leisure , (SELECT AVG(early_years ) FROM {3}.ind_groups_{1} {2}) AS early_years , (SELECT AVG(education ) FROM {3}.ind_groups_{1} {2}) AS education , (SELECT AVG(health_services ) FROM {3}.ind_groups_{1} {2}) AS health_services , (SELECT AVG(sport_rec ) FROM {3}.ind_groups_{1} {2}) AS sport_rec , (SELECT AVG(food ) FROM {3}.ind_groups_{1} {2}) AS food , (SELECT AVG(convenience ) FROM {3}.ind_groups_{1} {2}) AS convenience , (SELECT AVG(busstop2012_400m ) FROM ind_dest_{1} {2}) AS busstop2012_400m , (SELECT AVG(tramstops2012_600m ) FROM ind_dest_{1} {2}) AS tramstops2012_600m , (SELECT AVG(trainstations2012_800m ) FROM ind_dest_{1} {2}) AS trainstations2012_800m ; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.ind_summary_means_li_{0}', a summary of liveability indicator means.".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.ind_summary_sd_li_{1} ; CREATE TABLE {3}.ind_summary_sd_li_{1} AS SELECT (SELECT stddev_pop(dd_nh1600m ) FROM dwelling_density {2}) AS dd_nh1600m , (SELECT stddev_pop(sc_nh1600m ) FROM street_connectivity {2}) AS sc_nh1600m , (SELECT stddev_pop(pos_greq15000m2_in_400m_{1} ) FROM ind_pos {2}) AS pos15000_access , (SELECT stddev_pop(sa1_prop_affordablehous_30_40) FROM ind_abs {2}) AS 
sa1_prop_affordablehous_30_40, (SELECT stddev_pop(sa2_prop_live_work_sa3 ) FROM ind_abs {2}) AS sa2_prop_live_work_sa3 , (SELECT stddev_pop(community_culture_leisure ) FROM {3}.ind_groups_{1} {2}) AS community_culture_leisure , (SELECT stddev_pop(early_years ) FROM {3}.ind_groups_{1} {2}) AS early_years , (SELECT stddev_pop(education ) FROM {3}.ind_groups_{1} {2}) AS education , (SELECT stddev_pop(health_services ) FROM {3}.ind_groups_{1} {2}) AS health_services , (SELECT stddev_pop(sport_rec ) FROM {3}.ind_groups_{1} {2}) AS sport_rec , (SELECT stddev_pop(food ) FROM {3}.ind_groups_{1} {2}) AS food , (SELECT stddev_pop(convenience ) FROM {3}.ind_groups_{1} {2}) AS convenience , (SELECT stddev_pop(busstop2012_400m ) FROM ind_dest_{1} {2}) AS busstop2012_400m , (SELECT stddev_pop(tramstops2012_600m ) FROM ind_dest_{1} {2}) AS tramstops2012_600m , (SELECT stddev_pop(trainstations2012_800m ) FROM ind_dest_{1} {2}) AS trainstations2012_800m ; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.ind_summary_sd_li_{0}', a summary of liveability indicator standard deviations.".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.ind_summary_min_li_{1} ; CREATE TABLE {3}.ind_summary_min_li_{1} AS SELECT (SELECT min(dd_nh1600m ) FROM dwelling_density {2}) AS dd_nh1600m , (SELECT min(sc_nh1600m ) FROM street_connectivity {2}) AS sc_nh1600m , (SELECT min(pos_greq15000m2_in_400m_{1} ) FROM ind_pos {2}) AS pos15000_access , (SELECT min(sa1_prop_affordablehous_30_40) FROM ind_abs {2}) AS sa1_prop_affordablehous_30_40, (SELECT min(sa2_prop_live_work_sa3 ) FROM ind_abs {2}) AS sa2_prop_live_work_sa3 , (SELECT min(community_culture_leisure ) FROM {3}.ind_groups_{1} {2}) AS community_culture_leisure , (SELECT min(early_years ) FROM {3}.ind_groups_{1} {2}) AS early_years , (SELECT min(education ) FROM {3}.ind_groups_{1} {2}) AS education , (SELECT min(health_services ) FROM {3}.ind_groups_{1} {2}) 
AS health_services , (SELECT min(sport_rec ) FROM {3}.ind_groups_{1} {2}) AS sport_rec , (SELECT min(food ) FROM {3}.ind_groups_{1} {2}) AS food , (SELECT min(convenience ) FROM {3}.ind_groups_{1} {2}) AS convenience , (SELECT min(busstop2012_400m ) FROM ind_dest_{1} {2}) AS busstop2012_400m , (SELECT min(tramstops2012_600m ) FROM ind_dest_{1} {2}) AS tramstops2012_600m , (SELECT min(trainstations2012_800m ) FROM ind_dest_{1} {2}) AS trainstations2012_800m ; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.ind_summary_min_li_{0}'".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.ind_summary_max_li_{1} ; CREATE TABLE {3}.ind_summary_max_li_{1} AS SELECT (SELECT max(dd_nh1600m ) FROM dwelling_density {2}) AS dd_nh1600m , (SELECT max(sc_nh1600m ) FROM street_connectivity {2}) AS sc_nh1600m , (SELECT max(pos_greq15000m2_in_400m_{1} ) FROM ind_pos {2}) AS pos15000_access , (SELECT max(sa1_prop_affordablehous_30_40) FROM ind_abs {2}) AS sa1_prop_affordablehous_30_40, (SELECT max(sa2_prop_live_work_sa3 ) FROM ind_abs {2}) AS sa2_prop_live_work_sa3 , (SELECT max(community_culture_leisure ) FROM {3}.ind_groups_{1} {2}) AS community_culture_leisure , (SELECT max(early_years ) FROM {3}.ind_groups_{1} {2}) AS early_years , (SELECT max(education ) FROM {3}.ind_groups_{1} {2}) AS education , (SELECT max(health_services ) FROM {3}.ind_groups_{1} {2}) AS health_services , (SELECT max(sport_rec ) FROM {3}.ind_groups_{1} {2}) AS sport_rec , (SELECT max(food ) FROM {3}.ind_groups_{1} {2}) AS food , (SELECT max(convenience ) FROM {3}.ind_groups_{1} {2}) AS convenience , (SELECT max(busstop2012_400m ) FROM ind_dest_{1} {2}) AS busstop2012_400m , (SELECT max(tramstops2012_600m ) FROM ind_dest_{1} {2}) AS tramstops2012_600m , (SELECT max(trainstations2012_800m ) FROM ind_dest_{1} {2}) AS trainstations2012_800m ; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) 
curs.execute(createTable) conn.commit() print("Created table '{1}.ind_summary_max_li_{0}'".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.clean_raw_ind_li_{1} ; CREATE TABLE {3}.clean_raw_ind_li_{1} AS SELECT parcelmb.{0}, abs_linkage.mb_code11, abs_linkage.sa1_7dig11, abs_linkage.sa2_name11, abs_linkage.sa3_name11, abs_linkage.ste_name11, non_abs_linkage.ssc_name, non_abs_linkage.lga_name11, clean(t3.dd_nh1600m ,_min.dd_nh1600m , _max.dd_nh1600m , _mean.dd_nh1600m ,_sd.dd_nh1600m ) AS dd_nh1600m , clean(t4.sc_nh1600m ,_min.sc_nh1600m , _max.sc_nh1600m , _mean.sc_nh1600m ,_sd.sc_nh1600m ) AS sc_nh1600m , clean(t7.pos_greq15000m2_in_400m_{1} ,_min.pos15000_access , _max.pos15000_access , _mean.pos15000_access ,_sd.pos15000_access ) AS pos15000_access , clean(t0.sa1_prop_affordablehous_30_40,_min.sa1_prop_affordablehous_30_40, _max.sa1_prop_affordablehous_30_40, _mean.sa1_prop_affordablehous_30_40,_sd.sa1_prop_affordablehous_30_40) AS sa1_prop_affordablehous_30_40 , clean(t0.sa2_prop_live_work_sa3 ,_min.sa2_prop_live_work_sa3 , _max.sa2_prop_live_work_sa3 , _mean.sa2_prop_live_work_sa3 ,_sd.sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , clean(t8.community_culture_leisure ,_min.community_culture_leisure , _max.community_culture_leisure , _mean.community_culture_leisure ,_sd.community_culture_leisure ) AS community_culture_leisure , clean(t8.early_years ,_min.early_years , _max.early_years , _mean.early_years ,_sd.early_years ) AS early_years , clean(t8.education ,_min.education , _max.education , _mean.education ,_sd.education ) AS education , clean(t8.health_services ,_min.health_services , _max.health_services , _mean.health_services ,_sd.health_services ) AS health_services , clean(t8.sport_rec ,_min.sport_rec , _max.sport_rec , _mean.sport_rec ,_sd.sport_rec ) AS sport_rec , clean(t8.food ,_min.food , _max.food , _mean.food ,_sd.food ) AS food , clean(t8.convenience ,_min.convenience , _max.convenience , _mean.convenience ,_sd.convenience ) 
AS convenience , clean(t9.busstop2012_400m ,_min.busstop2012_400m , _max.busstop2012_400m , _mean.busstop2012_400m ,_sd.busstop2012_400m ) AS busstop2012_400m , clean(t9.tramstops2012_600m ,_min.tramstops2012_600m , _max.tramstops2012_600m , _mean.tramstops2012_600m ,_sd.tramstops2012_600m ) AS tramstops2012_600m , clean(t9.trainstations2012_800m ,_min.trainstations2012_800m , _max.trainstations2012_800m , _mean.trainstations2012_800m ,_sd.trainstations2012_800m ) AS trainstations2012_800m FROM parcelmb LEFT JOIN abs_linkage ON parcelmb.mb_code11 = abs_linkage.mb_code11 LEFT JOIN non_abs_linkage ON parcelmb.{0} = non_abs_linkage.{0} LEFT JOIN ind_abs AS t0 ON parcelmb.{0} = t0.{0} LEFT JOIN dwelling_density AS t3 ON parcelmb.{0} = t3.{0} LEFT JOIN street_connectivity AS t4 ON parcelmb.{0} = t4.{0} LEFT JOIN ind_pos AS t7 ON parcelmb.{0} = t7.{0} LEFT JOIN {3}.ind_groups_{1} AS t8 ON parcelmb.{0} = t8.{0} LEFT JOIN ind_dest_{1} AS t9 ON parcelmb.{0} = t9.{0}, {3}.ind_summary_means_li_{1} AS _mean, {3}.ind_summary_sd_li_{1} AS _sd, {3}.ind_summary_min_li_{1} AS _min, {3}.ind_summary_max_li_{1} AS _max {2} ; ALTER TABLE {3}.clean_raw_ind_li_{1} ADD PRIMARY KEY ({0}); '''.format(A_pointsID.lower(),i,parcelmb_exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_raw_ind_li_{0}'".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.clean_ind_summary_means_li_{1} ; CREATE TABLE {3}.clean_ind_summary_means_li_{1} AS SELECT AVG(dd_nh1600m ) AS dd_nh1600m , AVG(sc_nh1600m ) AS sc_nh1600m , AVG(pos15000_access ) AS pos15000_access , AVG(sa1_prop_affordablehous_30_40) AS sa1_prop_affordablehous_30_40 , AVG(sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , AVG(community_culture_leisure ) AS community_culture_leisure , AVG(early_years ) AS early_years , AVG(education ) AS education , AVG(health_services ) AS health_services , AVG(sport_rec ) AS sport_rec , AVG(food ) AS food , AVG(convenience ) AS convenience , 
AVG(busstop2012_400m ) AS busstop2012_400m , AVG(tramstops2012_600m ) AS tramstops2012_600m , AVG(trainstations2012_800m ) AS trainstations2012_800m FROM {3}.clean_raw_ind_li_{1}; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_ind_summary_means_li_{0}', a summary of liveability indicator means.".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.clean_ind_summary_sd_li_{1} ; CREATE TABLE {3}.clean_ind_summary_sd_li_{1} AS SELECT stddev_pop(dd_nh1600m ) AS dd_nh1600m , stddev_pop(sc_nh1600m ) AS sc_nh1600m , stddev_pop(pos15000_access ) AS pos15000_access , stddev_pop(sa1_prop_affordablehous_30_40) AS sa1_prop_affordablehous_30_40 , stddev_pop(sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , stddev_pop(community_culture_leisure ) AS community_culture_leisure , stddev_pop(early_years ) AS early_years , stddev_pop(education ) AS education , stddev_pop(health_services ) AS health_services , stddev_pop(sport_rec ) AS sport_rec , stddev_pop(food ) AS food , stddev_pop(convenience ) AS convenience , stddev_pop(busstop2012_400m ) AS busstop2012_400m , stddev_pop(tramstops2012_600m ) AS tramstops2012_600m , stddev_pop(trainstations2012_800m ) AS trainstations2012_800m FROM {3}.clean_raw_ind_li_{1}; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_ind_summary_sd_li_{0}', a summary of liveability indicator standard deviations.".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.clean_ind_summary_min_li_{1} ; CREATE TABLE {3}.clean_ind_summary_min_li_{1} AS SELECT min(dd_nh1600m ) AS dd_nh1600m , min(sc_nh1600m ) AS sc_nh1600m , min(pos15000_access ) AS pos15000_access , min(sa1_prop_affordablehous_30_40) AS sa1_prop_affordablehous_30_40 , min(sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , min(community_culture_leisure ) AS community_culture_leisure , min(early_years ) 
AS early_years , min(education ) AS education , min(health_services ) AS health_services , min(sport_rec ) AS sport_rec , min(food ) AS food , min(convenience ) AS convenience , min(busstop2012_400m ) AS busstop2012_400m , min(tramstops2012_600m ) AS tramstops2012_600m , min(trainstations2012_800m ) AS trainstations2012_800m FROM {3}.clean_raw_ind_li_{1}; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_ind_summary_min_li_{0}'".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {3}.clean_ind_summary_max_li_{1} ; CREATE TABLE {3}.clean_ind_summary_max_li_{1} AS SELECT max(dd_nh1600m ) AS dd_nh1600m , max(sc_nh1600m ) AS sc_nh1600m , max(pos15000_access ) AS pos15000_access , max(sa1_prop_affordablehous_30_40) AS sa1_prop_affordablehous_30_40 , max(sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , max(community_culture_leisure ) AS community_culture_leisure , max(early_years ) AS early_years , max(education ) AS education , max(health_services ) AS health_services , max(sport_rec ) AS sport_rec , max(food ) AS food , max(convenience ) AS convenience , max(busstop2012_400m ) AS busstop2012_400m , max(tramstops2012_600m ) AS tramstops2012_600m , max(trainstations2012_800m ) AS trainstations2012_800m FROM {3}.clean_raw_ind_li_{1}; '''.format(A_pointsID.lower(),i,exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_ind_summary_max_li_{0}'".format(i,uli_schema)) createTable = ''' -- Note that in this normalisation stage, indicator polarity is adjusted for: air pollution has values substracted from 100, whilst positive indicators have them added. 
-- ALSO note that walkability subindicators are processed here for completeness and comparison purposes -- these are not used in final LI calculation (other than as walkability components) DROP TABLE IF EXISTS {3}.clean_ind_mpi_norm_{1} ; CREATE TABLE {3}.clean_ind_mpi_norm_{1} AS SELECT {0}, mb_code11, sa1_7dig11, sa2_name11, sa3_name11, ste_name11, ssc_name, lga_name11, 100 + 10 * (t.dd_nh1600m - _mean.dd_nh1600m ) / _sd.dd_nh1600m ::double precision AS dd_nh1600m , 100 + 10 * (t.sc_nh1600m - _mean.sc_nh1600m ) / _sd.sc_nh1600m ::double precision AS sc_nh1600m , 100 + 10 * (t.pos15000_access - _mean.pos15000_access ) / _sd.pos15000_access ::double precision AS pos15000_access , 100 + 10 * (t.sa1_prop_affordablehous_30_40- _mean.sa1_prop_affordablehous_30_40 ) / _sd.sa1_prop_affordablehous_30_40::double precision AS sa1_prop_affordablehousing , 100 + 10 * (t.sa2_prop_live_work_sa3 - _mean.sa2_prop_live_work_sa3 ) / _sd.sa2_prop_live_work_sa3 ::double precision AS sa2_prop_live_work_sa3 , 100 + 10 * (t.community_culture_leisure - _mean.community_culture_leisure ) / _sd.community_culture_leisure ::double precision AS community_culture_leisure , 100 + 10 * (t.early_years - _mean.early_years ) / _sd.early_years ::double precision AS early_years , 100 + 10 * (t.education - _mean.education ) / _sd.education ::double precision AS education , 100 + 10 * (t.health_services - _mean.health_services ) / _sd.health_services ::double precision AS health_services , 100 + 10 * (t.sport_rec - _mean.sport_rec ) / _sd.sport_rec ::double precision AS sport_rec , 100 + 10 * (t.food - _mean.food ) / _sd.food ::double precision AS food , 100 + 10 * (t.convenience - _mean.convenience ) / _sd.convenience ::double precision AS convenience , 100 + 10 * (t.busstop2012_400m - _mean.busstop2012_400m ) / _sd.busstop2012_400m ::double precision AS busstop2012_400m , 100 + 10 * (t.tramstops2012_600m - _mean.tramstops2012_600m ) / _sd.tramstops2012_600m ::double precision AS tramstops2012_600m , 
100 + 10 * (t.trainstations2012_800m - _mean.trainstations2012_800m ) / _sd.trainstations2012_800m ::double precision AS trainstations2012_800m FROM {3}.clean_raw_ind_li_{1} AS t, {3}.clean_ind_summary_means_li_{1} AS _mean, {3}.clean_ind_summary_sd_li_{1} AS _sd; ALTER TABLE {3}.clean_ind_mpi_norm_{1} ADD PRIMARY KEY ({0}); '''.format(A_pointsID.lower(),i,parcelmb_exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_ind_mpi_norm_{0}', a table of MPI-normalised indicators.".format(i,uli_schema)) createTable = ''' -- 2. Create MPI estimates at parcel level -- rowmean*(1-(rowsd(z_j)/rowmean(z_j))^2) AS mpi_est_j -- took 1 minute for 2million vars DROP TABLE IF EXISTS {2}.clean_li_ci_{1}_est ; CREATE TABLE {2}.clean_li_ci_{1}_est AS SELECT {0}, AVG(val) AS mean, stddev_pop(val) AS sd, stddev_pop(val)/AVG(val) AS cv, AVG(val)-(stddev_pop(val)^2)/AVG(val) AS li_ci_est FROM (SELECT {0}, unnest(array[dd_nh1600m,sc_nh1600m,pos15000_access,sa1_prop_affordablehousing,sa2_prop_live_work_sa3,community_culture_leisure,early_years,education,health_services,sport_rec,food,convenience,busstop2012_400m,tramstops2012_600m,trainstations2012_800m]) as val FROM {2}.clean_ind_mpi_norm_{1} ) alias GROUP BY {0}; '''.format(A_pointsID.lower(),i,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_li_ci_{0}_est', a parcel level composite indicator estimate for liveability.".format(i,uli_schema)) createTable = ''' DROP TABLE IF EXISTS {2}.clean_li_parcel_ci_{1} ; CREATE TABLE {2}.clean_li_parcel_ci_{1} AS SELECT {2}.clean_ind_mpi_norm_{1}.{0}, mb_code11, sa1_7dig11, sa2_name11, sa3_name11, ssc_name, lga_name11, ste_name11, li_ci_est, dd_nh1600m , sc_nh1600m , pos15000_access, sa1_prop_affordablehousing, sa2_prop_live_work_sa3, community_culture_leisure , early_years , education , health_services , sport_rec , food , convenience , busstop2012_400m , tramstops2012_600m , trainstations2012_800m FROM 
{2}.clean_ind_mpi_norm_{1} LEFT JOIN {2}.clean_li_ci_{1}_est ON {2}.clean_li_ci_{1}_est.{0} = {2}.clean_ind_mpi_norm_{1}.{0}; ALTER TABLE {2}.clean_li_parcel_ci_{1} ADD PRIMARY KEY ({0}); -- export for analysis, although this may be better achieved through sql queries by SA1 -- same effect but lighter on memory -- COPY {2}.clean_li_parcel_ci_{1} TO 'C:/data/liveability/data/{2}_li_parcel_ci_{1}.csv' DELIMITER ',' CSV HEADER; '''.format(A_pointsID.lower(),i,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.clean_li_parcel_ci_{0}', a summary table combining the parcel level composite indicator estimate for for liveability with associated regions, and the standardised indicators of which the pCI is comprised of.".format(i,uli_schema)) # create raw indicator table createTable = ''' DROP TABLE IF EXISTS {3}.raw_indicators_{1} ; CREATE TABLE {3}.raw_indicators_{1} AS SELECT parcelmb.{0}, abs_linkage.mb_code11, abs_linkage.sa1_7dig11, abs_linkage.sa2_name11, abs_linkage.sa3_name11, abs_linkage.ste_name11, non_abs_linkage.ssc_name, non_abs_linkage.lga_name11, li_ci_est, dd_nh1600m, sc_nh1600m, pos_greq15000m2_in_400m_{1} AS pos15000_access, sa1_prop_affordablehous_30_40 AS sa1_prop_affordablehousing, sa2_prop_live_work_sa3, community_culture_leisure , early_years , education , health_services , sport_rec , food , convenience , busstop2012_400m , tramstops2012_600m , trainstations2012_800m FROM parcelmb LEFT JOIN abs_linkage ON parcelmb.mb_code11 = abs_linkage.mb_code11 LEFT JOIN non_abs_linkage ON parcelmb.{0} = non_abs_linkage.{0} LEFT JOIN {3}.clean_li_ci_{1}_est ON {3}.clean_li_ci_{1}_est.{0} = parcelmb.{0} LEFT JOIN ind_abs ON parcelmb.{0} = ind_abs.{0} LEFT JOIN dwelling_density ON parcelmb.{0} = dwelling_density.{0} LEFT JOIN street_connectivity ON parcelmb.{0} = street_connectivity.{0} LEFT JOIN ind_pos ON parcelmb.{0} = ind_pos.{0} LEFT JOIN {3}.ind_groups_{1} ON parcelmb.{0} = {3}.ind_groups_{1}.{0} LEFT JOIN ind_dest_{1} ON 
parcelmb.{0} = ind_dest_{1}.{0} {2}; ALTER TABLE {3}.raw_indicators_{1} ADD PRIMARY KEY ({0}); '''.format(A_pointsID.lower(),i,parcelmb_exclusion_criteria,uli_schema) curs.execute(createTable) conn.commit() print("Created table '{1}.raw_indicators_{0}', with parcel level id, linkage codes, pLI estimates, and raw indicators".format(i,uli_schema)) for type in ['hard','soft']: createTable = ''' DROP TABLE IF EXISTS {1}.clean_li_percentile_{0}; CREATE TABLE {1}.clean_li_percentile_{0} AS SELECT t1.detail_pid, round(100*cume_dist() OVER(ORDER BY li_ci_est)::numeric,0) as li_ci_est, geom FROM {1}.clean_li_parcel_ci_{0} AS t1 LEFT JOIN parcel_xy AS t2 on t1.detail_pid = t2.detail_pid '''.format(type,uli_schema) curs.execute(createTable) conn.commit() print("Created {0} address-level percentiles for schema {1}".format(type,uli_schema)) # create sa1 area linkage corresponding to later SA1 aggregate tables createTable = ''' DROP TABLE IF EXISTS {0}.sa1_area; CREATE TABLE {0}.sa1_area AS SELECT sa1_7dig11, string_agg(distinct(ssc_name),',') AS suburb, string_agg(distinct(lga_name11), ', ') AS lga FROM {0}.raw_indicators_hard WHERE sa1_7dig11 IN (SELECT sa1_7dig11 FROM abs_2011_irsd) GROUP BY sa1_7dig11 ORDER BY sa1_7dig11 ASC; '''.format(uli_schema) curs.execute(createTable) conn.commit() # create sa2 area linkage corresponding to later SA1 aggregate tables createTable = ''' DROP TABLE IF EXISTS {0}.sa2_area; CREATE TABLE {0}.sa2_area AS SELECT sa2_name11, string_agg(distinct(ssc_name),',') AS suburb, string_agg(distinct(lga_name11), ', ') AS lga FROM {0}.raw_indicators_hard WHERE sa2_name11 IN (SELECT sa2_name11 FROM abs_2011_irsd) GROUP BY sa2_name11 ORDER BY sa2_name11 ASC; '''.format(uli_schema) curs.execute(createTable) conn.commit() # create Suburb area linkage corresponding to later SA1 aggregate tables createTable = ''' DROP TABLE IF EXISTS {0}.ssc_area; CREATE TABLE {0}.ssc_area AS SELECT DISTINCT(ssc_name) AS suburb, string_agg(distinct(lga_name11), ', ') AS lga 
FROM {0}.raw_indicators_hard GROUP BY ssc_name ORDER BY ssc_name ASC; '''.format(uli_schema) curs.execute(createTable) conn.commit() # create aggregated raw liveability estimates for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.li_raw_{1}_{0} ; CREATE TABLE {2}.li_raw_{1}_{0} AS SELECT {0}, AVG(li_ci_est ) AS li_ci_est , AVG(dd_nh1600m ) AS dd_nh1600m , AVG(sc_nh1600m ) AS sc_nh1600m , AVG(pos15000_access ) AS pos15000_access , AVG(sa1_prop_affordablehousing ) AS sa1_prop_affordablehousing , AVG(sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , AVG(community_culture_leisure ) AS community_culture_leisure , AVG(early_years ) AS early_years , AVG(education ) AS education , AVG(health_services ) AS health_services , AVG(sport_rec ) AS sport_rec , AVG(food ) AS food , AVG(convenience ) AS convenience , AVG(busstop2012_400m ) AS busstop2012_400m , AVG(tramstops2012_600m ) AS tramstops2012_600m , AVG(trainstations2012_800m ) AS trainstations2012_800m FROM {2}.raw_indicators_{1} GROUP BY {0} ORDER BY {0} ASC; ALTER TABLE {2}.li_raw_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created raw {1} averages at {0} level for schema {2}".format(area,type,uli_schema)) # create aggregated SD for raw liveability estimates for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.li_raw_sd_{1}_{0} ; CREATE TABLE {2}.li_raw_sd_{1}_{0} AS SELECT {0}, stddev_pop(li_ci_est ) AS sd_li_ci_est , stddev_pop(dd_nh1600m ) AS sd_dd_nh1600m , stddev_pop(sc_nh1600m ) AS sd_sc_nh1600m , stddev_pop(pos15000_access ) AS sd_pos15000_access , stddev_pop(sa1_prop_affordablehousing ) AS sd_sa1_prop_affordablehousing , stddev_pop(sa2_prop_live_work_sa3 ) AS sd_sa2_prop_live_work_sa3 , 
stddev_pop(community_culture_leisure ) AS sd_community_culture_leisure , stddev_pop(early_years ) AS sd_early_years , stddev_pop(education ) AS sd_education , stddev_pop(health_services ) AS sd_health_services , stddev_pop(sport_rec ) AS sd_sport_rec , stddev_pop(food ) AS sd_food , stddev_pop(convenience ) AS sd_convenience , stddev_pop(busstop2012_400m ) AS sd_busstop2012_400m , stddev_pop(tramstops2012_600m ) AS sd_tramstops2012_600m , stddev_pop(trainstations2012_800m ) AS sd_trainstations2012_800m FROM {2}.raw_indicators_{1} GROUP BY {0} ORDER BY {0} ASC; ALTER TABLE {2}.li_raw_sd_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created SD for raw {1} averages at {0} level for schema {2}".format(area,type,uli_schema)) # create aggregated raw liveability range for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.li_range_{1}_{0} ; CREATE TABLE {2}.li_range_{1}_{0} AS SELECT {0}, round(min(li_centile )::numeric,1)::text || ' - ' ||round(max(li_centile )::numeric,1)::text AS li_centile , round(min(dd_nh1600m )::numeric,1)::text || ' - ' ||round(max(dd_nh1600m )::numeric,1)::text AS dd_nh1600m , round(min(sc_nh1600m )::numeric,1)::text || ' - ' ||round(max(sc_nh1600m )::numeric,1)::text AS sc_nh1600m , round(min(100*pos15000_access )::numeric,1)::text || ' - ' ||round(max(100*pos15000_access )::numeric,1)::text AS pos15000_access , round(min(100*sa1_prop_affordablehousing )::numeric,1)::text || ' - ' ||round(max(100*sa1_prop_affordablehousing )::numeric,1)::text AS sa1_prop_affordablehousing , round(min(100*sa2_prop_live_work_sa3 )::numeric,1)::text || ' - ' ||round(max(100*sa2_prop_live_work_sa3 )::numeric,1)::text AS sa2_prop_live_work_sa3 , round(min(100*community_culture_leisure )::numeric,1)::text || ' - ' ||round(max(100*community_culture_leisure )::numeric,1)::text AS 
community_culture_leisure , round(min(100*early_years )::numeric,1)::text || ' - ' ||round(max(100*early_years )::numeric,1)::text AS early_years , round(min(100*education )::numeric,1)::text || ' - ' ||round(max(100*education )::numeric,1)::text AS education , round(min(100*health_services )::numeric,1)::text || ' - ' ||round(max(100*health_services )::numeric,1)::text AS health_services , round(min(100*sport_rec )::numeric,1)::text || ' - ' ||round(max(100*sport_rec )::numeric,1)::text AS sport_rec , round(min(100*food )::numeric,1)::text || ' - ' ||round(max(100*food )::numeric,1)::text AS food , round(min(100*convenience )::numeric,1)::text || ' - ' ||round(max(100*convenience )::numeric,1)::text AS convenience , round(min(100*busstop2012_400m )::numeric,1)::text || ' - ' ||round(max(100*busstop2012_400m )::numeric,1)::text AS busstop2012_400m , round(min(100*tramstops2012_600m )::numeric,1)::text || ' - ' ||round(max(100*tramstops2012_600m )::numeric,1)::text AS tramstops2012_600m , round(min(100*trainstations2012_800m )::numeric,1)::text || ' - ' ||round(max(100*trainstations2012_800m )::numeric,1)::text AS trainstations2012_800m FROM {2}.raw_indicators_{1} AS t1 LEFT JOIN (SELECT detail_pid, 100*cume_dist() OVER(ORDER BY li_ci_est)::numeric AS li_centile FROM {2}.clean_li_parcel_ci_{1}) AS t2 ON t1.detail_pid = t2.detail_pid GROUP BY {0} ORDER BY {0} ASC; ALTER TABLE {2}.li_range_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created raw {1} range at {0} level for schema {2}".format(area,type,uli_schema)) # create aggregated raw liveability most for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.li_most_{1}_{0} ; CREATE TABLE {2}.li_most_{1}_{0} AS SELECT {0}, round(percentile_cont(0.1) WITHIN GROUP (ORDER BY li_centile )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) 
WITHIN GROUP (ORDER BY li_centile )::numeric,1)::text AS li_centile , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY dd_nh1600m )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY dd_nh1600m )::numeric,1)::text AS dd_nh1600m , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY sc_nh1600m )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY sc_nh1600m )::numeric,1)::text AS sc_nh1600m , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*pos15000_access )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*pos15000_access )::numeric,1)::text AS pos15000_access , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*sa1_prop_affordablehousing )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*sa1_prop_affordablehousing )::numeric,1)::text AS sa1_prop_affordablehousing, round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*sa2_prop_live_work_sa3 )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*sa2_prop_live_work_sa3 )::numeric,1)::text AS sa2_prop_live_work_sa3, round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*community_culture_leisure )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*community_culture_leisure )::numeric,1)::text AS community_culture_leisure , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*early_years )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*early_years )::numeric,1)::text AS early_years , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*education )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*education )::numeric,1)::text AS education , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*health_services )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*health_services )::numeric,1)::text AS health_services , 
round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*sport_rec )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*sport_rec )::numeric,1)::text AS sport_rec , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*food )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*food )::numeric,1)::text AS food , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*convenience )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*convenience )::numeric,1)::text AS convenience , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*busstop2012_400m )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*busstop2012_400m )::numeric,1)::text AS busstop2012_400m , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*tramstops2012_600m )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*tramstops2012_600m )::numeric,1)::text AS tramstops2012_600m , round(percentile_cont(0.1) WITHIN GROUP (ORDER BY 100*trainstations2012_800m )::numeric,1)::text || ' - ' ||round(percentile_cont(0.9) WITHIN GROUP (ORDER BY 100*trainstations2012_800m )::numeric,1)::text AS trainstations2012_800m FROM {2}.raw_indicators_{1} AS t1 LEFT JOIN (SELECT detail_pid, 100*cume_dist() OVER(ORDER BY li_ci_est)::numeric AS li_centile FROM {2}.clean_li_parcel_ci_{1}) AS t2 ON t1.detail_pid = t2.detail_pid GROUP BY {0} ORDER BY {0} ASC; ALTER TABLE {2}.li_most_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created raw {1} most at {0} level for schema {2}".format(area,type,uli_schema)) # create aggregated normalised liveability estimates for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.clean_li_mpi_norm_{1}_{0} ; CREATE TABLE {2}.clean_li_mpi_norm_{1}_{0} AS SELECT {0}, 
AVG(li_ci_est ) AS li_ci_est , AVG(dd_nh1600m ) AS dd_nh1600m , AVG(sc_nh1600m ) AS sc_nh1600m , AVG(pos15000_access ) AS pos15000_access , AVG(sa1_prop_affordablehousing ) AS sa1_prop_affordablehousing , AVG(sa2_prop_live_work_sa3 ) AS sa2_prop_live_work_sa3 , AVG(community_culture_leisure ) AS community_culture_leisure , AVG(early_years ) AS early_years , AVG(education ) AS education , AVG(health_services ) AS health_services , AVG(sport_rec ) AS sport_rec , AVG(food ) AS food , AVG(convenience ) AS convenience , AVG(busstop2012_400m ) AS busstop2012_400m , AVG(tramstops2012_600m ) AS tramstops2012_600m , AVG(trainstations2012_800m ) AS trainstations2012_800m FROM {2}.clean_li_parcel_ci_{1} GROUP BY {0} ORDER BY {0} ASC; ALTER TABLE {2}.clean_li_mpi_norm_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created normalised {1} averages at {0} level for schema {2}".format(area,type,uli_schema)) # create aggregated SD for normalised liveability estimates for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.clean_li_mpi_sd_{1}_{0} ; CREATE TABLE {2}.clean_li_mpi_sd_{1}_{0} AS SELECT {0}, stddev_pop(li_ci_est ) AS sd_li_ci_est , stddev_pop(dd_nh1600m ) AS sd_dd_nh1600m , stddev_pop(sc_nh1600m ) AS sd_sc_nh1600m , stddev_pop(pos15000_access ) AS sd_pos15000_access , stddev_pop(sa1_prop_affordablehousing ) AS sd_sa1_prop_affordablehousing , stddev_pop(sa2_prop_live_work_sa3 ) AS sd_sa2_prop_live_work_sa3 , stddev_pop(community_culture_leisure ) AS sd_community_culture_leisure , stddev_pop(early_years ) AS sd_early_years , stddev_pop(education ) AS sd_education , stddev_pop(health_services ) AS sd_health_services , stddev_pop(sport_rec ) AS sd_sport_rec , stddev_pop(food ) AS sd_food , stddev_pop(convenience ) AS sd_convenience , stddev_pop(busstop2012_400m ) AS sd_busstop2012_400m , 
stddev_pop(tramstops2012_600m ) AS sd_tramstops2012_600m , stddev_pop(trainstations2012_800m ) AS sd_trainstations2012_800m FROM {2}.clean_li_parcel_ci_{1} GROUP BY {0} ORDER BY {0} ASC; ALTER TABLE {2}.clean_li_mpi_sd_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created SD for normalised {1} averages at {0} level for schema {2}".format(area,type,uli_schema)) # create deciles of liveability estimates for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.clean_li_deciles_{1}_{0} ; CREATE TABLE {2}.clean_li_deciles_{1}_{0} AS SELECT {0}, round(10*cume_dist() OVER(ORDER BY li_ci_est )::numeric,0) as li_ci_est , round(10*cume_dist() OVER(ORDER BY dd_nh1600m )::numeric,0) as dd_nh1600m , round(10*cume_dist() OVER(ORDER BY sc_nh1600m )::numeric,0) as sc_nh1600m , round(10*cume_dist() OVER(ORDER BY pos15000_access )::numeric,0) as pos15000_access , round(10*cume_dist() OVER(ORDER BY sa1_prop_affordablehousing )::numeric,0) as sa1_prop_affordablehousing , round(10*cume_dist() OVER(ORDER BY sa2_prop_live_work_sa3 )::numeric,0) as sa2_prop_live_work_sa3 , round(10*cume_dist() OVER(ORDER BY community_culture_leisure )::numeric,0) as community_culture_leisure , round(10*cume_dist() OVER(ORDER BY early_years )::numeric,0) as early_years , round(10*cume_dist() OVER(ORDER BY education )::numeric,0) as education , round(10*cume_dist() OVER(ORDER BY health_services )::numeric,0) as health_services , round(10*cume_dist() OVER(ORDER BY sport_rec )::numeric,0) as sport_rec , round(10*cume_dist() OVER(ORDER BY food )::numeric,0) as food , round(10*cume_dist() OVER(ORDER BY convenience )::numeric,0) as convenience , round(10*cume_dist() OVER(ORDER BY busstop2012_400m )::numeric,0) as busstop2012_400m , round(10*cume_dist() OVER(ORDER BY tramstops2012_600m )::numeric,0) as tramstops2012_600m , 
round(10*cume_dist() OVER(ORDER BY trainstations2012_800m )::numeric,0) as trainstations2012_800m FROM {2}.clean_li_mpi_norm_{1}_{0} ORDER BY {0} ASC; ALTER TABLE {2}.clean_li_deciles_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created {1} deciles at {0} level for schema {2}".format(area,type,uli_schema)) # create percentiles of liveability estimates for selected area for type in ['hard','soft']: for area in ['mb_code11','sa1_7dig11','sa2_name11','ssc_name','lga_name11']: createTable = ''' DROP TABLE IF EXISTS {2}.clean_li_percentiles_{1}_{0} ; CREATE TABLE {2}.clean_li_percentiles_{1}_{0} AS SELECT {0}, round(100*cume_dist() OVER(ORDER BY li_ci_est )::numeric,0) as li_ci_est , round(100*cume_dist() OVER(ORDER BY dd_nh1600m )::numeric,0) as dd_nh1600m , round(100*cume_dist() OVER(ORDER BY sc_nh1600m )::numeric,0) as sc_nh1600m , round(100*cume_dist() OVER(ORDER BY pos15000_access )::numeric,0) as pos15000_access , round(100*cume_dist() OVER(ORDER BY sa1_prop_affordablehousing )::numeric,0) as sa1_prop_affordablehousing , round(100*cume_dist() OVER(ORDER BY sa2_prop_live_work_sa3 )::numeric,0) as sa2_prop_live_work_sa3 , round(100*cume_dist() OVER(ORDER BY community_culture_leisure )::numeric,0) as community_culture_leisure , round(100*cume_dist() OVER(ORDER BY early_years )::numeric,0) as early_years , round(100*cume_dist() OVER(ORDER BY education )::numeric,0) as education , round(100*cume_dist() OVER(ORDER BY health_services )::numeric,0) as health_services , round(100*cume_dist() OVER(ORDER BY sport_rec )::numeric,0) as sport_rec , round(100*cume_dist() OVER(ORDER BY food )::numeric,0) as food , round(100*cume_dist() OVER(ORDER BY convenience )::numeric,0) as convenience , round(100*cume_dist() OVER(ORDER BY busstop2012_400m )::numeric,0) as busstop2012_400m , round(100*cume_dist() OVER(ORDER BY tramstops2012_600m )::numeric,0) as tramstops2012_600m , round(100*cume_dist() OVER(ORDER BY 
trainstations2012_800m )::numeric,0) as trainstations2012_800m FROM {2}.clean_li_mpi_norm_{1}_{0} ORDER BY {0} ASC; ALTER TABLE {2}.clean_li_percentiles_{1}_{0} ADD PRIMARY KEY ({0}); '''.format(area,type,uli_schema) curs.execute(createTable) conn.commit() print("Created {1} percentiles at {0} level for schema {2}".format(area,type,uli_schema)) # output to completion log script_running_log(script, task, start)
68.022485
275
0.578089
6,888
57,479
4.533537
0.070267
0.017485
0.024594
0.022577
0.792263
0.745573
0.71166
0.671822
0.618855
0.602459
0
0.085662
0.328363
57,479
844
276
68.103081
0.723222
0.044399
0
0.435897
0
0.102564
0.902228
0.233624
0
0
0
0
0
1
0
false
0.002699
0.009447
0
0.009447
0.032389
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f00eb6f102f06e123a6b385a6a055e2287e68040
191
py
Python
python/anyascii/_data/_313.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
python/anyascii/_data/_313.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
python/anyascii/_data/_313.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
b='Yang Qu Wa Zhi Mi Meng Bie Ma Bi Chang Hun Nong Ji Zi Zi Yan Ze Zou Chu Chi Yao Xie Quan Yun Zhai Zhan Zou Yi Ji Ou Dian Yi Gong Da Ran Gou Jiao Tong Bie'
191
191
0.60733
40
191
2.9
0.875
0
0
0
0
0
0
0
0
0
0
0
0.376963
191
1
191
191
0.97479
0
0
0
0
1
0.973958
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f057b5976cc76af52625385c1751d6894e8fcf0a
14,016
py
Python
src/apps/surveys19/tests/test_tokenizer_case1.py
COAStatistics/alss
e1b14cb13de7b8455fa6835587cb1ceb16e4595c
[ "MIT" ]
null
null
null
src/apps/surveys19/tests/test_tokenizer_case1.py
COAStatistics/alss
e1b14cb13de7b8455fa6835587cb1ceb16e4595c
[ "MIT" ]
81
2018-09-04T13:16:02.000Z
2022-03-11T23:20:40.000Z
src/apps/surveys19/tests/test_tokenizer_case1.py
COAStatistics/alss
e1b14cb13de7b8455fa6835587cb1ceb16e4595c
[ "MIT" ]
4
2018-08-21T22:17:59.000Z
2019-03-15T01:14:29.000Z
from . import TestCase, setup_fixtures from apps.surveys19.builder.tokenizer import Builder class ModelTestCase(TestCase): @classmethod def setUpTestData(cls): setup_fixtures() def setUp(self): self.string = "6700500100020101####林阿忠0912345678/##########3701屏東縣屏東市屏東路2號#台南#官田#6710+00250000000003001000000000100000001農所實驗#0000000000010010+101稻作(一期)#1004201100000002100021503桶柑#1003001100000000720020+F02泌乳牛#300009000000000500000F01肉牛#3001000000000015000031010000410020101080404+01110302000000101000000002220322000001001000000001+120040020010011010000000000005220040020010011111111111110010+0100900000500412130000000000000004010001005004142100000000000080+01002070050100+220011010000000000135140011111111111110080+香菇#A02130021100000000000060芒果#C06120010001100000000130+10121200011時間太忙#0#+稻受雨害影響#宋中積#宋會喬#0330" self.builder = Builder(self.string) self.builder.build_survey() def test_build_survey(self): self.assertEqual(self.builder.survey.farmer_id, "670050010002") self.assertEqual(self.builder.survey.page, 1) self.assertEqual(self.builder.survey.total_pages, 1) self.assertEqual(self.builder.survey.farmer_name, "林阿忠") self.assertEqual(self.builder.survey.origin_class, 37) self.assertEqual(self.builder.survey.note, "稻受雨害影響") self.assertEqual(self.builder.survey.period, 330) self.assertEqual(self.builder.survey.investigator, "宋中積") self.assertEqual(self.builder.survey.reviewer, "宋會喬") self.assertEqual(self.builder.survey.second, True) self.assertEqual(self.builder.survey.non_second, False) # check survey query string def test_build_phone(self): self.builder.build_phone() self.assertEqual(self.builder.phones[0].phone, "0912345678") # self.assertEqual(self.builder.phones[1].phone, "2991111") def test_build_address(self): self.builder.build_address() self.assertEqual(self.builder.address.match, False) self.assertEqual(self.builder.address.mismatch, True) self.assertEqual(self.builder.address.address, "屏東縣屏東市屏東路2號") def test_build_farm_location(self): self.builder.build_farm_location() 
self.assertEqual(self.builder.farm_location.city, "台南") self.assertEqual(self.builder.farm_location.town, "官田") self.assertEqual(self.builder.farm_location.code.code, "6710") def test_build_land_area(self): self.builder.build_land_area() self.assertEqual(len(self.builder.land_area), 3) self.assertEqual(self.builder.land_area[0].value, 250) self.assertEqual(self.builder.land_area[0].type.name, "可耕作地") self.assertEqual(self.builder.land_area[0].status.name, "自有") self.assertEqual(self.builder.land_area[1].value, 30) self.assertEqual(self.builder.land_area[1].type.name, "可耕作地") self.assertEqual(self.builder.land_area[1].status.name, "接受委託經營") self.assertEqual(self.builder.land_area[2].value, 1000) self.assertEqual(self.builder.land_area[2].type.name, "人工鋪面") self.assertEqual(self.builder.land_area[2].status.name, "自有") def test_build_business(self): self.builder.build_business() self.assertEqual(len(self.builder.business), 2) self.assertEqual( self.builder.business[0].farm_related_business.name, "無兼營農業相關事業" ) self.assertEqual(self.builder.business[1].farm_related_business.name, "其他") self.assertEqual(self.builder.business[1].extra, "農所實驗") def test_build_management(self): self.builder.build_management() self.assertEqual(len(self.builder.survey.management_types.all()), 1) self.assertEqual(self.builder.survey.management_types.all()[0].name, "豬") def test_build_crop_marketing(self): self.builder.build_crop_marketing() self.assertEqual(len(self.builder.crop_marketing), 2) self.assertEqual(self.builder.crop_marketing[0].product.code, "101") self.assertEqual(self.builder.crop_marketing[0].name, "稻作(一期)") self.assertEqual(self.builder.crop_marketing[0].land_number, 1) self.assertEqual(self.builder.crop_marketing[0].land_area, 420) self.assertEqual(self.builder.crop_marketing[0].plant_times, 1) self.assertEqual(self.builder.crop_marketing[0].unit.code, 1) self.assertEqual(self.builder.crop_marketing[0].year_sales, 21000) 
self.assertEqual(self.builder.crop_marketing[0].has_facility, 0) self.assertEqual(self.builder.crop_marketing[0].loss.code, 1) self.assertEqual(self.builder.crop_marketing[1].product.code, "503") self.assertEqual(self.builder.crop_marketing[1].name, "桶柑") self.assertEqual(self.builder.crop_marketing[1].land_number, 1) self.assertEqual(self.builder.crop_marketing[1].land_area, 300) self.assertEqual(self.builder.crop_marketing[1].plant_times, 1) self.assertEqual(self.builder.crop_marketing[1].unit.code, 1) self.assertEqual(self.builder.crop_marketing[1].year_sales, 7200) self.assertEqual(self.builder.crop_marketing[1].has_facility, 0) self.assertEqual(self.builder.crop_marketing[1].loss.code, 0) def test_build_livestock_marketing(self): self.builder.build_livestock_marketing() self.assertEqual(len(self.builder.livestock_marketing), 2) self.assertEqual(self.builder.livestock_marketing[0].product.code, "F02") self.assertEqual(self.builder.livestock_marketing[0].name, "泌乳牛") self.assertEqual(self.builder.livestock_marketing[0].unit.code, 3) self.assertEqual(self.builder.livestock_marketing[0].raising_number, 90) self.assertEqual(self.builder.livestock_marketing[0].year_sales, 5000) self.assertEqual(self.builder.livestock_marketing[0].contract.code, 0) self.assertEqual(self.builder.livestock_marketing[0].loss.code, 0) self.assertEqual(self.builder.livestock_marketing[1].product.code, "F01") self.assertEqual(self.builder.livestock_marketing[1].name, "肉牛") self.assertEqual(self.builder.livestock_marketing[1].unit.code, 3) self.assertEqual(self.builder.livestock_marketing[1].raising_number, 1000) self.assertEqual(self.builder.livestock_marketing[1].year_sales, 15000) self.assertEqual(self.builder.livestock_marketing[1].contract.code, 0) self.assertEqual(self.builder.livestock_marketing[1].loss.code, 3) def test_build_annual_income(self): self.builder.build_annual_income() self.assertEqual(len(self.builder.annual_income), 4) self.assertEqual( 
self.builder.annual_income[0].market_type.name, "農作物及其製品(含生產及加工)" ) self.assertEqual(self.builder.annual_income[0].income_range.name, "500~未滿1000") self.assertEqual( self.builder.annual_income[1].market_type.name, "畜禽產品及其製品(含生產及加工)" ) self.assertEqual(self.builder.annual_income[1].income_range.name, "500~未滿1000") self.assertEqual(self.builder.annual_income[2].market_type.name, "休閒、餐飲及相關事業") self.assertEqual(self.builder.annual_income[2].income_range.name, "75~未滿100") self.assertEqual(self.builder.annual_income[3].market_type.name, "銷售額總計") self.assertEqual(self.builder.annual_income[3].income_range.name, "500~未滿1000") def test_build_population_age(self): self.builder.build_population_age() self.assertEqual(len(self.builder.population_age), 4) self.assertEqual(self.builder.population_age[0].count, 1) self.assertEqual(self.builder.population_age[0].gender.name, "男") self.assertEqual(self.builder.population_age[0].age_scope.name, "未滿15歲") self.assertEqual(self.builder.population_age[1].count, 1) self.assertEqual(self.builder.population_age[1].gender.name, "女") self.assertEqual(self.builder.population_age[1].age_scope.name, "未滿15歲") self.assertEqual(self.builder.population_age[2].count, 4) self.assertEqual(self.builder.population_age[2].gender.name, "男") self.assertEqual(self.builder.population_age[2].age_scope.name, "滿15歲以上") self.assertEqual(self.builder.population_age[3].count, 4) self.assertEqual(self.builder.population_age[3].gender.name, "女") self.assertEqual(self.builder.population_age[3].age_scope.name, "滿15歲以上") def test_build_population(self): self.builder.build_population() self.assertEqual(len(self.builder.population), 2) self.assertEqual(self.builder.population[0].relationship.name, "戶長") self.assertEqual(self.builder.population[0].gender.name, "男") self.assertEqual(self.builder.population[0].birth_year, 30) self.assertEqual(self.builder.population[0].education_level.name, "小學及自修") self.assertEqual(self.builder.population[0].farmer_work_day.code, 7) 
self.assertEqual(self.builder.population[0].life_style.name, "自營農牧業工作") self.assertEqual(self.builder.population[1].relationship.name, "戶長之配偶") self.assertEqual(self.builder.population[1].gender.name, "女") self.assertEqual(self.builder.population[1].birth_year, 32) self.assertEqual(self.builder.population[1].education_level.name, "小學及自修") self.assertEqual(self.builder.population[1].farmer_work_day.code, 6) self.assertEqual(self.builder.population[1].life_style.name, "自營農牧業工作") def test_build_hire(self): self.builder.build_hire() self.assertEqual(self.builder.survey.non_hire, False) self.assertEqual(self.builder.survey.hire, True) def test_build_long_term_hire(self): self.builder.build_long_term_hire() self.assertEqual(len(self.builder.long_term_hire), 2) self.assertEqual(len(self.builder.long_term_hire[0].number_workers.all()), 3) self.assertEqual(len(self.builder.long_term_hire[0].months.all()), 2) self.assertEqual(self.builder.long_term_hire[0].avg_work_day, 0.5) self.assertEqual(len(self.builder.long_term_hire[1].number_workers.all()), 3) self.assertEqual(len(self.builder.long_term_hire[1].months.all()), 12) self.assertEqual(self.builder.long_term_hire[1].avg_work_day, 1) def test_build_short_term_hire(self): self.builder.build_short_term_hire() self.assertEqual(len(self.builder.short_term_hire), 2) self.assertEqual(len(self.builder.short_term_hire[0].number_workers.all()), 2) self.assertEqual(self.builder.short_term_hire[0].month.value, 1) self.assertEqual(self.builder.short_term_hire[0].avg_work_day, 0) self.assertEqual(len(self.builder.short_term_hire[1].number_workers.all()), 3) self.assertEqual(self.builder.short_term_hire[1].month.value, 4) self.assertEqual(self.builder.short_term_hire[1].avg_work_day, 8) def test_build_no_salary_hire(self): self.builder.build_no_salary_hire() self.assertEqual(len(self.builder.no_salary_hire), 2) self.assertEqual(self.builder.no_salary_hire[0].month.value, 1) self.assertEqual(self.builder.no_salary_hire[0].count, 2) 
self.assertEqual(self.builder.no_salary_hire[1].month.value, 7) self.assertEqual(self.builder.no_salary_hire[1].count, 5) def test_build_lack(self): self.builder.build_lack() self.assertEqual(len(self.builder.survey.lacks.all()), 1) def test_build_long_term_lack(self): self.builder.build_long_term_lack() self.assertEqual(len(self.builder.long_term_lack), 2) self.assertEqual(self.builder.long_term_lack[0].work_type.code, 22) self.assertEqual(self.builder.long_term_lack[0].count, 1) self.assertEqual(self.builder.long_term_lack[0].avg_lack_day, 13.5) self.assertEqual(len(self.builder.long_term_lack[0].months.all()), 2) self.assertEqual(self.builder.long_term_lack[1].work_type.code, 14) self.assertEqual(self.builder.long_term_lack[1].count, 1) self.assertEqual(self.builder.long_term_lack[1].avg_lack_day, 8) self.assertEqual(len(self.builder.long_term_lack[1].months.all()), 12) def test_build_short_term_lack(self): self.builder.build_short_term_lack() self.assertEqual(len(self.builder.short_term_lack), 2) self.assertEqual(self.builder.short_term_lack[0].name, "香菇") self.assertEqual(self.builder.short_term_lack[0].product, None) self.assertEqual(self.builder.short_term_lack[0].work_type.code, 13) self.assertEqual(self.builder.short_term_lack[0].count, 2) self.assertEqual(self.builder.short_term_lack[0].avg_lack_day, 6) self.assertEqual(len(self.builder.short_term_lack[0].months.all()), 2) self.assertEqual(len(self.builder.short_term_lack), 2) self.assertEqual(self.builder.short_term_lack[1].name, "芒果") self.assertEqual(self.builder.short_term_lack[1].product, None) self.assertEqual(self.builder.short_term_lack[1].work_type.code, 12) self.assertEqual(self.builder.short_term_lack[1].count, 1) self.assertEqual(self.builder.short_term_lack[1].avg_lack_day, 13) self.assertEqual(len(self.builder.short_term_lack[1].months.all()), 2) def test_build_subsidy(self): self.builder.build_subsidy() self.assertEqual(self.builder.subsidy.survey.farmer_id, "670050010002") 
self.assertEqual(self.builder.subsidy.none_subsidy, False) self.assertEqual(self.builder.subsidy.count, 12) self.assertEqual(self.builder.subsidy.month_delta, 12) self.assertEqual(self.builder.subsidy.day_delta, 0) self.assertEqual(len(self.builder.refuse), 2) self.assertEqual(self.builder.refuse[0].reason.name, "不知道此資訊") self.assertEqual(self.builder.refuse[0].extra, None) self.assertEqual(self.builder.refuse[1].reason.name, "沒有意願") self.assertEqual(self.builder.refuse[1].extra, "時間太忙")
57.208163
619
0.722531
1,833
14,016
5.348609
0.112384
0.201958
0.257752
0.352713
0.75918
0.660343
0.508364
0.367401
0.236536
0.061812
0
0.070413
0.146832
14,016
244
620
57.442623
0.749456
0.005922
0
0.023474
0
0.004695
0.063676
0.042714
0
0
0
0
0.741784
1
0.103286
false
0
0.00939
0
0.117371
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
f08164d6c685cea83082af8d4b89da3f8184a2f9
6,262
py
Python
tests/qtoggleserver/core/expressions/test_comparison.py
DigitEgal/qtoggleserver
54b6ac53742af9529fd349d4fc207b0dc8a38d3b
[ "Apache-2.0" ]
12
2020-07-26T05:49:25.000Z
2022-01-08T21:50:44.000Z
tests/qtoggleserver/core/expressions/test_comparison.py
DigitEgal/qtoggleserver
54b6ac53742af9529fd349d4fc207b0dc8a38d3b
[ "Apache-2.0" ]
8
2020-04-30T18:40:18.000Z
2020-11-08T21:09:35.000Z
tests/qtoggleserver/core/expressions/test_comparison.py
DigitEgal/qtoggleserver
54b6ac53742af9529fd349d4fc207b0dc8a38d3b
[ "Apache-2.0" ]
2
2020-02-14T02:52:13.000Z
2021-04-21T05:13:07.000Z
import pytest from qtoggleserver.core.expressions import comparison, Function from qtoggleserver.core.expressions import InvalidNumberOfArguments async def test_if_boolean(literal_false, literal_true, literal_one, literal_two): result = await comparison.IfFunction([literal_false, literal_one, literal_two]).eval(context={}) assert result == 2 result = await comparison.IfFunction([literal_true, literal_one, literal_two]).eval(context={}) assert result == 1 async def test_if_number(literal_zero, literal_one, literal_two): result = await comparison.IfFunction([literal_zero, literal_one, literal_two]).eval(context={}) assert result == 2 result = await comparison.IfFunction([literal_two, literal_one, literal_two]).eval(context={}) assert result == 1 def test_if_parse(): e = Function.parse(None, 'IF(1, 2, 3)', 0) assert isinstance(e, comparison.IfFunction) def test_if_num_args(): with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'IF(1, 2)', 0) with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'IF(1, 2, 3, 4)', 0) async def test_eq_boolean(literal_false, literal_true): result = await comparison.EqFunction([literal_false, literal_true]).eval(context={}) assert result == 0 result = await comparison.EqFunction([literal_false, literal_false]).eval(context={}) assert result == 1 async def test_eq_number(literal_one, literal_two): result = await comparison.EqFunction([literal_one, literal_two]).eval(context={}) assert result == 0 result = await comparison.EqFunction([literal_two, literal_two]).eval(context={}) assert result == 1 def test_eq_parse(): e = Function.parse(None, 'EQ(1, 2)', 0) assert isinstance(e, comparison.EqFunction) def test_eq_num_args(): with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'EQ(1)', 0) with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'EQ(1, 2, 3)', 0) async def test_gt_boolean(literal_false, literal_true): result = await comparison.GTFunction([literal_true, 
literal_false]).eval(context={}) assert result == 1 result = await comparison.GTFunction([literal_false, literal_false]).eval(context={}) assert result == 0 async def test_gt_number(literal_one, literal_two): result = await comparison.GTFunction([literal_two, literal_one]).eval(context={}) assert result == 1 result = await comparison.GTFunction([literal_one, literal_two]).eval(context={}) assert result == 0 result = await comparison.GTFunction([literal_two, literal_two]).eval(context={}) assert result == 0 def test_gt_parse(): e = Function.parse(None, 'GT(1, 2)', 0) assert isinstance(e, comparison.GTFunction) def test_gt_num_args(): with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'GT(1)', 0) with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'GT(1, 2, 3)', 0) async def test_gte_boolean(literal_false, literal_true): result = await comparison.GTEFunction([literal_true, literal_false]).eval(context={}) assert result == 1 result = await comparison.GTEFunction([literal_false, literal_false]).eval(context={}) assert result == 1 result = await comparison.GTEFunction([literal_false, literal_true]).eval(context={}) assert result == 0 async def test_gte_number(literal_one, literal_two): result = await comparison.GTEFunction([literal_two, literal_one]).eval(context={}) assert result == 1 result = await comparison.GTEFunction([literal_one, literal_two]).eval(context={}) assert result == 0 result = await comparison.GTEFunction([literal_two, literal_two]).eval(context={}) assert result == 1 def test_gte_parse(): e = Function.parse(None, 'GTE(1, 2)', 0) assert isinstance(e, comparison.GTEFunction) def test_gte_num_args(): with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'GTE(1)', 0) with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'GTE(1, 2, 3)', 0) async def test_lt_boolean(literal_false, literal_true): result = await comparison.LTFunction([literal_false, literal_true]).eval(context={}) assert result == 1 
result = await comparison.LTFunction([literal_false, literal_false]).eval(context={}) assert result == 0 async def test_lt_number(literal_one, literal_two): result = await comparison.LTFunction([literal_one, literal_two]).eval(context={}) assert result == 1 result = await comparison.LTFunction([literal_two, literal_one]).eval(context={}) assert result == 0 result = await comparison.LTFunction([literal_two, literal_two]).eval(context={}) assert result == 0 def test_lt_parse(): e = Function.parse(None, 'LT(1, 2)', 0) assert isinstance(e, comparison.LTFunction) def test_lt_num_args(): with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'LT(1)', 0) with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'LT(1, 2, 3)', 0) async def test_lte_boolean(literal_false, literal_true): result = await comparison.LTEFunction([literal_false, literal_true]).eval(context={}) assert result == 1 result = await comparison.LTEFunction([literal_false, literal_false]).eval(context={}) assert result == 1 result = await comparison.LTEFunction([literal_true, literal_false]).eval(context={}) assert result == 0 async def test_lte_number(literal_one, literal_two): result = await comparison.LTEFunction([literal_one, literal_two]).eval(context={}) assert result == 1 result = await comparison.LTEFunction([literal_two, literal_one]).eval(context={}) assert result == 0 result = await comparison.LTEFunction([literal_two, literal_two]).eval(context={}) assert result == 1 def test_lte_parse(): e = Function.parse(None, 'LTE(1, 2)', 0) assert isinstance(e, comparison.LTEFunction) def test_lte_num_args(): with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'LTE(1)', 0) with pytest.raises(InvalidNumberOfArguments): Function.parse(None, 'LTE(1, 2, 3)', 0)
31.626263
100
0.712233
794
6,262
5.434509
0.062972
0.071842
0.146002
0.159907
0.945307
0.891773
0.862109
0.786327
0.663268
0.442178
0
0.016518
0.158895
6,262
197
101
31.786802
0.802734
0
0
0.341463
0
0
0.025395
0
0
0
0
0
0.292683
1
0.097561
false
0
0.02439
0
0.121951
0
0
0
0
null
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f082b1d41e178a3c978028f5ab7778618e0c5278
183
py
Python
nbdev_learning_template/core.py
ibacaraujo/nbdev-learning
339c4df09d0a7b1723ae469d11fbe85232b0b44b
[ "Apache-2.0" ]
null
null
null
nbdev_learning_template/core.py
ibacaraujo/nbdev-learning
339c4df09d0a7b1723ae469d11fbe85232b0b44b
[ "Apache-2.0" ]
2
2021-09-28T02:12:28.000Z
2022-02-26T07:34:33.000Z
nbdev_learning_template/core.py
ibacaraujo/nbdev-learning
339c4df09d0a7b1723ae469d11fbe85232b0b44b
[ "Apache-2.0" ]
null
null
null
# AUTOGENERATED! DO NOT EDIT! File to edit: 00_core.ipynb (unless otherwise specified). __all__ = ['learn'] # Cell def learn(model, data): ''' Model learns using the '''
20.333333
87
0.655738
24
183
4.791667
0.875
0
0
0
0
0
0
0
0
0
0
0.013889
0.213115
183
9
88
20.333333
0.784722
0.622951
0
0
1
0
0.09434
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
b2b32d0b10c0c5e909ab1f0383a50e923129f6e4
76
py
Python
readme.py
thor-shuang/git_learning
b3b86ea6636705f2bbe28014fc35303a0d7d75b5
[ "MIT" ]
null
null
null
readme.py
thor-shuang/git_learning
b3b86ea6636705f2bbe28014fc35303a0d7d75b5
[ "MIT" ]
null
null
null
readme.py
thor-shuang/git_learning
b3b86ea6636705f2bbe28014fc35303a0d7d75b5
[ "MIT" ]
null
null
null
# hello world # detached HEAD # detached HEAD # !!!! # !!!! # !!!! # !!!!
7.6
15
0.447368
6
76
5.666667
0.666667
0.705882
0
0
0
0
0
0
0
0
0
0
0.25
76
9
16
8.444444
0.596491
0.776316
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b2d72958c6d3b572be03b0ee0b677f8c9ca00f21
177
py
Python
data/contacts.py
yupasik/python_training
6a4be276fe4dccc9995efa5e1d8b507b8bea4aa3
[ "Apache-2.0" ]
null
null
null
data/contacts.py
yupasik/python_training
6a4be276fe4dccc9995efa5e1d8b507b8bea4aa3
[ "Apache-2.0" ]
null
null
null
data/contacts.py
yupasik/python_training
6a4be276fe4dccc9995efa5e1d8b507b8bea4aa3
[ "Apache-2.0" ]
null
null
null
from model.contact import Contact

# Fixture data: two contacts whose first/last names carry a matching
# numeric suffix.
testdata = [
    Contact(first_name="first_name%d" % n, last_name="last_name%d" % n)
    for n in (1, 2)
]
19.666667
62
0.740113
24
177
5.125
0.416667
0.195122
0.260163
0.341463
0
0
0
0
0
0
0
0.025974
0.129944
177
8
63
22.125
0.772727
0
0
0
0
0
0.238636
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6509e815f6e0fa50b936f2018aaecd9ba1ab5006
120
py
Python
buffer/shard-cpp-test/master-node/test-small/configurer_test.py
zaqwes8811/coordinator-tasks
7f63fdf613eff5d441a3c2c7b52d2a3d02d9736a
[ "MIT" ]
null
null
null
buffer/shard-cpp-test/master-node/test-small/configurer_test.py
zaqwes8811/coordinator-tasks
7f63fdf613eff5d441a3c2c7b52d2a3d02d9736a
[ "MIT" ]
15
2015-03-07T12:46:41.000Z
2015-04-11T09:08:36.000Z
buffer/shard-cpp-test/master-node/test-small/configurer_test.py
zaqwes8811/micro-apps
7f63fdf613eff5d441a3c2c7b52d2a3d02d9736a
[ "MIT" ]
null
null
null
# coding: utf-8
import unittest


class Test(unittest.TestCase):
    """Smoke test placeholder for the configurer module."""

    def test_name(self):
        # Use a unittest assertion instead of a bare `assert`: bare
        # asserts are silently stripped when Python runs with -O, and
        # `assert False == False` was a tautology that tested nothing.
        self.assertEqual(False, False)
15
30
0.675
16
120
5
0.8125
0
0
0
0
0
0
0
0
0
0
0.010753
0.225
120
7
31
17.142857
0.849462
0.108333
0
0
0
0
0
0
0
0
0
0
0.25
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
65336b50343a634e5a4473e304146734d6ced88b
193
py
Python
playground/__init__.py
cation98/py-image-search-engine
88e0d93595a7ab58b2f88d6d0fa471d4a4d2dac1
[ "MIT" ]
21
2017-12-19T21:01:57.000Z
2020-10-17T15:11:23.000Z
playground/__init__.py
cation98/py-image-search-engine
88e0d93595a7ab58b2f88d6d0fa471d4a4d2dac1
[ "MIT" ]
null
null
null
playground/__init__.py
cation98/py-image-search-engine
88e0d93595a7ab58b2f88d6d0fa471d4a4d2dac1
[ "MIT" ]
14
2018-05-11T03:10:20.000Z
2020-09-17T03:33:41.000Z
""" @author Victor I. Afolabi A.I. Engineer & Software developer javafolabi@gmail.com Created on 22 December, 2017 @ 9:57 PM. Copyright © 2017. Victor. All rights reserved. """
19.3
48
0.668394
27
193
4.814815
0.888889
0
0
0
0
0
0
0
0
0
0
0.086667
0.222798
193
9
49
21.444444
0.773333
0.880829
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
e8dc775bc81e17a0b1547362a52d93751367fe9a
114
py
Python
djangocms_parallaxjs/apps.py
georgema1982/djangocms-parallaxjs
f49fe3abd915a82c3a74ae87e3815467bd9b1f49
[ "MIT" ]
null
null
null
djangocms_parallaxjs/apps.py
georgema1982/djangocms-parallaxjs
f49fe3abd915a82c3a74ae87e3815467bd9b1f49
[ "MIT" ]
null
null
null
djangocms_parallaxjs/apps.py
georgema1982/djangocms-parallaxjs
f49fe3abd915a82c3a74ae87e3815467bd9b1f49
[ "MIT" ]
null
null
null
from django.apps import AppConfig


class DjangocmsParallaxjsConfig(AppConfig):
    """Django application configuration for the djangocms_parallaxjs app."""

    # Dotted module path Django uses to register this application.
    name = 'djangocms_parallaxjs'
19
43
0.807018
11
114
8.272727
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.131579
114
5
44
22.8
0.919192
0
0
0
0
0
0.175439
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
e8e779d4b7799ec69b7f47d9eb5c5cfcc688a65e
74
py
Python
src/utils.py
Ejjaffe/twitter-command-bot
b92e79f8d5dd04d803bcea2d3e1b9327d63f4a17
[ "MIT" ]
null
null
null
src/utils.py
Ejjaffe/twitter-command-bot
b92e79f8d5dd04d803bcea2d3e1b9327d63f4a17
[ "MIT" ]
7
2021-03-11T14:38:09.000Z
2021-03-19T02:55:15.000Z
src/utils.py
Ejjaffe/twitter-command-bot
b92e79f8d5dd04d803bcea2d3e1b9327d63f4a17
[ "MIT" ]
null
null
null
def api_cert_validate(api):
    """Return True if *api*'s credentials are accepted, else False.

    ``api.VerifyCredentials()`` is expected to return ``None`` on
    failure (as in python-twitter), so any non-None result counts as
    valid.
    """
    # Original read `not api.VerifyCredentials() is None`, which parses
    # as `not (x is None)` but is flagged by PEP 8 / pycodestyle (E714);
    # `is not None` is the idiomatic, unambiguous spelling.
    return api.VerifyCredentials() is not None
37
46
0.783784
11
74
5.090909
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.135135
74
2
46
37
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
e8efc8a68afffd295de35e91dae027a1cd736097
48
py
Python
FredMD/__init__.py
joe5saia/FredMD
796e3228a904298bcee5027f3c8cf721d5210c0f
[ "MIT" ]
null
null
null
FredMD/__init__.py
joe5saia/FredMD
796e3228a904298bcee5027f3c8cf721d5210c0f
[ "MIT" ]
null
null
null
FredMD/__init__.py
joe5saia/FredMD
796e3228a904298bcee5027f3c8cf721d5210c0f
[ "MIT" ]
2
2021-01-17T10:22:37.000Z
2021-02-07T06:05:44.000Z
from .fredmd import FredMD __all__ = ['FredMD']
16
26
0.729167
6
48
5.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.145833
48
3
27
16
0.756098
0
0
0
0
0
0.122449
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
333b073a1cc10cd17967d933342b74d3aec1a117
216
py
Python
easl/__init__.py
Luciden/mobile-sim
7263445b855740137ee0accd0e987953ebeddd06
[ "MIT" ]
1
2015-04-17T16:51:10.000Z
2015-04-17T16:51:10.000Z
easl/__init__.py
Luciden/mobile-sim
7263445b855740137ee0accd0e987953ebeddd06
[ "MIT" ]
null
null
null
easl/__init__.py
Luciden/mobile-sim
7263445b855740137ee0accd0e987953ebeddd06
[ "MIT" ]
null
null
null
__author__ = 'Dennis' from world import World from world import Sensor from world import Signal from entity import Entity from log import Log from simulation_suite import SimulationSuite import utils
16.615385
45
0.777778
29
216
5.62069
0.448276
0.165644
0.276074
0
0
0
0
0
0
0
0
0
0.208333
216
12
46
18
0.953216
0
0
0
0
0
0.029412
0
0
0
0
0
0
1
0
false
0
0.875
0
0.875
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
333ba4ba95bed4cb6937597cde048f37afcee6de
122
py
Python
senders/screen.py
krzysztofr/juwreport
d82a9e8c5b37637142af0419a7d46e502efd835c
[ "MIT" ]
null
null
null
senders/screen.py
krzysztofr/juwreport
d82a9e8c5b37637142af0419a7d46e502efd835c
[ "MIT" ]
2
2016-12-27T11:29:59.000Z
2017-03-23T14:36:20.000Z
senders/screen.py
krzysztofr/juwreport
d82a9e8c5b37637142af0419a7d46e502efd835c
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from juwparser import timestampize def send(message, settings): print timestampize(message)
17.428571
34
0.713115
14
122
6.214286
0.857143
0
0
0
0
0
0
0
0
0
0
0.009804
0.163934
122
7
35
17.428571
0.843137
0.172131
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
33420cb71bdb2f56b9c2dd270450b62b43a1d1c3
689
py
Python
siptracklib/transport/xmlrpc/event.py
sii/siptrack
40712cf114e4c689bbd251aa9495f9ce6340bc9e
[ "Apache-2.0" ]
8
2016-04-19T09:39:46.000Z
2021-11-09T10:57:54.000Z
siptracklib/transport/xmlrpc/event.py
sii/siptrack
40712cf114e4c689bbd251aa9495f9ce6340bc9e
[ "Apache-2.0" ]
3
2016-03-21T13:09:16.000Z
2017-08-23T17:25:59.000Z
siptracklib/transport/xmlrpc/event.py
sii/siptrack
40712cf114e4c689bbd251aa9495f9ce6340bc9e
[ "Apache-2.0" ]
2
2015-06-25T11:22:38.000Z
2021-06-22T10:39:17.000Z
from siptracklib.transport.xmlrpc import baserpc class CommandQueueRPC(baserpc.BaseRPC): command_path = 'command.queue' class CommandRPC(baserpc.BaseRPC): command_path = 'command' def setFreetext(self, oid, value): return self.send('set_freetext', oid, value) class EventRPC(baserpc.BaseRPC): command_path = 'event' class EventTriggerRPC(baserpc.BaseRPC): command_path = 'event.trigger' class EventTriggerRuleRPC(baserpc.BaseRPC): command_path = 'event.trigger.rule' class EventTriggerRulePythonRPC(baserpc.BaseRPC): command_path = 'event.trigger.rule.python' def setCode(self, oid, value): return self.send('set_code', oid, value)
25.518519
52
0.735849
79
689
6.316456
0.392405
0.168337
0.252505
0.300601
0.543086
0.354709
0.280561
0
0
0
0
0
0.155298
689
26
53
26.5
0.857388
0
0
0
0
0
0.146802
0.036337
0
0
0
0
0
1
0.117647
false
0
0.058824
0.117647
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
3345aed7a7cad725657e752686a7a457b532dc4a
165
py
Python
beedb/auth/__init__.py
soltysh/beedb
b8849334a3f5204de91ff4b800a29b892f361b12
[ "Apache-2.0" ]
null
null
null
beedb/auth/__init__.py
soltysh/beedb
b8849334a3f5204de91ff4b800a29b892f361b12
[ "Apache-2.0" ]
null
null
null
beedb/auth/__init__.py
soltysh/beedb
b8849334a3f5204de91ff4b800a29b892f361b12
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """This package contains code related to all authorization calls. The main and currently only supported authorization method is OAuth2. """
27.5
69
0.739394
22
165
5.545455
0.954545
0
0
0
0
0
0
0
0
0
0
0.014388
0.157576
165
5
70
33
0.863309
0.945455
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6821e7b7e88ac79a168b30b2bf218320842ffb47
73
py
Python
python/hackerrank/debugging/find that bug/task.py
3keepmovingforward3/ENGR1102
b4f38a70560fc695d70706279047b1dec9f5c7f4
[ "MIT" ]
null
null
null
python/hackerrank/debugging/find that bug/task.py
3keepmovingforward3/ENGR1102
b4f38a70560fc695d70706279047b1dec9f5c7f4
[ "MIT" ]
null
null
null
python/hackerrank/debugging/find that bug/task.py
3keepmovingforward3/ENGR1102
b4f38a70560fc695d70706279047b1dec9f5c7f4
[ "MIT" ]
null
null
null
# Complete the function below. def findSum(a, b): return(c)
8.111111
30
0.589041
10
73
4.3
1
0
0
0
0
0
0
0
0
0
0
0
0.30137
73
8
31
9.125
0.843137
0.383562
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
6836e91157eb4725900141e9251710cddd2da990
11,342
py
Python
hc/api/tests/test_notify_webhook.py
IfBkg/healthchecks
dcd8a74c6b0bcdb0065e7c27d5b6639823400562
[ "BSD-3-Clause" ]
11
2018-01-14T11:44:07.000Z
2022-01-30T18:16:44.000Z
hc/api/tests/test_notify_webhook.py
IfBkg/healthchecks
dcd8a74c6b0bcdb0065e7c27d5b6639823400562
[ "BSD-3-Clause" ]
147
2017-03-08T08:09:08.000Z
2021-09-28T07:59:40.000Z
hc/api/tests/test_notify_webhook.py
IfBkg/healthchecks
dcd8a74c6b0bcdb0065e7c27d5b6639823400562
[ "BSD-3-Clause" ]
6
2017-12-05T17:58:29.000Z
2021-03-13T16:30:35.000Z
# coding: utf-8 from datetime import timedelta as td import json from unittest.mock import patch from django.utils.timezone import now from hc.api.models import Channel, Check, Notification from hc.test import BaseTestCase from requests.exceptions import ConnectionError, Timeout from django.test.utils import override_settings class NotifyWebhookTestCase(BaseTestCase): def _setup_data(self, value, status="down", email_verified=True): self.check = Check(project=self.project) self.check.status = status self.check.last_ping = now() - td(minutes=61) self.check.save() self.channel = Channel(project=self.project) self.channel.kind = "webhook" self.channel.value = value self.channel.email_verified = email_verified self.channel.save() self.channel.checks.add(self.check) @patch("hc.api.transports.requests.request") def test_webhook(self, mock_get): definition = { "method_down": "GET", "url_down": "http://example", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) mock_get.return_value.status_code = 200 self.channel.notify(self.check) mock_get.assert_called_with( "get", "http://example", headers={"User-Agent": "healthchecks.io"}, timeout=5, ) @patch("hc.api.transports.requests.request", side_effect=Timeout) def test_webhooks_handle_timeouts(self, mock_get): definition = { "method_down": "GET", "url_down": "http://example", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.channel.notify(self.check) # The transport should have retried 3 times self.assertEqual(mock_get.call_count, 3) n = Notification.objects.get() self.assertEqual(n.error, "Connection timed out") self.channel.refresh_from_db() self.assertEqual(self.channel.last_error, "Connection timed out") @patch("hc.api.transports.requests.request", side_effect=ConnectionError) def test_webhooks_handle_connection_errors(self, mock_get): definition = { "method_down": "GET", "url_down": "http://example", "body_down": "", "headers_down": {}, } 
self._setup_data(json.dumps(definition)) self.channel.notify(self.check) # The transport should have retried 3 times self.assertEqual(mock_get.call_count, 3) n = Notification.objects.get() self.assertEqual(n.error, "Connection failed") @patch("hc.api.transports.requests.request") def test_webhooks_handle_500(self, mock_get): definition = { "method_down": "GET", "url_down": "http://example", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) mock_get.return_value.status_code = 500 self.channel.notify(self.check) # The transport should have retried 3 times self.assertEqual(mock_get.call_count, 3) n = Notification.objects.get() self.assertEqual(n.error, "Received status code 500") @patch("hc.api.transports.requests.request", side_effect=Timeout) def test_webhooks_dont_retry_when_sending_test_notifications(self, mock_get): definition = { "method_down": "GET", "url_down": "http://example", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.channel.notify(self.check, is_test=True) # is_test flag is set, the transport should not retry: self.assertEqual(mock_get.call_count, 1) n = Notification.objects.get() self.assertEqual(n.error, "Connection timed out") @patch("hc.api.transports.requests.request") def test_webhooks_support_variables(self, mock_get): definition = { "method_down": "GET", "url_down": "http://host/$CODE/$STATUS/$TAG1/$TAG2/?name=$NAME", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.check.name = "Hello World" self.check.tags = "foo bar" self.check.save() self.channel.notify(self.check) url = "http://host/%s/down/foo/bar/?name=Hello%%20World" % self.check.code args, kwargs = mock_get.call_args self.assertEqual(args[0], "get") self.assertEqual(args[1], url) self.assertEqual(kwargs["headers"], {"User-Agent": "healthchecks.io"}) self.assertEqual(kwargs["timeout"], 5) @patch("hc.api.transports.requests.request") def test_webhooks_handle_variable_variables(self, 
mock_get): definition = { "method_down": "GET", "url_down": "http://host/$$NAMETAG1", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.check.tags = "foo bar" self.check.save() self.channel.notify(self.check) # $$NAMETAG1 should *not* get transformed to "foo" args, kwargs = mock_get.call_args self.assertEqual(args[1], "http://host/$TAG1") @patch("hc.api.transports.requests.request") def test_webhooks_support_post(self, mock_request): definition = { "method_down": "POST", "url_down": "http://example.com", "body_down": "The Time Is $NOW", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.check.save() self.channel.notify(self.check) args, kwargs = mock_request.call_args self.assertEqual(args[0], "post") self.assertEqual(args[1], "http://example.com") # spaces should not have been urlencoded: payload = kwargs["data"].decode() self.assertTrue(payload.startswith("The Time Is 2")) @patch("hc.api.transports.requests.request") def test_webhooks_dollarsign_escaping(self, mock_get): # If name or tag contains what looks like a variable reference, # that should be left alone: definition = { "method_down": "GET", "url_down": "http://host/$NAME", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.check.name = "$TAG1" self.check.tags = "foo" self.check.save() self.channel.notify(self.check) url = "http://host/%24TAG1" mock_get.assert_called_with( "get", url, headers={"User-Agent": "healthchecks.io"}, timeout=5 ) @patch("hc.api.transports.requests.request") def test_webhooks_handle_up_events(self, mock_get): definition = { "method_up": "GET", "url_up": "http://bar", "body_up": "", "headers_up": {}, } self._setup_data(json.dumps(definition), status="up") self.channel.notify(self.check) mock_get.assert_called_with( "get", "http://bar", headers={"User-Agent": "healthchecks.io"}, timeout=5 ) @patch("hc.api.transports.requests.request") def test_webhooks_handle_noop_up_events(self, mock_get): 
definition = { "method_up": "GET", "url_up": "", "body_up": "", "headers_up": {}, } self._setup_data(json.dumps(definition), status="up") self.channel.notify(self.check) self.assertFalse(mock_get.called) self.assertEqual(Notification.objects.count(), 0) @patch("hc.api.transports.requests.request") def test_webhooks_handle_unicode_post_body(self, mock_request): definition = { "method_down": "POST", "url_down": "http://foo.com", "body_down": "(╯°□°)╯︵ ┻━┻", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.check.save() self.channel.notify(self.check) args, kwargs = mock_request.call_args # unicode should be encoded into utf-8 self.assertIsInstance(kwargs["data"], bytes) @patch("hc.api.transports.requests.request") def test_webhooks_handle_post_headers(self, mock_request): definition = { "method_down": "POST", "url_down": "http://foo.com", "body_down": "data", "headers_down": {"Content-Type": "application/json"}, } self._setup_data(json.dumps(definition)) self.channel.notify(self.check) headers = {"User-Agent": "healthchecks.io", "Content-Type": "application/json"} mock_request.assert_called_with( "post", "http://foo.com", data=b"data", headers=headers, timeout=5 ) @patch("hc.api.transports.requests.request") def test_webhooks_handle_get_headers(self, mock_request): definition = { "method_down": "GET", "url_down": "http://foo.com", "body_down": "", "headers_down": {"Content-Type": "application/json"}, } self._setup_data(json.dumps(definition)) self.channel.notify(self.check) headers = {"User-Agent": "healthchecks.io", "Content-Type": "application/json"} mock_request.assert_called_with( "get", "http://foo.com", headers=headers, timeout=5 ) @patch("hc.api.transports.requests.request") def test_webhooks_allow_user_agent_override(self, mock_request): definition = { "method_down": "GET", "url_down": "http://foo.com", "body_down": "", "headers_down": {"User-Agent": "My-Agent"}, } self._setup_data(json.dumps(definition)) self.channel.notify(self.check) 
headers = {"User-Agent": "My-Agent"} mock_request.assert_called_with( "get", "http://foo.com", headers=headers, timeout=5 ) @patch("hc.api.transports.requests.request") def test_webhooks_support_variables_in_headers(self, mock_request): definition = { "method_down": "GET", "url_down": "http://foo.com", "body_down": "", "headers_down": {"X-Message": "$NAME is DOWN"}, } self._setup_data(json.dumps(definition)) self.check.name = "Foo" self.check.save() self.channel.notify(self.check) headers = {"User-Agent": "healthchecks.io", "X-Message": "Foo is DOWN"} mock_request.assert_called_with( "get", "http://foo.com", headers=headers, timeout=5 ) @override_settings(WEBHOOKS_ENABLED=False) def test_it_requires_webhooks_enabled(self): definition = { "method_down": "GET", "url_down": "http://example", "body_down": "", "headers_down": {}, } self._setup_data(json.dumps(definition)) self.channel.notify(self.check) n = Notification.objects.get() self.assertEqual(n.error, "Webhook notifications are not enabled.")
32.875362
87
0.592312
1,274
11,342
5.087127
0.145997
0.048604
0.0341
0.044592
0.735998
0.723191
0.711156
0.710076
0.6894
0.665175
0
0.005901
0.267854
11,342
344
88
32.97093
0.773483
0.035884
0
0.590226
0
0
0.216862
0.049799
0
0
0
0
0.105263
1
0.067669
false
0
0.030075
0
0.101504
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
68546d0fc1dc0d10d2d70585ca0df188826370a7
96
py
Python
gamegrapher/ossystem.py
HypoChloremic/GaGrapher
7e180d9dbd7afbe26e26507f1b23994029264e0e
[ "MIT" ]
null
null
null
gamegrapher/ossystem.py
HypoChloremic/GaGrapher
7e180d9dbd7afbe26e26507f1b23994029264e0e
[ "MIT" ]
null
null
null
gamegrapher/ossystem.py
HypoChloremic/GaGrapher
7e180d9dbd7afbe26e26507f1b23994029264e0e
[ "MIT" ]
null
null
null
import os

# Run the Windows `tasklist` command and record the result in random.txt.
# NOTE(review): os.system() returns the process *exit status* (an int),
# not the command's stdout -- so this writes e.g. "0" to the file, while
# the task list itself goes to the console. If capturing the output was
# intended, subprocess.check_output would be needed; confirm intent.
with open("random.txt", 'w') as file:
    file.write(str(os.system("tasklist")))
19.2
43
0.625
15
96
4
0.866667
0
0
0
0
0
0
0
0
0
0
0
0.177083
96
4
44
24
0.759494
0
0
0
0
0
0.206522
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
685f19e41ddaac288ab6126383c88a5657a87569
147
py
Python
email_validation/apps.py
rezaramadhan/ValidationSystem
1e36f37ee79bd3a8e618dec492e5cfbe83791ebe
[ "BSD-3-Clause" ]
null
null
null
email_validation/apps.py
rezaramadhan/ValidationSystem
1e36f37ee79bd3a8e618dec492e5cfbe83791ebe
[ "BSD-3-Clause" ]
null
null
null
email_validation/apps.py
rezaramadhan/ValidationSystem
1e36f37ee79bd3a8e618dec492e5cfbe83791ebe
[ "BSD-3-Clause" ]
null
null
null
from __future__ import unicode_literals

from django.apps import AppConfig


class EmailValidationConfig(AppConfig):
    """Django application configuration for the email_validation app."""

    # Dotted module path Django uses to register this application.
    name = 'email_validation'
18.375
39
0.816327
16
147
7.125
0.8125
0
0
0
0
0
0
0
0
0
0
0
0.136054
147
7
40
21
0.897638
0
0
0
0
0
0.108844
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
688885c272a927a49cb8195ecf0e50984357fc73
816
py
Python
python/ql/test/library-tests/frameworks/django-v2-v3/FileField_test.py
Yonah125/codeql
eab8bcc82d736cda78240583edc3f69e4618f7fa
[ "MIT" ]
643
2018-08-03T11:16:54.000Z
2020-04-27T23:10:55.000Z
python/ql/test/library-tests/frameworks/django-v2-v3/FileField_test.py
DirtyApexAlpha/codeql
4c59b0d2992ee0d90cc2f46d6a85ac79e1d57f21
[ "MIT" ]
1,880
2018-08-03T11:28:32.000Z
2020-04-28T13:18:51.000Z
python/ql/test/library-tests/frameworks/django-v2-v3/FileField_test.py
DirtyApexAlpha/codeql
4c59b0d2992ee0d90cc2f46d6a85ac79e1d57f21
[ "MIT" ]
218
2018-08-03T11:16:58.000Z
2020-04-24T02:24:00.000Z
from django.db import models
import django.db.models.fields.files


# Taint-tracking test: each upload_to callback should receive a tainted
# filename. The trailing `# $ tainted` markers are CodeQL inline test
# expectations -- they are part of the test and must not be edited.
def custom_path_function_1(instance, filename):
    ensure_tainted(filename)  # $ tainted


def custom_path_function_2(instance, filename):
    ensure_tainted(filename)  # $ tainted


def custom_path_function_3(instance, filename):
    ensure_tainted(filename)  # $ tainted


def custom_path_function_4(instance, filename):
    ensure_tainted(filename)  # $ tainted


# Checks that taint tracking also follows user-defined FileField subclasses.
class CustomFileFieldSubclass(models.FileField):
    pass


class MyModel(models.Model):
    # Four wiring styles for upload_to: positional argument, fully
    # qualified FileField class, ImageField, and a custom subclass.
    upload_1 = models.FileField(None, None, custom_path_function_1)
    upload_2 = django.db.models.fields.files.FileField(upload_to=custom_path_function_2)
    upload_3 = models.ImageField(upload_to=custom_path_function_3)
    upload_4 = CustomFileFieldSubclass(upload_to=custom_path_function_4)
29.142857
88
0.794118
107
816
5.728972
0.271028
0.130506
0.23491
0.137031
0.598695
0.389886
0.318108
0.318108
0.318108
0.318108
0
0.016736
0.121324
816
27
89
30.222222
0.838215
0.047794
0
0.235294
0
0
0
0
0
0
0
0
0
1
0.235294
false
0.058824
0.117647
0
0.705882
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
68921d30d96e6ed1b62f283672d9ac8ec399c07c
848
py
Python
src/modules/comm/__init__.py
fringsoo/NDQ
e243ba917e331065e82c6634cb1d756873747be5
[ "Apache-2.0" ]
1
2021-07-25T19:35:50.000Z
2021-07-25T19:35:50.000Z
src/modules/comm/__init__.py
fringsoo/NDQ
e243ba917e331065e82c6634cb1d756873747be5
[ "Apache-2.0" ]
null
null
null
src/modules/comm/__init__.py
fringsoo/NDQ
e243ba917e331065e82c6634cb1d756873747be5
[ "Apache-2.0" ]
null
null
null
from .information_bottleneck_comm import IBComm from .information_bottleneck_comm_full import IBFComm from .information_bottleneck_comm_not_IB import IBNIBComm from .tar_comm import TarComm from .information_bottleneck_pruned_comm import IBPComm from .information_bottleneck_comm_full_new import IPComm_mind, IPComm_speaker, IPComm_listener, IPComm_speaker_simple, IPComm_listener_simple, IPComm_speaker_train, IPComm_listener_train REGISTRY = {"information_bottleneck": IBComm, "information_bottleneck_full": IBFComm, "information_bottleneck_not_IB": IBNIBComm, "tar": TarComm, "information_bottleneck_pruned": IBPComm, "information_pragmatics": [IPComm_mind, IPComm_speaker, IPComm_listener, IPComm_speaker_simple, IPComm_listener_simple, IPComm_speaker_train, IPComm_listener_train]}
60.571429
186
0.814858
97
848
6.628866
0.247423
0.293935
0.194401
0.180404
0.454121
0.351477
0.351477
0.351477
0.351477
0.351477
0
0
0.130896
848
13
187
65.230769
0.872456
0
0
0
0
0
0.15566
0.152123
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
68a5b492c4c4663ef83d9579c639f4bab897f769
394
py
Python
stage1/BTCParser/__init__.py
0xf3cd/BTC-Blockchain-Raw-Data-ETL
842687801ddbf6b42d54c4a670d4e152eb8f5674
[ "MIT" ]
null
null
null
stage1/BTCParser/__init__.py
0xf3cd/BTC-Blockchain-Raw-Data-ETL
842687801ddbf6b42d54c4a670d4e152eb8f5674
[ "MIT" ]
null
null
null
stage1/BTCParser/__init__.py
0xf3cd/BTC-Blockchain-Raw-Data-ETL
842687801ddbf6b42d54c4a670d4e152eb8f5674
[ "MIT" ]
null
null
null
from .RawData import Header, Transaction, Txin, Txout, Script, Witness, Block, count_up_blk_in_dat_file from .RawDataParser import build_from_file, build_from_folder from .ScriptToken import OPCODE_TO_WORD_TABLE, WORD_TO_OPCODE_TABLE from .ScriptParser import parse_script, LockingScriptType from .Summary import BlockSummary, summarize_raw_data_file, LOCKING_SCRIPT_TYPE_ID from .Tool import *
65.666667
103
0.862944
57
394
5.578947
0.631579
0.056604
0
0
0
0
0
0
0
0
0
0
0.088832
394
6
104
65.666667
0.885794
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
d7bde6a70ac33852dc55c38b0e690f76dc2054f5
357
py
Python
docker/files/database.py
pariekshit/vespene-docker
c63ade953b7a2ae4aa66f5efff408c0831e152d7
[ "MIT" ]
1
2018-11-01T10:29:07.000Z
2018-11-01T10:29:07.000Z
docker/files/database.py
pariekshit/vespene-docker
c63ade953b7a2ae4aa66f5efff408c0831e152d7
[ "MIT" ]
null
null
null
docker/files/database.py
pariekshit/vespene-docker
c63ade953b7a2ae4aa66f5efff408c0831e152d7
[ "MIT" ]
1
2018-11-26T05:53:38.000Z
2018-11-26T05:53:38.000Z
import os DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': os.environ.get('DB_NAME', ''), 'USER': os.environ.get('DB_USER', ''), 'PASSWORD' : os.environ.get('DB_PASS', ''), 'HOST': os.environ.get('DB_HOST', ''), 'ATOMIC_REQUESTS': True } }
29.75
55
0.478992
36
357
4.611111
0.527778
0.216867
0.289157
0.337349
0
0
0
0
0
0
0
0
0.319328
357
11
56
32.454545
0.683128
0
0
0
0
0
0.294118
0.081232
0
0
0
0
0
1
0
false
0.090909
0.090909
0
0.090909
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
d7f050d1a13e9886c8a459ad7c72d9a74231d489
344
py
Python
serverless/service/plugins/iam_roles.py
epsyhealth/serverless-builder
6a1f943b5cabc4c4748234b1623a9ced6464043a
[ "MIT" ]
3
2022-03-16T14:25:03.000Z
2022-03-24T15:04:55.000Z
serverless/service/plugins/iam_roles.py
epsyhealth/serverless-builder
6a1f943b5cabc4c4748234b1623a9ced6464043a
[ "MIT" ]
3
2022-01-24T20:11:15.000Z
2022-01-26T19:33:20.000Z
serverless/service/plugins/iam_roles.py
epsyhealth/serverless-builder
6a1f943b5cabc4c4748234b1623a9ced6464043a
[ "MIT" ]
1
2022-02-15T13:54:29.000Z
2022-02-15T13:54:29.000Z
from serverless.service.plugins.generic import Generic class IAMRoles(Generic): """ Plugin npm: https://www.npmjs.com/package/serverless-iam-roles-per-function """ yaml_tag = "!IAMRolesPlugin" def __init__(self): super().__init__("serverless-iam-roles-per-function") def enable(self, service): pass
21.5
79
0.674419
40
344
5.575
0.7
0.116592
0.161435
0.188341
0.26009
0
0
0
0
0
0
0
0.19186
344
15
80
22.933333
0.802158
0.218023
0
0
0
0
0.189723
0.130435
0
0
0
0
0
1
0.285714
false
0.142857
0.142857
0
0.714286
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
d7fe9173daf4e76b97a7e875e590a6dccfcfeb19
249
py
Python
nlingua/corpora/_constants.py
clueless-skywatcher/polyglossa
93bdfe3da457454fd984c0af4bf3a6db724d6b56
[ "MIT" ]
null
null
null
nlingua/corpora/_constants.py
clueless-skywatcher/polyglossa
93bdfe3da457454fd984c0af4bf3a6db724d6b56
[ "MIT" ]
null
null
null
nlingua/corpora/_constants.py
clueless-skywatcher/polyglossa
93bdfe3da457454fd984c0af4bf3a6db724d6b56
[ "MIT" ]
null
null
null
PENN_TREEBANK_SRC = "https://raw.githubusercontent.com/nltk/nltk_data/gh-pages/packages/corpora/treebank.zip" LOCAL_CORPORA_DIR_UNIX = "/usr/share/nl_data" LOCAL_CORPORA_DIR_WINDOWS = "C:\\nl_data" LOCAL_CORPORA_DIR_MAC = "/usr/local/share/nl_data"
49.8
109
0.811245
40
249
4.675
0.575
0.192513
0.240642
0.192513
0.224599
0
0
0
0
0
0
0
0.048193
249
5
110
49.8
0.78903
0
0
0
0
0.25
0.56
0.096
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0bc4b842b35677d022e6f4fa1bda05f4c0859289
216
py
Python
geodescriber/tests/services/test_geodescriber.py
icpac-igad/Geodescriber
2341c655c570d10d497ba8ce7d1868ce0cc10d19
[ "MIT" ]
null
null
null
geodescriber/tests/services/test_geodescriber.py
icpac-igad/Geodescriber
2341c655c570d10d497ba8ce7d1868ce0cc10d19
[ "MIT" ]
null
null
null
geodescriber/tests/services/test_geodescriber.py
icpac-igad/Geodescriber
2341c655c570d10d497ba8ce7d1868ce0cc10d19
[ "MIT" ]
null
null
null
import logging from geodescriber.services.analysis.geodescriber import GeodescriberService def test_serialize_umd(): """Test the Hansen Serializer.""" d = GeodescriberService() assert d == is not None
21.6
75
0.75
24
216
6.666667
0.791667
0
0
0
0
0
0
0
0
0
0
0
0.166667
216
9
76
24
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.2
0
null
null
0
0.4
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
0bce47b4bedd0e9d0b359216d7cf95748f6ee467
58
py
Python
auto.py
chenyanmo/mac-android-decompile
349d20bda959770776b74a4227c03e5f9c6570e2
[ "MIT" ]
null
null
null
auto.py
chenyanmo/mac-android-decompile
349d20bda959770776b74a4227c03e5f9c6570e2
[ "MIT" ]
null
null
null
auto.py
chenyanmo/mac-android-decompile
349d20bda959770776b74a4227c03e5f9c6570e2
[ "MIT" ]
1
2020-05-04T05:54:02.000Z
2020-05-04T05:54:02.000Z
# -*- coding: utf-8 -*- from function import run run(0)
9.666667
24
0.603448
9
58
3.888889
0.888889
0
0
0
0
0
0
0
0
0
0
0.043478
0.206897
58
5
25
11.6
0.717391
0.362069
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
0bd005d64a99989ac751df050617aa811b5a045d
97
py
Python
mealpy/__main__.py
ipwnponies/mealpy
ae109f87fe4f294dd70352bb047dfb5e5a6bb4ff
[ "MIT" ]
null
null
null
mealpy/__main__.py
ipwnponies/mealpy
ae109f87fe4f294dd70352bb047dfb5e5a6bb4ff
[ "MIT" ]
null
null
null
mealpy/__main__.py
ipwnponies/mealpy
ae109f87fe4f294dd70352bb047dfb5e5a6bb4ff
[ "MIT" ]
null
null
null
from mealpy.cli import cli cli(prog_name=__package__) # pylint: disable=unexpected-keyword-arg
24.25
68
0.804124
14
97
5.214286
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.103093
97
3
69
32.333333
0.83908
0.391753
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
0bfaed2fbccd1520cfdd3c48c5b2651add68d0be
7,881
py
Python
generated/python/gapic-google-cloud-logging-v2-docs/google/cloud/grpc/logging/v2/logging_pb2.py
landrito/api-client-staging
140c312c9335af160efce5b37842c995308e0148
[ "BSD-3-Clause" ]
18
2016-12-08T20:47:57.000Z
2022-01-29T19:36:04.000Z
generated/python/gapic-google-cloud-logging-v2-docs/google/cloud/grpc/logging/v2/logging_pb2.py
landrito/api-client-staging
140c312c9335af160efce5b37842c995308e0148
[ "BSD-3-Clause" ]
252
2016-09-21T20:51:36.000Z
2021-03-25T23:02:36.000Z
generated/python/gapic-google-cloud-logging-v2-docs/google/cloud/grpc/logging/v2/logging_pb2.py
landrito/api-client-staging
140c312c9335af160efce5b37842c995308e0148
[ "BSD-3-Clause" ]
37
2016-09-19T21:13:16.000Z
2022-01-29T19:36:07.000Z
# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.api import monitored_resource_pb2 from google.cloud.grpc.logging.v2 import log_entry_pb2 class DeleteLogRequest(object): """ The parameters to DeleteLog. Attributes: log_name (string): Required. The resource name of the log to delete: :: \"projects/[PROJECT_ID]/logs/[LOG_ID]\" \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" ``[LOG_ID]`` must be URL-encoded. For example, ``\"projects/my-project-id/logs/syslog\"``, ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. For more information about log names, see ``LogEntry``. """ pass class WriteLogEntriesRequest(object): """ The parameters to WriteLogEntries. Attributes: log_name (string): Optional. A default log resource name that is assigned to all log entries in ``entries`` that do not specify a value for ``log_name``: :: \"projects/[PROJECT_ID]/logs/[LOG_ID]\" \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" ``[LOG_ID]`` must be URL-encoded. For example, ``\"projects/my-project-id/logs/syslog\"`` or ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. For more information about log names, see ``LogEntry``. resource (:class:`google.api.monitored_resource_pb2.MonitoredResource`): Optional. A default monitored resource object that is assigned to all log entries in ``entries`` that do not specify a value for ``resource``. 
Example: :: { \"type\": \"gce_instance\", \"labels\": { \"zone\": \"us-central1-a\", \"instance_id\": \"00000000000000000000\" }} See ``LogEntry``. labels (dict[string -> :class:`google.cloud.grpc.logging.v2.logging_pb2.WriteLogEntriesRequest.LabelsEntry`]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this parameter, then the log entry's label is not changed. See ``LogEntry``. entries (list[:class:`google.cloud.grpc.logging.v2.log_entry_pb2.LogEntry`]): Required. The log entries to write. Values supplied for the fields ``log_name``, ``resource``, and ``labels`` in this ``entries.write`` request are added to those log entries that do not provide their own values for the fields. To improve throughput and to avoid exceeding the `quota limit <https://cloud.google.com/logging/quota-policy>`_ for calls to ``entries.write``, you should write multiple log entries at once rather than calling this method for each individual log entry. partial_success (bool): Optional. Whether valid entries should be written even if some other entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any entry is not written, the response status will be the error associated with one of the failed entries and include error details in the form of WriteLogEntriesPartialErrors. """ pass class WriteLogEntriesResponse(object): """ Result returned from WriteLogEntries. empty Attributes: """ pass class ListLogEntriesRequest(object): """ The parameters to ``ListLogEntries``. Attributes: project_ids (list[string]): Deprecated. One or more project identifiers or project numbers from which to retrieve log entries. Example: ``\"my-project-1A\"``. If present, these project identifiers are converted to resource format and added to the list of resources in ``resourceNames``. Callers should use ``resourceNames`` rather than this parameter. 
resource_names (list[string]): Required. One or more cloud resources from which to retrieve log entries: :: \"projects/[PROJECT_ID]\" \"organizations/[ORGANIZATION_ID]\" Projects listed in the ``project_ids`` field are added to this list. filter (string): Optional. A filter that chooses which log entries to return. See [Advanced Logs Filters](/logging/docs/view/advanced_filters). Only log entries that match the filter are returned. An empty filter matches all log entries. The maximum length of the filter is 20000 characters. order_by (string): Optional. How the results should be sorted. Presently, the only permitted values are ``\"timestamp asc\"`` (default) and ``\"timestamp desc\"``. The first option returns entries in order of increasing values of ``LogEntry.timestamp`` (oldest first), and the second option returns entries in order of decreasing timestamps (newest first). Entries with equal timestamps are returned in order of ``LogEntry.insertId``. page_size (int): Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of ``nextPageToken`` in the response indicates that more results might be available. page_token (string): Optional. If present, then retrieve the next batch of results from the preceding call to this method. ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. """ pass class ListLogEntriesResponse(object): """ Result returned from ``ListLogEntries``. Attributes: entries (list[:class:`google.cloud.grpc.logging.v2.log_entry_pb2.LogEntry`]): A list of log entries. next_page_token (string): If there might be more results than appear in this response, then ``nextPageToken`` is included. To get the next set of results, call this method again using the value of ``nextPageToken`` as ``pageToken``. 
""" pass class ListMonitoredResourceDescriptorsRequest(object): """ The parameters to ListMonitoredResourceDescriptors Attributes: page_size (int): Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of ``nextPageToken`` in the response indicates that more results might be available. page_token (string): Optional. If present, then retrieve the next batch of results from the preceding call to this method. ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. """ pass class ListMonitoredResourceDescriptorsResponse(object): """ Result returned from ListMonitoredResourceDescriptors. Attributes: resource_descriptors (list[:class:`google.api.monitored_resource_pb2.MonitoredResourceDescriptor`]): A list of resource descriptors. next_page_token (string): If there might be more results than appear in this response, then ``nextPageToken`` is included. To get the next set of results, call this method again using the value of ``nextPageToken`` as ``pageToken``. """ pass
42.144385
191
0.689126
996
7,881
5.403614
0.289157
0.022297
0.011148
0.016351
0.401152
0.392605
0.353029
0.353029
0.353029
0.353029
0
0.011113
0.223576
7,881
186
192
42.370968
0.868443
0.865119
0
0.4375
0
0
0
0
0
0
0
0
0
1
0
true
0.4375
0.125
0
0.5625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
4
0bfce5c0c1f14b2e83cff70b09a7cf95b73f1043
178
py
Python
sciit/gitlab/sciit_pythonanywhere_com.py
ScienceOfComputerProgramming/SCICO-D-20-00070
fd1e0acaa8b0efc4b1931763c7b73806558c7f28
[ "MIT" ]
null
null
null
sciit/gitlab/sciit_pythonanywhere_com.py
ScienceOfComputerProgramming/SCICO-D-20-00070
fd1e0acaa8b0efc4b1931763c7b73806558c7f28
[ "MIT" ]
null
null
null
sciit/gitlab/sciit_pythonanywhere_com.py
ScienceOfComputerProgramming/SCICO-D-20-00070
fd1e0acaa8b0efc4b1931763c7b73806558c7f28
[ "MIT" ]
null
null
null
import sys project_home = '/home/sciit/sciit/' if project_home not in sys.path: sys.path = [project_home] + sys.path from sciit.gitlab.webservice import app as application
22.25
54
0.752809
28
178
4.678571
0.535714
0.251908
0
0
0
0
0
0
0
0
0
0
0.151685
178
7
55
25.428571
0.86755
0
0
0
0
0
0.101124
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
0444f18b9e53cf74e193c40c736c380cf4f0eca1
147
py
Python
comment/urls.py
SJ-Z/DjangoSite
6a26804e286d060cef78186e0397d83897fb224e
[ "MIT" ]
null
null
null
comment/urls.py
SJ-Z/DjangoSite
6a26804e286d060cef78186e0397d83897fb224e
[ "MIT" ]
5
2021-03-19T10:51:52.000Z
2022-02-10T13:57:43.000Z
comment/urls.py
SJ-Z/DjangoSite
6a26804e286d060cef78186e0397d83897fb224e
[ "MIT" ]
null
null
null
from django.urls import path from comment import views urlpatterns = [ path('update_comment', views.update_comment, name='update_comment'), ]
21
72
0.761905
19
147
5.736842
0.526316
0.357798
0
0
0
0
0
0
0
0
0
0
0.136054
147
6
73
24.5
0.858268
0
0
0
0
0
0.190476
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
044bc367aff83f60be08aad3679d3f6414bf2081
1,553
py
Python
segments/APP.py
pinfort/jpeg-analyzer
5e68a1125d8626f34fdac2cf094cef5a2a8a1b58
[ "MIT" ]
5
2018-12-09T01:32:24.000Z
2019-10-23T17:38:50.000Z
segments/APP.py
pinfort/jpeg-analyzer
5e68a1125d8626f34fdac2cf094cef5a2a8a1b58
[ "MIT" ]
null
null
null
segments/APP.py
pinfort/jpeg-analyzer
5e68a1125d8626f34fdac2cf094cef5a2a8a1b58
[ "MIT" ]
2
2018-12-09T01:41:56.000Z
2019-09-28T03:10:04.000Z
import warnings from segments.segmentsCommon import SegmentsCommon from markers import Markers from segments.APPs.APP0 import APP0 class APP(SegmentsCommon): def __init__(self): super(APP, self).__init__() def analyze(self, marker, body): if marker == Markers.APP[0]: return APP0().analyze(marker, body) elif marker == Markers.APP[1]: pass elif marker == Markers.APP[2]: pass elif marker == Markers.APP[3]: pass elif marker == Markers.APP[4]: pass elif marker == Markers.APP[5]: pass elif marker == Markers.APP[6]: pass elif marker == Markers.APP[7]: pass elif marker == Markers.APP[8]: pass elif marker == Markers.APP[9]: pass elif marker == Markers.APP[10]: pass elif marker == Markers.APP[11]: pass elif marker == Markers.APP[12]: pass elif marker == Markers.APP[13]: pass elif marker == Markers.APP[14]: pass elif marker == Markers.APP[15]: pass else: raise ValueError("invailed marker. marker " + marker.to_bytes(1, 'big') + " found. class APP is not excepting it.\nAPP expecting marker is below.\n" + str([m.to_bytes(1, 'big') for m in Markers.APP])) warnings.warn("this type APP segment not supported. please inform to the author.")
33.042553
213
0.535737
180
1,553
4.566667
0.366667
0.206813
0.311436
0.364964
0.408759
0
0
0
0
0
0
0.027163
0.359948
1,553
46
214
33.76087
0.799799
0
0
0.348837
0
0
0.110816
0
0
0
0
0
0
1
0.046512
false
0.348837
0.093023
0
0.186047
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
045af2ff5be26158cd3e0e42d20b64eae8a8c3dd
61
py
Python
enthought/pyface/ui/wx/application_window.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
3
2016-12-09T06:05:18.000Z
2018-03-01T13:00:29.000Z
enthought/pyface/ui/wx/application_window.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
1
2020-12-02T00:51:32.000Z
2020-12-02T08:48:55.000Z
enthought/pyface/ui/wx/application_window.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
null
null
null
# proxy module from pyface.ui.wx.application_window import *
20.333333
45
0.803279
9
61
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.114754
61
2
46
30.5
0.888889
0.196721
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
f0f0512fe4749b33dd10418ce5a8f6655d2b317d
191
py
Python
handlers/comment_handler.py
waab76/WerewolfBot
b78ca0f65839cdf8a177cdeb529d4700e363bec9
[ "MIT" ]
null
null
null
handlers/comment_handler.py
waab76/WerewolfBot
b78ca0f65839cdf8a177cdeb529d4700e363bec9
[ "MIT" ]
null
null
null
handlers/comment_handler.py
waab76/WerewolfBot
b78ca0f65839cdf8a177cdeb529d4700e363bec9
[ "MIT" ]
null
null
null
''' Created on Dec 21, 2020 @author: rcurtis ''' import logging def handle_comment(comment): logging.info('Handling comment [%s] on submission [%s]', comment.id, comment.submission.id)
19.1
95
0.712042
26
191
5.192308
0.653846
0
0
0
0
0
0
0
0
0
0
0.036585
0.141361
191
10
95
19.1
0.786585
0.21466
0
0
0
0
0.27972
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
0b19d9194cdbb45f38b17f88d5eab59f7ae2601c
95
py
Python
doc/util/__init__.py
jhh67/chapel
f041470e9b88b5fc4914c75aa5a37efcb46aa08f
[ "ECL-2.0", "Apache-2.0" ]
1,602
2015-01-06T11:26:31.000Z
2022-03-30T06:17:21.000Z
doc/util/__init__.py
jhh67/chapel
f041470e9b88b5fc4914c75aa5a37efcb46aa08f
[ "ECL-2.0", "Apache-2.0" ]
11,789
2015-01-05T04:50:15.000Z
2022-03-31T23:39:19.000Z
doc/util/__init__.py
jhh67/chapel
f041470e9b88b5fc4914c75aa5a37efcb46aa08f
[ "ECL-2.0", "Apache-2.0" ]
498
2015-01-08T18:58:18.000Z
2022-03-20T15:37:45.000Z
"""This __init__.py allows the usage of python files in this directory as Sphinx extensions"""
31.666667
73
0.778947
15
95
4.666667
0.933333
0
0
0
0
0
0
0
0
0
0
0
0.147368
95
2
74
47.5
0.864198
0.926316
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
9bc3a7e11b989d63fbe6e38567c17d5e805c9acf
20,728
py
Python
test/test_pipeline/components/test_setup.py
LMZimmer/Auto-PyTorch_refactor
ac7a9ce35e87a428caca2ac108b362a54d3b8f3a
[ "Apache-2.0" ]
null
null
null
test/test_pipeline/components/test_setup.py
LMZimmer/Auto-PyTorch_refactor
ac7a9ce35e87a428caca2ac108b362a54d3b8f3a
[ "Apache-2.0" ]
34
2020-10-06T08:06:46.000Z
2021-01-21T13:23:34.000Z
test/test_pipeline/components/test_setup.py
LMZimmer/Auto-PyTorch_refactor
ac7a9ce35e87a428caca2ac108b362a54d3b8f3a
[ "Apache-2.0" ]
1
2020-10-14T12:25:47.000Z
2020-10-14T12:25:47.000Z
import copy import unittest.mock from ConfigSpace.configuration_space import ConfigurationSpace import numpy as np from sklearn.base import clone import torch import autoPyTorch.pipeline.components.setup.lr_scheduler.base_scheduler_choice as lr_components import autoPyTorch.pipeline.components.setup.network.base_network_choice as network_components import \ autoPyTorch.pipeline.components.setup.network_initializer.base_network_init_choice as network_initializer_components # noqa: E501 import autoPyTorch.pipeline.components.setup.optimizer.base_optimizer_choice as optimizer_components from autoPyTorch.pipeline.components.setup.lr_scheduler.base_scheduler_choice import ( BaseLRComponent, SchedulerChoice ) from autoPyTorch.pipeline.components.setup.network.base_network_choice import ( BaseNetworkComponent, NetworkChoice ) from autoPyTorch.pipeline.components.setup.network_initializer.base_network_init_choice import ( BaseNetworkInitializerComponent, NetworkInitializerChoice ) from autoPyTorch.pipeline.components.setup.optimizer.base_optimizer_choice import ( BaseOptimizerComponent, OptimizerChoice ) class DummyLR(BaseLRComponent): def __init__(self, random_state=None): pass @staticmethod def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() return cs def get_properties(dataset_properties=None): return { 'shortname': 'Dummy', 'name': 'Dummy', } class DummyOptimizer(BaseOptimizerComponent): def __init__(self, random_state=None): pass @staticmethod def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() return cs def get_properties(dataset_properties=None): return { 'shortname': 'Dummy', 'name': 'Dummy', } class DummyNet(BaseNetworkComponent): def __init__(self, random_state=None): pass @staticmethod def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() return cs def get_properties(dataset_properties=None): return { 'shortname': 'Dummy', 'name': 'Dummy', } class 
DummyNetworkInitializer(BaseNetworkInitializerComponent): def __init__(self, random_state=None): pass @staticmethod def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() return cs def get_properties(dataset_properties=None): return { 'shortname': 'Dummy', 'name': 'Dummy', } class SchedulerTest(unittest.TestCase): def test_every_scheduler_is_valid(self): """ Makes sure that every scheduler is a valid estimator. That is, we can fully create an object via get/set params. This also test that we can properly initialize each one of them """ scheduler_choice = SchedulerChoice(dataset_properties={}) # Make sure all components are returned self.assertEqual(len(scheduler_choice.get_components().keys()), 7) # For every scheduler in the components, make sure # that it complies with the scikit learn estimator. # This is important because usually components are forked to workers, # so the set/get params methods should recreate the same object for name, scheduler in scheduler_choice.get_components().items(): config = scheduler.get_hyperparameter_search_space().sample_configuration() estimator = scheduler(**config) estimator_clone = clone(estimator) estimator_clone_params = estimator_clone.get_params() # Make sure all keys are copied properly for k, v in estimator.get_params().items(): self.assertIn(k, estimator_clone_params) # Make sure the params getter of estimator are honored klass = estimator.__class__ new_object_params = estimator.get_params(deep=False) for name, param in new_object_params.items(): new_object_params[name] = clone(param, safe=False) new_object = klass(**new_object_params) params_set = new_object.get_params(deep=False) for name in new_object_params: param1 = new_object_params[name] param2 = params_set[name] self.assertEqual(param1, param2) def test_get_set_config_space(self): """Make sure that we can setup a valid choice in the scheduler choice""" scheduler_choice = SchedulerChoice(dataset_properties={}) cs = 
scheduler_choice.get_hyperparameter_search_space() # Make sure that all hyperparameters are part of the serach space self.assertListEqual( sorted(cs.get_hyperparameter('__choice__').choices), sorted(list(scheduler_choice.get_components().keys())) ) # Make sure we can properly set some random configs # Whereas just one iteration will make sure the algorithm works, # doing five iterations increase the confidence. We will be able to # catch component specific crashes for i in range(5): config = cs.sample_configuration() config_dict = copy.deepcopy(config.get_dictionary()) scheduler_choice.set_hyperparameters(config) self.assertEqual(scheduler_choice.choice.__class__, scheduler_choice.get_components()[config_dict['__choice__']]) # Then check the choice configuration selected_choice = config_dict.pop('__choice__', None) for key, value in config_dict.items(): # Remove the selected_choice string from the parameter # so we can query in the object for it key = key.replace(selected_choice + ':', '') self.assertIn(key, vars(scheduler_choice.choice)) self.assertEqual(value, scheduler_choice.choice.__dict__[key]) def test_scheduler_add(self): """Makes sure that a component can be added to the CS""" # No third party components to start with self.assertEqual(len(lr_components._addons.components), 0) # Then make sure the scheduler can be added and query'ed lr_components.add_scheduler(DummyLR) self.assertEqual(len(lr_components._addons.components), 1) cs = SchedulerChoice(dataset_properties={}).get_hyperparameter_search_space() self.assertIn('DummyLR', str(cs)) class OptimizerTest(unittest.TestCase): def test_every_optimizer_is_valid(self): """ Makes sure that every optimizer is a valid estimator. That is, we can fully create an object via get/set params. 
This also test that we can properly initialize each one of them """ optimizer_choice = OptimizerChoice(dataset_properties={}) # Make sure all components are returned self.assertEqual(len(optimizer_choice.get_components().keys()), 4) # For every optimizer in the components, make sure # that it complies with the scikit learn estimator. # This is important because usually components are forked to workers, # so the set/get params methods should recreate the same object for name, optimizer in optimizer_choice.get_components().items(): config = optimizer.get_hyperparameter_search_space().sample_configuration() estimator = optimizer(**config) estimator_clone = clone(estimator) estimator_clone_params = estimator_clone.get_params() # Make sure all keys are copied properly for k, v in estimator.get_params().items(): self.assertIn(k, estimator_clone_params) # Make sure the params getter of estimator are honored klass = estimator.__class__ new_object_params = estimator.get_params(deep=False) for name, param in new_object_params.items(): new_object_params[name] = clone(param, safe=False) new_object = klass(**new_object_params) params_set = new_object.get_params(deep=False) for name in new_object_params: param1 = new_object_params[name] param2 = params_set[name] self.assertEqual(param1, param2) def test_get_set_config_space(self): """Make sure that we can setup a valid choice in the optimizer choice""" optimizer_choice = OptimizerChoice(dataset_properties={}) cs = optimizer_choice.get_hyperparameter_search_space() # Make sure that all hyperparameters are part of the serach space self.assertListEqual( sorted(cs.get_hyperparameter('__choice__').choices), sorted(list(optimizer_choice.get_components().keys())) ) # Make sure we can properly set some random configs # Whereas just one iteration will make sure the algorithm works, # doing five iterations increase the confidence. 
We will be able to # catch component specific crashes for i in range(5): config = cs.sample_configuration() config_dict = copy.deepcopy(config.get_dictionary()) optimizer_choice.set_hyperparameters(config) self.assertEqual(optimizer_choice.choice.__class__, optimizer_choice.get_components()[config_dict['__choice__']]) # Then check the choice configuration selected_choice = config_dict.pop('__choice__', None) for key, value in config_dict.items(): # Remove the selected_choice string from the parameter # so we can query in the object for it key = key.replace(selected_choice + ':', '') self.assertIn(key, vars(optimizer_choice.choice)) self.assertEqual(value, optimizer_choice.choice.__dict__[key]) def test_optimizer_add(self): """Makes sure that a component can be added to the CS""" # No third party components to start with self.assertEqual(len(optimizer_components._addons.components), 0) # Then make sure the optimizer can be added and query'ed optimizer_components.add_optimizer(DummyOptimizer) self.assertEqual(len(optimizer_components._addons.components), 1) cs = OptimizerChoice(dataset_properties={}).get_hyperparameter_search_space() self.assertIn('DummyOptimizer', str(cs)) class NetworkTest(unittest.TestCase): def test_every_network_is_valid(self): """ Makes sure that every network is a valid estimator. That is, we can fully create an object via get/set params. This also test that we can properly initialize each one of them """ network_choice = NetworkChoice(dataset_properties={}) # Make sure all components are returned self.assertEqual(len(network_choice.get_components().keys()), 1) # For every network in the components, make sure # that it complies with the scikit learn estimator. 
# This is important because usually components are forked to workers, # so the set/get params methods should recreate the same object for name, network in network_choice.get_components().items(): config = network.get_hyperparameter_search_space().sample_configuration() estimator = network(**config) estimator_clone = clone(estimator) estimator_clone_params = estimator_clone.get_params() # Make sure all keys are copied properly for k, v in estimator.get_params().items(): self.assertIn(k, estimator_clone_params) # Make sure the params getter of estimator are honored klass = estimator.__class__ new_object_params = estimator.get_params(deep=False) for name, param in new_object_params.items(): new_object_params[name] = clone(param, safe=False) new_object = klass(**new_object_params) params_set = new_object.get_params(deep=False) for name in new_object_params: param1 = new_object_params[name] param2 = params_set[name] self.assertEqual(param1, param2) def test_backbone_head_net(self): network_choice = NetworkChoice(dataset_properties={}) task_types = {"image_classification": ((1, 3, 64, 64), (5,)), "image_regression": ((1, 3, 64, 64), (1,)), "time_series_classification": ((1, 32, 6), (5,)), "time_series_regression": ((1, 32, 6), (1,)), "tabular_classification": ((1, 100,), (5,)), "tabular_regression": ((1, 100), (1,))} device = torch.device("cpu") for task_type, (input_shape, output_shape) in task_types.items(): cs = network_choice.get_hyperparameter_search_space(dataset_properties={"task_type": task_type}, include=["BackboneHeadNet"]) # test 10 random configurations for i in range(10): config = cs.sample_configuration() network_choice.set_hyperparameters(config) network_choice.fit(X={"X_train": np.zeros(input_shape), "y_train": np.zeros(output_shape)}, y=None) self.assertNotEqual(network_choice.choice.network, None) network_choice.choice.to(device) dummy_input = torch.randn((2, *input_shape[1:]), dtype=torch.float) output = network_choice.choice.network(dummy_input) 
self.assertEqual(output.shape[1:], output_shape) def test_get_set_config_space(self): """Make sure that we can setup a valid choice in the network choice""" network_choice = NetworkChoice(dataset_properties={}) cs = network_choice.get_hyperparameter_search_space() # Make sure that all hyperparameters are part of the search space self.assertListEqual( sorted(cs.get_hyperparameter('__choice__').choices), sorted(list(network_choice.get_components().keys())) ) # Make sure we can properly set some random configs # Whereas just one iteration will make sure the algorithm works, # doing five iterations increase the confidence. We will be able to # catch component specific crashes for i in range(5): config = cs.sample_configuration() config_dict = copy.deepcopy(config.get_dictionary()) network_choice.set_hyperparameters(config) self.assertEqual(network_choice.choice.__class__, network_choice.get_components()[config_dict['__choice__']]) # Then check the choice configuration selected_choice = config_dict.pop('__choice__', None) self.assertNotEqual(selected_choice, None) for key, value in config_dict.items(): # Remove the selected_choice string from the parameter # so we can query in the object for it key = key.replace(selected_choice + ':', '') # In the case of MLP, parameters are dynamic, so they exist in config parameters = vars(network_choice.choice) parameters.update(vars(network_choice.choice)['config']) self.assertIn(key, parameters) self.assertEqual(value, parameters[key]) def test_network_add(self): """Makes sure that a component can be added to the CS""" # No third party components to start with self.assertEqual(len(network_components._addons.components), 0) # Then make sure the scheduler can be added and query'ed network_components.add_network(DummyNet) self.assertEqual(len(network_components._addons.components), 1) cs = NetworkChoice(dataset_properties={}).get_hyperparameter_search_space() self.assertIn('DummyNet', str(cs)) class 
NetworkInitializerTest(unittest.TestCase): def test_every_network_initializer_is_valid(self): """ Makes sure that every network_initializer is a valid estimator. That is, we can fully create an object via get/set params. This also test that we can properly initialize each one of them """ network_initializer_choice = NetworkInitializerChoice(dataset_properties={}) # Make sure all components are returned self.assertEqual(len(network_initializer_choice.get_components().keys()), 5) # For every optimizer in the components, make sure # that it complies with the scikit learn estimator. # This is important because usually components are forked to workers, # so the set/get params methods should recreate the same object for name, network_initializer in network_initializer_choice.get_components().items(): config = network_initializer.get_hyperparameter_search_space().sample_configuration() estimator = network_initializer(**config) estimator_clone = clone(estimator) estimator_clone_params = estimator_clone.get_params() # Make sure all keys are copied properly for k, v in estimator.get_params().items(): self.assertIn(k, estimator_clone_params) # Make sure the params getter of estimator are honored klass = estimator.__class__ new_object_params = estimator.get_params(deep=False) for name, param in new_object_params.items(): new_object_params[name] = clone(param, safe=False) new_object = klass(**new_object_params) params_set = new_object.get_params(deep=False) for name in new_object_params: param1 = new_object_params[name] param2 = params_set[name] self.assertEqual(param1, param2) def test_get_set_config_space(self): """Make sure that we can setup a valid choice in the network_initializer choice""" network_initializer_choice = NetworkInitializerChoice(dataset_properties={}) cs = network_initializer_choice.get_hyperparameter_search_space() # Make sure that all hyperparameters are part of the serach space self.assertListEqual( sorted(cs.get_hyperparameter('__choice__').choices), 
sorted(list(network_initializer_choice.get_components().keys())) ) # Make sure we can properly set some random configs # Whereas just one iteration will make sure the algorithm works, # doing five iterations increase the confidence. We will be able to # catch component specific crashes for i in range(5): config = cs.sample_configuration() config_dict = copy.deepcopy(config.get_dictionary()) network_initializer_choice.set_hyperparameters(config) self.assertEqual(network_initializer_choice.choice.__class__, network_initializer_choice.get_components()[config_dict['__choice__']]) # Then check the choice configuration selected_choice = config_dict.pop('__choice__', None) for key, value in config_dict.items(): # Remove the selected_choice string from the parameter # so we can query in the object for it key = key.replace(selected_choice + ':', '') self.assertIn(key, vars(network_initializer_choice.choice)) self.assertEqual(value, network_initializer_choice.choice.__dict__[key]) def test_network_initializer_add(self): """Makes sure that a component can be added to the CS""" # No third party components to start with self.assertEqual(len(network_initializer_components._addons.components), 0) # Then make sure the network_initializer can be added and query'ed network_initializer_components.add_network_initializer(DummyNetworkInitializer) self.assertEqual(len(network_initializer_components._addons.components), 1) cs = NetworkInitializerChoice(dataset_properties={}).get_hyperparameter_search_space() self.assertIn('DummyNetworkInitializer', str(cs)) if __name__ == '__main__': unittest.main()
43.729958
134
0.660604
2,357
20,728
5.571913
0.100127
0.021929
0.027412
0.036245
0.818168
0.766314
0.727633
0.685525
0.622858
0.613112
0
0.004973
0.262736
20,728
473
135
43.82241
0.854404
0.228724
0
0.512456
0
0
0.028551
0.00594
0
0
0
0
0.153025
1
0.088968
false
0.014235
0.049822
0.014235
0.19573
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
501f22b4b9d1753d6f7fcb9d75a87ab0aa2a259b
269
py
Python
src/ralph/lib/permissions/__init__.py
DoNnMyTh/ralph
97b91639fa68965ad3fd9d0d2652a6545a2a5b72
[ "Apache-2.0" ]
1,668
2015-01-01T12:51:20.000Z
2022-03-29T09:05:35.000Z
src/ralph/lib/permissions/__init__.py
hq-git/ralph
e2448caf02d6e5abfd81da2cff92aefe0a534883
[ "Apache-2.0" ]
2,314
2015-01-02T13:26:26.000Z
2022-03-29T04:06:03.000Z
src/ralph/lib/permissions/__init__.py
hq-git/ralph
e2448caf02d6e5abfd81da2cff92aefe0a534883
[ "Apache-2.0" ]
534
2015-01-05T12:40:28.000Z
2022-03-29T21:10:12.000Z
from ralph.lib.permissions.models import ( PermByFieldMixin, PermissionsForObjectMixin, user_permission ) default_app_config = 'ralph.lib.permissions.apps.PermissionAppConfig' __all__ = ['PermByFieldMixin', 'PermissionsForObjectMixin', 'user_permission']
26.9
78
0.795539
23
269
8.956522
0.695652
0.07767
0.184466
0.533981
0
0
0
0
0
0
0
0
0.111524
269
9
79
29.888889
0.861925
0
0
0
0
0
0.379182
0.263941
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
50261927775bbf70b360739faf856fa3e9f56bea
51
py
Python
src/msys/generators/__init__.py
willi-z/mdd
bdd9492429640d86d36e21a334645922e4832956
[ "BSD-3-Clause" ]
2
2021-05-19T12:02:28.000Z
2021-06-14T07:29:47.000Z
src/msys/generators/__init__.py
willi-z/msys
bdd9492429640d86d36e21a334645922e4832956
[ "BSD-3-Clause" ]
null
null
null
src/msys/generators/__init__.py
willi-z/msys
bdd9492429640d86d36e21a334645922e4832956
[ "BSD-3-Clause" ]
null
null
null
from .register_generator import RegisterGenerator
17
49
0.882353
5
51
8.8
1
0
0
0
0
0
0
0
0
0
0
0
0.098039
51
2
50
25.5
0.956522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
504aa8951bd6a9fed86384f158fd9a5a275cb1c5
5,942
py
Python
download_gfs025.py
eshernan/wrfbuilding
c76b591bb2341a4e7b415505bc420584dbfacb15
[ "Apache-2.0" ]
null
null
null
download_gfs025.py
eshernan/wrfbuilding
c76b591bb2341a4e7b415505bc420584dbfacb15
[ "Apache-2.0" ]
null
null
null
download_gfs025.py
eshernan/wrfbuilding
c76b591bb2341a4e7b415505bc420584dbfacb15
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python ################################################################# # Python Script to retrieve 372 online Data files of 'ds084.1', # total 123.88G. This script uses 'requests' to download data. # # Highlight this script by Select All, Copy and Paste it into a file; # make the file executable and run it on command line. # # You need pass in your password as a parameter to execute # this script; or you can set an environment variable RDAPSWD # if your Operating System supports it. # # Contact rpconroy@ucar.edu (Riley Conroy) for further assistance. ################################################################# import sys, os import requests def check_file_status(filepath, filesize): sys.stdout.write('\r') sys.stdout.flush() size = int(os.stat(filepath).st_size) percent_complete = (size/filesize)*100 sys.stdout.write('%.3f %s' % (percent_complete, '% Completed')) sys.stdout.flush() # Try to get password if len(sys.argv) < 2 and not 'RDAPSWD' in os.environ: try: import getpass input = getpass.getpass except: try: input = raw_input except: pass pswd = input('Password: ') else: try: pswd = sys.argv[1] except: pswd = os.environ['RDAPSWD'] url = 'https://rda.ucar.edu/cgi-bin/login' values = {'email' : 'hrico@javeriana.edu.co', 'passwd' : pswd, 'action' : 'login'} # Authenticate ret = requests.post(url,data=values) if ret.status_code != 200: print('Bad Authentication') print(ret.text) exit(1) dspath = 'https://rda.ucar.edu/data/ds084.1/' filelist = [ '2018/20180509/gfs.0p25.2018050900.f000.grib2', '2018/20180509/gfs.0p25.2018050900.f003.grib2', '2018/20180509/gfs.0p25.2018050900.f006.grib2', '2018/20180509/gfs.0p25.2018050900.f009.grib2', '2018/20180509/gfs.0p25.2018050900.f012.grib2', '2018/20180509/gfs.0p25.2018050900.f015.grib2', '2018/20180509/gfs.0p25.2018050900.f018.grib2', '2018/20180509/gfs.0p25.2018050900.f021.grib2', '2018/20180509/gfs.0p25.2018050900.f024.grib2', '2018/20180509/gfs.0p25.2018050900.f027.grib2', 
'2018/20180509/gfs.0p25.2018050900.f030.grib2', '2018/20180509/gfs.0p25.2018050900.f033.grib2', '2018/20180509/gfs.0p25.2018050900.f036.grib2', '2018/20180509/gfs.0p25.2018050900.f039.grib2', '2018/20180509/gfs.0p25.2018050900.f042.grib2', '2018/20180509/gfs.0p25.2018050900.f045.grib2', '2018/20180509/gfs.0p25.2018050900.f048.grib2', '2018/20180509/gfs.0p25.2018050900.f051.grib2', '2018/20180509/gfs.0p25.2018050900.f054.grib2', '2018/20180509/gfs.0p25.2018050900.f057.grib2', '2018/20180509/gfs.0p25.2018050900.f060.grib2', '2018/20180509/gfs.0p25.2018050900.f063.grib2', '2018/20180509/gfs.0p25.2018050900.f066.grib2', '2018/20180509/gfs.0p25.2018050900.f069.grib2', '2018/20180509/gfs.0p25.2018050900.f072.grib2', '2018/20180509/gfs.0p25.2018050900.f075.grib2', '2018/20180510/gfs.0p25.2018051000.f000.grib2', '2018/20180510/gfs.0p25.2018051000.f003.grib2', '2018/20180510/gfs.0p25.2018051000.f006.grib2', '2018/20180510/gfs.0p25.2018051000.f009.grib2', '2018/20180510/gfs.0p25.2018051000.f012.grib2', '2018/20180510/gfs.0p25.2018051000.f015.grib2', '2018/20180510/gfs.0p25.2018051000.f018.grib2', '2018/20180510/gfs.0p25.2018051000.f021.grib2', '2018/20180510/gfs.0p25.2018051000.f024.grib2', '2018/20180510/gfs.0p25.2018051000.f027.grib2', '2018/20180510/gfs.0p25.2018051000.f030.grib2', '2018/20180510/gfs.0p25.2018051000.f033.grib2', '2018/20180510/gfs.0p25.2018051000.f036.grib2', '2018/20180510/gfs.0p25.2018051000.f039.grib2', '2018/20180510/gfs.0p25.2018051000.f042.grib2', '2018/20180510/gfs.0p25.2018051000.f045.grib2', '2018/20180510/gfs.0p25.2018051000.f048.grib2', '2018/20180510/gfs.0p25.2018051000.f051.grib2', '2018/20180510/gfs.0p25.2018051000.f054.grib2', '2018/20180510/gfs.0p25.2018051000.f057.grib2', '2018/20180510/gfs.0p25.2018051000.f060.grib2', '2018/20180510/gfs.0p25.2018051000.f063.grib2', '2018/20180510/gfs.0p25.2018051000.f066.grib2', '2018/20180510/gfs.0p25.2018051000.f069.grib2', '2018/20180510/gfs.0p25.2018051000.f072.grib2', 
'2018/20180510/gfs.0p25.2018051000.f075.grib2', '2018/20180511/gfs.0p25.2018051100.f000.grib2', '2018/20180511/gfs.0p25.2018051100.f003.grib2', '2018/20180511/gfs.0p25.2018051100.f006.grib2', '2018/20180511/gfs.0p25.2018051100.f009.grib2', '2018/20180511/gfs.0p25.2018051100.f012.grib2', '2018/20180511/gfs.0p25.2018051100.f015.grib2', '2018/20180511/gfs.0p25.2018051100.f018.grib2', '2018/20180511/gfs.0p25.2018051100.f021.grib2', '2018/20180511/gfs.0p25.2018051100.f024.grib2', '2018/20180511/gfs.0p25.2018051100.f027.grib2', '2018/20180511/gfs.0p25.2018051100.f030.grib2', '2018/20180511/gfs.0p25.2018051100.f033.grib2', '2018/20180511/gfs.0p25.2018051100.f036.grib2', '2018/20180511/gfs.0p25.2018051100.f039.grib2', '2018/20180511/gfs.0p25.2018051100.f042.grib2', '2018/20180511/gfs.0p25.2018051100.f045.grib2', '2018/20180511/gfs.0p25.2018051100.f048.grib2', '2018/20180511/gfs.0p25.2018051100.f051.grib2', '2018/20180511/gfs.0p25.2018051100.f054.grib2', '2018/20180511/gfs.0p25.2018051100.f057.grib2', '2018/20180511/gfs.0p25.2018051100.f060.grib2', '2018/20180511/gfs.0p25.2018051100.f063.grib2', '2018/20180511/gfs.0p25.2018051100.f066.grib2', '2018/20180511/gfs.0p25.2018051100.f069.grib2', '2018/20180511/gfs.0p25.2018051100.f072.grib2', '2018/20180511/gfs.0p25.2018051100.f075.grib2'] for file in filelist: filename=dspath+file file_base = os.path.basename(file) print('Downloading',file_base) req = requests.get(filename, cookies = ret.cookies, allow_redirects=True, stream=True) filesize = int(req.headers['Content-length']) with open(file_base, 'wb') as outfile: chunk_size=1048576 for chunk in req.iter_content(chunk_size=chunk_size): outfile.write(chunk) if chunk_size < filesize: check_file_status(file_base, filesize) check_file_status(file_base, filesize) print()
40.421769
90
0.73073
835
5,942
5.173653
0.220359
0.126389
0.090278
0.114352
0.628935
0.622222
0.016204
0
0
0
0
0.423744
0.088522
5,942
146
91
40.69863
0.373892
0.087008
0
0.08
0
0
0.687938
0.654043
0
0
0
0
0
1
0.008
false
0.04
0.024
0
0.032
0.032
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
50506d42c9f428b9173fc47feea88630950355f5
184
py
Python
utils/test_utils.py
aaron-parsons/repoNoData
40b48807ecabea7a3a5b2aeee35e71c36c250289
[ "Apache-2.0" ]
null
null
null
utils/test_utils.py
aaron-parsons/repoNoData
40b48807ecabea7a3a5b2aeee35e71c36c250289
[ "Apache-2.0" ]
null
null
null
utils/test_utils.py
aaron-parsons/repoNoData
40b48807ecabea7a3a5b2aeee35e71c36c250289
[ "Apache-2.0" ]
null
null
null
''' some utils for the test ''' import inspect def get_test_data_path(fname): path = inspect.stack()[0][1].split('repoNoData')[0]+'repo4data/test_data/%s' return path % fname
20.444444
80
0.684783
28
184
4.357143
0.714286
0.131148
0
0
0
0
0
0
0
0
0
0.025478
0.146739
184
9
81
20.444444
0.751592
0.125
0
0
0
0
0.207792
0.142857
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
acb6ca3b86901b1923968a90e5cf68e1ef5b0f46
105
py
Python
__init__.py
hperugu/TransG
3986a08760369736e1bca1f4ab449bf390850b39
[ "MIT" ]
null
null
null
__init__.py
hperugu/TransG
3986a08760369736e1bca1f4ab449bf390850b39
[ "MIT" ]
null
null
null
__init__.py
hperugu/TransG
3986a08760369736e1bca1f4ab449bf390850b39
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Tue Jun 29 14:24:51 2021 @author: Hari Kishan Perugu """
13.125
36
0.561905
16
105
3.6875
1
0
0
0
0
0
0
0
0
0
0
0.166667
0.257143
105
7
37
15
0.589744
0.828571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
acbc0ecd56e8f3648d00664b684f9a4f3cbb90a0
175
py
Python
store/config.py
isaacrivas10/fb_chatbot
01a78e690e698609a5b89dc8f04c216b248f5047
[ "MIT" ]
null
null
null
store/config.py
isaacrivas10/fb_chatbot
01a78e690e698609a5b89dc8f04c216b248f5047
[ "MIT" ]
5
2019-12-24T08:14:37.000Z
2019-12-24T08:56:39.000Z
store/config.py
isaacrivas10/fb_chatbot
01a78e690e698609a5b89dc8f04c216b248f5047
[ "MIT" ]
null
null
null
import os class Config: SECRET_KEY= os.environ.get('SECRET_KEY') SQLALCHEMY_DATABASE_URI= os.environ.get('SQLALCHEMY_DATABASE_URI') FLASK_ADMIN_SWATCH = 'flatly'
25
70
0.76
24
175
5.208333
0.625
0.144
0.192
0
0
0
0
0
0
0
0
0
0.137143
175
7
71
25
0.827815
0
0
0
0
0
0.222857
0.131429
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
acdb7a684dbde1c2a7f07f32c2b1297b67dd2aec
1,246
py
Python
ztom/__init__.py
ztomsy/ztom
6cfb5e411c47678d3e6ab37aa98ff07803437854
[ "MIT" ]
33
2019-05-02T13:22:59.000Z
2022-03-19T22:29:20.000Z
ztom/__init__.py
ztomsy/ztom
6cfb5e411c47678d3e6ab37aa98ff07803437854
[ "MIT" ]
1
2021-02-11T06:17:05.000Z
2021-02-11T06:17:05.000Z
ztom/__init__.py
ztomsy/ztom
6cfb5e411c47678d3e6ab37aa98ff07803437854
[ "MIT" ]
11
2019-11-20T06:53:47.000Z
2021-09-16T16:14:09.000Z
from ztom import * from ztom.cli import * from ztom.timer import Timer from ztom.utils import * from ztom.exchanges import * from ztom.exchange_wrapper import ccxtExchangeWrapper from ztom.exchange_wrapper import ExchangeWrapperOfflineFetchError from ztom.exchange_wrapper import ExchangeWrapperError from ztom.stats_influx import StatsInflux from ztom.datastorage import DataStorage from ztom.reporter import Reporter, MongoReporter from ztom.models.deal import DealReport from ztom.models.trade_order import TradeOrderReport from ztom.models.tickers import Tickers from ztom.reporter_sqla import SqlaReporter from ztom.orderbook import OrderBook from ztom.orderbook import Order from ztom.orderbook import Depth from ztom.trade_orders import * from ztom.trade_order_manager import * from ztom import core from ztom.bot import Bot from ztom.errors import * from ztom.action_order import ActionOrder from ztom.recovery_orders import RecoveryOrder from ztom.fok_order import FokOrder, FokThresholdTakerPriceOrder from ztom.order_manager import * from ztom.throttle import Throttle from ztom import * from ztom.models.remainings import Remainings # Legacy support from ztom.owa_manager import OwaManager from ztom.owa_orders import OrderWithAim
33.675676
66
0.854735
171
1,246
6.140351
0.280702
0.24381
0.12
0.065714
0.174286
0
0
0
0
0
0
0
0.109149
1,246
36
67
34.611111
0.945946
0.011236
0
0.0625
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
acf7a9f2ee94f4c250223d9b9b8ec0f20b090480
193
py
Python
python/testData/quickFixes/PyTypeHintsQuickFixTest/instanceCheckOnReference.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/quickFixes/PyTypeHintsQuickFixTest/instanceCheckOnReference.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/quickFixes/PyTypeHintsQuickFixTest/instanceCheckOnReference.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
from typing import Callable class A: pass C = Callable[..., str] assert issubclass(A, <error descr="Parameterized generics cannot be used with instance and class checks"><caret>C</error>)
27.571429
122
0.740933
28
193
5.107143
0.821429
0
0
0
0
0
0
0
0
0
0
0
0.150259
193
7
122
27.571429
0.871951
0
0
0
0
0
0.350515
0
0
0
0
0
0.2
0
null
null
0.2
0.2
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
4
4a197644e6ae91fca7341bd3eb45b12b7ca04c9e
8,879
py
Python
retarded.py
velo223/Velo-s-Spam-Scripts
286943dee2734b0a412602794026b91b210565bc
[ "CC0-1.0" ]
null
null
null
retarded.py
velo223/Velo-s-Spam-Scripts
286943dee2734b0a412602794026b91b210565bc
[ "CC0-1.0" ]
null
null
null
retarded.py
velo223/Velo-s-Spam-Scripts
286943dee2734b0a412602794026b91b210565bc
[ "CC0-1.0" ]
null
null
null
# Copypastas by BACVelocifaptor#6969, he#9999 and u/GerardDG on Reddit. Script written by BACVelocifaptor#6969. If you kang, you big gei. opt = { "v":""" I always knew humanity cannot be the only cognizant species in the galaxy. The fact that we are less than type 1 on the Kardashev scale always bothered me, as did the hubris of humans who believed only in the power of money and sex, only on this plane of existence. I was always open to the existence of higher civilizations, higher-dimensional beings. What I did not really subscribe to was the notion that this life could be a simulation created by these higher-dimensional beings for their amusement. So I was rudely shocked while aboard an airplane I was kidnapped by a giant tentacled creature, its body apparently so massive that its tentacles extended into hyperspace. Through the jetstream, it picked me up, and led me to another world through some watery ether. And then I saw what I thought was God in all his glory: a giant Patrick the starfish angel. But he was just an archangel, who announced the coming of the one true God. And that is when I beheld his beautiful personage, a four-headed SpongeBob SquarePants adonis, his name written on the giant scarlet popcorn boxes distributed to all his devotees, who previously appeared as animalistic cartoons in a very realistic animation. Yes, it was him, the Being-Best-Friends-With-A-Woman-Is-Better-Than-Getting-Laid-Once God. As I chomped on the divine, giant popcorn, which was like Cheetos Flamin' Hot Popcorn but with KFC India Hot Wings masala instead, I saw the light and sought out my best friend, as I leaned my head on her shoulder, contemplating the very meaning of existence and the nature of the universe. ""","c":''' It’s late, you’ve been in bed sleeping for a few hours. I crawl under the covers. I always sleep naked, so the cool sheets give my whole body goosebumps. 
I snuggle on my side into the blankets and shiver.\n\nSuddenly I feel you behind me, pressing your warm body to mine, wrapping your arms tight around me. You rub my skin firmly, warming me. Your hand makes its way to my hip and pulls me close. Your cock is hard and throbbing; pulsing between my ass cheeks. I push back, wiggling my hips. Your other hand moves up my body and moves around my neck. You gently squeeze and tilt my head back. You start to grip my neck tighter. You begin to whisper sweet and dirty things in my ear; telling me how you can’t wait to fill me, how badly you want me, how your cock will fit perfectly in my tight pussy.\n \nYour hand on my hip slides down between my legs. You touch my wet pussy, but I can tell you’re not satisfied. You know how to fix that though. You grip my throat tight and whisper:\n\n“Baby…make Daddy proud”\n\nI can feel my pussy soak your fingers when you say that. You know I love it when you’re my Daddy. You chuckle and moan your approval.\n\n“Mmm. My baby girl needs her Daddy to fill her pussy. Are you going to be good and take your Daddy’s big cock?”\n\nAs you say that your fingers slide deep into my pussy. I try to scream, but your hand on my throat muffles the sound. I grind back to try to find release. You’re right - I am so turned on when you take control of my body. I push back on your fingers and start to thrust. I’m desperate to cum for you; to make my Daddy proud. I can feel you smile as I wantonly fuck your fingers.\n\n“That’s right. Be a good girl and cum for your Daddy.”\n\nI let out a moan. My whole body tenses up and my back arches as I gush on your fingers. My pussy clenches and spasms, trying to find what it desperately needs: my Daddy’s hard cock to stuff it full of cum. You sense my need and chuckle in my ear.\n\n“Come on my greedy little slut. Show your Daddy how bad you need it.”\n\nI roll towards you on my other side and wrap my leg around your waist. 
My hand goes between our legs and grips your hard cock. I can feel precum oozing from the tip. I play with your cock and guide it between my legs where my spread, swollen pussy is waiting for you. I thrust forward so that the tip of your cock is touching my clit. I guide you around, smearing my clit with your precum. Your hand moves to my ass and forcefully spreads me apart, causing the tip of your cock to slip into my wet pussy. I clench around you, trying to draw you in deeper. The penetration is enough to drive me wild. I’m so close to cumming for you, but I need more.\n\nWithout warning you wrap your arms tight around me, pin me to your chest, and roll on to your back. You grip my hair and kiss me deeply. The tip of your cock is teasing my pussy. I’m trying to grind my hips to take you in. You move your hands down to my ass and give me a sharp swat. I yelp in pain and surprise. You use your hands to spread my ass wide as you tell me,\n\n“Baby, put it in. Bounce on your Daddy’s cock.”\n\nI don’t waste a moment. I slide down your thick shaft and stay there a moment, savoring the feeling of being full of your cock. It’s short-lived as I quickly feel the sting of your hand on my ass.\n\n“Bounce baby girl. Make Daddy drain his balls in your hungry little pussy.”\n\nI immediately start sliding up and down your thick cock. I can feel my cum dripping down your shaft. I feel my pussy lips spread and stretch around your cock as I start to fuck you. My pussy is so wet I can hear it each time I thrust your cock deep. My tits bounce as I ride you. Your fingers dig into my hips. I can feel your cock twitch and throb. You’re so close. I need your cum.\n\nI stop a moment to put my feet under me and raise myself up in a squatting position. I know this is what you really wanted all along; it will make you cum so hard. I start with shallow thrusts that tease your cockhead. I move down with deep, slower thrusts and feel the tip of your cock swell as I do. I smile. 
With my final thrusts I take all of your cock. I clench my pussy as I lift myself back to the tip and that’s when I feel the spurts of your thick, hot cum. I lower myself back down as you twitch, pushing your cum deep. You hold me there so you can pump me full. I grind to make sure your balls are totally empty. I groan your name as I cum for the last time.\n\nI shift back to my knees and bend forward to collapse on your chest. Your breathing is heavy but starting to slow. You wrap your arms around me and start giving me little kisses all over my neck. You whisper in my ear how much you love me, how you love it when I get so turned on when you talk dirty to me. You gently roll me on my back and start rubbing my sore thighs. You kiss me lovingly, bringing me down as I start to relax and drift to sleep.\n\nAs I start to feel your cum drip from my pussy I think, “I can’t wait until my Daddy fucks me again.” ''',"d":""" Zwemmen in Bacardi Lemon is eigenlijk helemaal niet chill. Denk maar na. Ten eerste is het zoete plakzooi. Bacardi is 300 percent suiker weet je, het is gewoon vloeibare kauwgom. Als je daarin gaat zwemmen gaat dat enorm kleven.\n\nTen tweede ben je waarschijnlijk dronken voordat je je eerste baantje hebt gezwommen. De alcohol krijg je niet alleen binnen door het door te slikken maar ook via je gehemelte enzo. Het doet pijn aan je ogen en ik gok dat het brandt in je neus. Dronken worden is op zich lachen, maar het belemmert je zwemvermogen. Plus je kan moeilijk bijhouden hoeveel je binnenkrijgt. Dus de kans bestaat dat je jezelf in een coma zuipt en/of dat je in de Bacardi Lemon verdrinkt. Een ignobele dood als je het mij vraagt.\n\nMaar wat dus wel een goeie oplossing is, is als je een Bacardi Lemon sauna maakt. Dan heb je wel de alcohol, maar niet de zoete teringzooi (want die blijft achter na distillatie en de kans op comazuipen is kleiner). """, "o":''' There was once an unfortunate soul known as Onestone. 
As is probably evident from his name, he only had one testicle. He was always ostracized and made fun of as a kid. One day, he was drinking at a bar, when he met this cute girl, Beverly Bird, who was a childhood acquaintance. She found his name and condition very quaint, but he felt offended by it, so he got her drunk, took her home, and fucked her till she died. He dumped her body in a vat of acid. Two weeks later, Beverly's cousin, Penny Bird, who had no idea of the ill fate that had befallen her sister, saw Onestone at the same bar. "Hey Onestone. How's it hanging?" she asked. Onestone, offended by this, got her drunk and took her home. But no matter how brutally he'd fuck her, she just wouldn't die. Why? Because you can't kill two Birds with Onestone. ''' } userinput = input(""" Choose your option: [V]elo [C]at [D]utch [O]nestone """) with open('retarded.txt','w') as f: f.write(opt[userinput.lower()]) print("Output saved to file. Now force-feed all your friends that sweet, sweet pasta until they hate you.")
355.16
5,006
0.7636
1,735
8,879
3.90951
0.379251
0.014153
0.008846
0.007077
0.02123
0.005307
0
0
0
0
0
0.002231
0.192251
8,879
24
5,007
369.958333
0.943112
0.014867
0
0
0
0.238095
0.982563
0.009866
0
0
0
0
0
1
0
false
0
0
0
0
0.047619
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4a1aef934526bfa4bd4d257db171a95ca6fc835c
807
py
Python
ejabberd_python3d/defaults/arguments.py
Dedaldino3D/ejabberd-python3d
4ff4474347b89ecfda48ff05b76d3a8b3c983046
[ "MIT" ]
4
2020-11-04T02:33:06.000Z
2021-08-29T23:55:47.000Z
ejabberd_python3d/defaults/arguments.py
Dedaldino3D/ejabberd-python3d
4ff4474347b89ecfda48ff05b76d3a8b3c983046
[ "MIT" ]
null
null
null
ejabberd_python3d/defaults/arguments.py
Dedaldino3D/ejabberd-python3d
4ff4474347b89ecfda48ff05b76d3a8b3c983046
[ "MIT" ]
1
2021-09-26T01:49:23.000Z
2021-09-26T01:49:23.000Z
from __future__ import unicode_literals from ..abc.api import APIArgument from ..serializers import StringSerializer, IntegerSerializer, PositiveIntegerSerializer, BooleanSerializer, \ LogLevelSerializer, ListSerializer, GenericSerializer class GenericArgument(APIArgument): serializer_class = GenericSerializer class StringArgument(APIArgument): serializer_class = StringSerializer class IntegerArgument(APIArgument): serializer_class = IntegerSerializer class PositiveIntegerArgument(APIArgument): serializer_class = PositiveIntegerSerializer class BooleanArgument(APIArgument): serializer_class = BooleanSerializer class LogLevelArgument(APIArgument): serializer_class = LogLevelSerializer class ListArgument(APIArgument): serializer_class = ListSerializer
23.735294
110
0.825279
62
807
10.548387
0.403226
0.224771
0.278287
0
0
0
0
0
0
0
0
0
0.125155
807
33
111
24.454545
0.926346
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.944444
0
0
0
1
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
4a1ff17aeaa5f8eaaaf5fd08242924058f96b020
965
py
Python
unittest_reinvent/library_design/__init__.py
MolecularAI/reinvent-chemistry
bf0235bc2b1168b1db54c1e04bdba04b166ab7bf
[ "MIT" ]
null
null
null
unittest_reinvent/library_design/__init__.py
MolecularAI/reinvent-chemistry
bf0235bc2b1168b1db54c1e04bdba04b166ab7bf
[ "MIT" ]
null
null
null
unittest_reinvent/library_design/__init__.py
MolecularAI/reinvent-chemistry
bf0235bc2b1168b1db54c1e04bdba04b166ab7bf
[ "MIT" ]
1
2022-03-22T15:24:13.000Z
2022-03-22T15:24:13.000Z
from unittest_reinvent.library_design.aizynth import TestAiZynthClient from unittest_reinvent.library_design.reaction_definitions import TestStandardDefinitions, TestBuildingBlocks from unittest_reinvent.library_design.reaction_filters import * from unittest_reinvent.library_design.test_attachment_points import TestAttachmentPoints from unittest_reinvent.library_design.test_bond_maker import TestBondMaker from unittest_reinvent.library_design.test_failing_reactions_enumerator import TestFailingReactionsEnumerator, \ TestNonFailingReactionsEnumerator from unittest_reinvent.library_design.test_fragment_filter import TestFragmentFilter from unittest_reinvent.library_design.test_fragment_reactions import TestFragmentReactions from unittest_reinvent.library_design.test_fragment_reactions_slice_enumerator import \ TestSingleFragmentReactionsSliceEnumerator, TestMultipleFragmentReactionsSliceEnumerator, \ TestReactionsSliceEnumeratorWithFilters
68.928571
112
0.915026
92
965
9.217391
0.358696
0.127358
0.212264
0.286557
0.446934
0.408019
0.180425
0.127358
0
0
0
0
0.058031
965
13
113
74.230769
0.932893
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
4a221a76950c9089348ea067f0687386643517e9
1,045
py
Python
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/ATI/separate_stencil.py
ShujaKhalid/deep-rl
99c6ba6c3095d1bfdab81bd01395ced96bddd611
[ "MIT" ]
210
2016-04-09T14:26:00.000Z
2022-03-25T18:36:19.000Z
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/ATI/separate_stencil.py
ShujaKhalid/deep-rl
99c6ba6c3095d1bfdab81bd01395ced96bddd611
[ "MIT" ]
72
2016-09-04T09:30:19.000Z
2022-03-27T17:06:53.000Z
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/ATI/separate_stencil.py
ShujaKhalid/deep-rl
99c6ba6c3095d1bfdab81bd01395ced96bddd611
[ "MIT" ]
64
2016-04-09T14:26:49.000Z
2022-03-21T11:19:47.000Z
'''Autogenerated by xml_generate script, do not edit!''' from OpenGL import platform as _p, arrays # Code generation uses this from OpenGL.raw.GL import _types as _cs # End users want this... from OpenGL.raw.GL._types import * from OpenGL.raw.GL import _errors from OpenGL.constant import Constant as _C import ctypes _EXTENSION_NAME = 'GL_ATI_separate_stencil' def _f( function ): return _p.createFunction( function,_p.PLATFORM.GL,'GL_ATI_separate_stencil',error_checker=_errors._error_checker) GL_STENCIL_BACK_FAIL_ATI=_C('GL_STENCIL_BACK_FAIL_ATI',0x8801) GL_STENCIL_BACK_FUNC_ATI=_C('GL_STENCIL_BACK_FUNC_ATI',0x8800) GL_STENCIL_BACK_PASS_DEPTH_FAIL_ATI=_C('GL_STENCIL_BACK_PASS_DEPTH_FAIL_ATI',0x8802) GL_STENCIL_BACK_PASS_DEPTH_PASS_ATI=_C('GL_STENCIL_BACK_PASS_DEPTH_PASS_ATI',0x8803) @_f @_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLint,_cs.GLuint) def glStencilFuncSeparateATI(frontfunc,backfunc,ref,mask):pass @_f @_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum) def glStencilOpSeparateATI(face,sfail,dpfail,dppass):pass
43.541667
117
0.831579
171
1,045
4.625731
0.380117
0.091024
0.131479
0.06574
0.426043
0.266751
0.222503
0.073325
0.073325
0
0
0.020576
0.069856
1,045
23
118
45.434783
0.79321
0.095694
0
0.105263
1
0
0.17484
0.17484
0
0
0.025586
0
0
1
0.157895
false
0.210526
0.315789
0.052632
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
c57b93b77c089b9ac9bd8cee1acc96bbdb3f986c
31
py
Python
anchors/tests/__init__.py
Fantomas42/django-anchors
1cedc947dd3f1ed5197e55bb96cf0c6d2e2cf430
[ "BSD-3-Clause" ]
null
null
null
anchors/tests/__init__.py
Fantomas42/django-anchors
1cedc947dd3f1ed5197e55bb96cf0c6d2e2cf430
[ "BSD-3-Clause" ]
null
null
null
anchors/tests/__init__.py
Fantomas42/django-anchors
1cedc947dd3f1ed5197e55bb96cf0c6d2e2cf430
[ "BSD-3-Clause" ]
null
null
null
"""Tests for django-anchors"""
15.5
30
0.677419
4
31
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.096774
31
1
31
31
0.75
0.774194
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
c5d6da1a017ea7aa6c2dbacbb60ce9b7edaf4282
939
py
Python
dbscan.py
andrewwhite5/DBSCAN-Clustering-Algorithm
a61e23db19e3cb023c4a0278c6065e15dce9d156
[ "MIT" ]
null
null
null
dbscan.py
andrewwhite5/DBSCAN-Clustering-Algorithm
a61e23db19e3cb023c4a0278c6065e15dce9d156
[ "MIT" ]
null
null
null
dbscan.py
andrewwhite5/DBSCAN-Clustering-Algorithm
a61e23db19e3cb023c4a0278c6065e15dce9d156
[ "MIT" ]
null
null
null
''' DBSCAN: Density-Based Spatial Clustering of Applications with Noise Documentation for original: https://github.com/scikit-learn/scikit-learn/blob/0fb307bf3/sklearn/cluster/_dbscan.py#L150 ''' import numpy as np def dbscan(): ''' TODO: - Figure out callable parameters ^ - ''' # estimate = DBSCAN(parameters here) # estimate.fit pass class DBSCAN(): ''' TODO: - Figure out callable parameters - Define fit() method - Define predict() method - Find a way to build conceptualization of border points vs. core points ''' ''' dist = maximum distance between neighbors (same cluster) min_points = minimum number of points required to be considered for a cluster ''' def __init__(self, dist=.1, min_points=3): self.dist = dist self.min_points = min_points def fit(): pass def predict(): pass
22.902439
91
0.629393
111
939
5.243243
0.603604
0.061856
0.054983
0.065292
0.127148
0.127148
0
0
0
0
0
0.014684
0.27476
939
40
92
23.475
0.839941
0.494143
0
0.272727
0
0
0
0
0
0
0
0.05
0
1
0.363636
false
0.272727
0.090909
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
0
1
0
0
4
680008d91ddd8da309c0cb1a8b52de6e0b83e2e7
223
py
Python
newsfeeds/admin.py
PingLu8/django-twitter
ef5c47a9b2cd6796d67028946b2ed1c7b0af51be
[ "Apache-2.0" ]
null
null
null
newsfeeds/admin.py
PingLu8/django-twitter
ef5c47a9b2cd6796d67028946b2ed1c7b0af51be
[ "Apache-2.0" ]
null
null
null
newsfeeds/admin.py
PingLu8/django-twitter
ef5c47a9b2cd6796d67028946b2ed1c7b0af51be
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from newsfeeds.models import NewsFeed @admin.register(NewsFeed) class NewsFeedAdmin(admin.ModelAdmin): list_display = ( 'user', 'tweet', 'created_at') date_hierarchy = 'created_at'
27.875
51
0.762332
27
223
6.148148
0.740741
0.108434
0
0
0
0
0
0
0
0
0
0
0.130045
223
7
52
31.857143
0.85567
0
0
0
0
0
0.130045
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
a842647b6c25e7586006809896a7a27aafc73d8a
9,006
py
Python
jmetal/core/test/test_solution.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
null
null
null
jmetal/core/test/test_solution.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
null
null
null
jmetal/core/test/test_solution.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
null
null
null
import copy import unittest from jmetal.core.solution import BinarySolution, FloatSolution, IntegerSolution, Solution, CompositeSolution from jmetal.util.ckecking import InvalidConditionException class SolutionTestCase(unittest.TestCase): def test_should_default_constructor_create_a_valid_solution(self) -> None: solution = Solution(2, 3) self.assertEqual(2, solution.number_of_variables) self.assertEqual(3, solution.number_of_objectives) self.assertEqual(0, len(solution.attributes)) self.assertEqual(2, len(solution.variables)) self.assertEqual(3, len(solution.objectives)) class BinarySolutionTestCase(unittest.TestCase): def test_should_default_constructor_create_a_valid_solution(self) -> None: solution = BinarySolution(2, 3) self.assertEqual(2, solution.number_of_variables) self.assertEqual(3, solution.number_of_objectives) def test_should_constructor_create_a_valid_solution(self) -> None: solution = BinarySolution(number_of_variables=2, number_of_objectives=3) solution.variables[0] = [True, False] solution.variables[1] = [False] self.assertEqual(2, solution.number_of_variables) self.assertEqual(3, solution.number_of_objectives) self.assertEqual(2, len(solution.variables)) self.assertEqual(3, len(solution.objectives)) self.assertEqual([True, False], solution.variables[0]) self.assertEqual([False], solution.variables[1]) def test_should_get_total_number_of_bits_return_zero_if_the_object_variables_are_not_initialized(self) -> None: solution = BinarySolution(number_of_variables=2, number_of_objectives=3) self.assertEqual(0, solution.get_total_number_of_bits()) def test_should_get_total_number_of_bits_return_the_right_value(self) -> None: solution = BinarySolution(number_of_variables=2, number_of_objectives=3) solution.variables[0] = [True, False] solution.variables[1] = [False, True, False] self.assertEqual(5, solution.get_total_number_of_bits()) class FloatSolutionTestCase(unittest.TestCase): def test_should_constructor_create_a_non_null_object(self) -> None: solution 
= FloatSolution([], [], 2) self.assertIsNotNone(solution) def test_should_default_constructor_create_a_valid_solution(self) -> None: solution = FloatSolution([0.0, 0.5], [1.0, 2.0], 3) self.assertEqual(2, solution.number_of_variables) self.assertEqual(3, solution.number_of_objectives) self.assertEqual(2, len(solution.variables)) self.assertEqual(3, len(solution.objectives)) self.assertEqual([0.0, 0.5], solution.lower_bound) self.assertEqual([1.0, 2.0], solution.upper_bound) def test_should_copy_work_properly(self) -> None: solution = FloatSolution([0.0, 3.5], [1.0, 5.0], 3, 2) solution.variables = [1.24, 2.66] solution.objectives = [0.16, -2.34, 9.25] solution.constraints = [-1.2, -0.25] solution.attributes["attr"] = "value" new_solution = copy.copy(solution) self.assertEqual(solution.number_of_variables, new_solution.number_of_variables) self.assertEqual(solution.number_of_objectives, new_solution.number_of_objectives) self.assertEqual(solution.variables, new_solution.variables) self.assertEqual(solution.objectives, new_solution.objectives) self.assertEqual(solution.lower_bound, new_solution.lower_bound) self.assertEqual(solution.upper_bound, new_solution.upper_bound) self.assertEqual(solution.constraints, new_solution.constraints) self.assertIs(solution.lower_bound, solution.lower_bound) self.assertIs(solution.upper_bound, solution.upper_bound) self.assertEqual(solution.attributes, new_solution.attributes) class IntegerSolutionTestCase(unittest.TestCase): def test_should_constructor_create_a_non_null_object(self) -> None: solution = IntegerSolution([], [], 2) self.assertIsNotNone(solution) def test_should_default_constructor_create_a_valid_solution(self) -> None: solution = IntegerSolution([0, 5], [1, 2], 3, 0) self.assertEqual(2, solution.number_of_variables) self.assertEqual(3, solution.number_of_objectives) self.assertEqual(2, len(solution.variables)) self.assertEqual(3, len(solution.objectives)) self.assertEqual(0, len(solution.constraints)) 
self.assertEqual([0, 5], solution.lower_bound) self.assertEqual([1, 2], solution.upper_bound) def test_should_copy_work_properly(self) -> None: solution = IntegerSolution([0, 5], [1, 2], 3, 1) solution.variables = [1, 2] solution.objectives = [0.16, -2.34, 9.25] solution.constraints = [-5] solution.attributes["attr"] = "value" new_solution = copy.copy(solution) self.assertEqual(solution.number_of_variables, new_solution.number_of_variables) self.assertEqual(solution.number_of_objectives, new_solution.number_of_objectives) self.assertEqual(solution.variables, new_solution.variables) self.assertEqual(solution.objectives, new_solution.objectives) self.assertEqual(solution.lower_bound, new_solution.lower_bound) self.assertEqual(solution.upper_bound, new_solution.upper_bound) self.assertEqual(solution.constraints, new_solution.constraints) self.assertIs(solution.lower_bound, solution.lower_bound) self.assertIs(solution.upper_bound, solution.upper_bound) self.assertEqual(solution.attributes, new_solution.attributes) class CompositeSolutionTestCase(unittest.TestCase): def test_should_constructor_create_a_valid_not_none_composite_solution_composed_of_a_double_solution(self): composite_solution = CompositeSolution([FloatSolution([1.0], [2.0], 2)]) self.assertIsNotNone(composite_solution) def test_should_constructor_raise_an_exception_if_the_number_of_objectives_is_not_coherent(self): float_solution: FloatSolution = FloatSolution([1.0], [3.0], 3) integer_solution: IntegerSolution = IntegerSolution([2], [4], 2) with self.assertRaises(InvalidConditionException): CompositeSolution([float_solution, integer_solution]) def test_should_constructor_create_a_valid_soltion_composed_of_a_float_and_an_integer_solutions(self): number_of_objectives = 3 number_of_constraints = 1 float_solution: FloatSolution = FloatSolution([1.0], [3.0], number_of_objectives, number_of_constraints) integer_solution: IntegerSolution = IntegerSolution([2], [4], number_of_objectives, number_of_constraints) 
solution: CompositeSolution = CompositeSolution([float_solution, integer_solution]) self.assertIsNotNone(solution) self.assertEqual(2, solution.number_of_variables) self.assertEqual(number_of_objectives, solution.number_of_objectives) self.assertEqual(number_of_constraints, solution.number_of_constraints) self.assertEqual(number_of_objectives, solution.variables[0].number_of_objectives) self.assertEqual(number_of_objectives, solution.variables[1].number_of_objectives) self.assertEqual(number_of_constraints, solution.variables[0].number_of_constraints) self.assertEqual(number_of_constraints, solution.variables[1].number_of_constraints) self.assertTrue(type(solution.variables[0] is FloatSolution)) self.assertTrue(type(solution.variables[1] is IntegerSolution)) def test_should_copy_work_properly(self): number_of_objectives = 3 number_of_constraints = 1 float_solution: FloatSolution = FloatSolution([1.0], [3.0], number_of_objectives, number_of_constraints) integer_solution: IntegerSolution = IntegerSolution([2], [4], number_of_objectives, number_of_constraints) solution: CompositeSolution = CompositeSolution([float_solution, integer_solution]) new_solution: CompositeSolution = copy.deepcopy(solution) self.assertEqual(solution.number_of_variables, new_solution.number_of_variables) self.assertEqual(solution.number_of_objectives, new_solution.number_of_objectives) self.assertEqual(solution.number_of_constraints, new_solution.number_of_constraints) self.assertEqual(solution.variables[0].number_of_variables, new_solution.variables[0].number_of_variables) self.assertEqual(solution.variables[1].number_of_variables, new_solution.variables[1].number_of_variables) self.assertEqual(solution.variables[0], new_solution.variables[0]) self.assertEqual(solution.variables[1], new_solution.variables[1]) self.assertEqual(solution.variables[0].variables, new_solution.variables[0].variables) self.assertEqual(solution.variables[1].variables, new_solution.variables[1].variables) if __name__ 
== '__main__': unittest.main()
50.881356
115
0.744726
1,067
9,006
5.984067
0.090909
0.080188
0.067659
0.046985
0.839781
0.785748
0.729209
0.667032
0.64166
0.588567
0
0.021464
0.156784
9,006
176
116
51.170455
0.819331
0
0
0.532847
0
0
0.002887
0
0
0
0
0
0.518248
1
0.109489
false
0
0.029197
0
0.175182
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
a861c5527b0f514fdb11d7e7503e5a4546b3f1df
47
py
Python
nautacli/__init__.py
josedanielr/nauta-cli
c646d6661892daed3ddd55e5546ef6a07ad0f42e
[ "MIT" ]
null
null
null
nautacli/__init__.py
josedanielr/nauta-cli
c646d6661892daed3ddd55e5546ef6a07ad0f42e
[ "MIT" ]
null
null
null
nautacli/__init__.py
josedanielr/nauta-cli
c646d6661892daed3ddd55e5546ef6a07ad0f42e
[ "MIT" ]
1
2020-03-20T04:35:15.000Z
2020-03-20T04:35:15.000Z
__version__ = '0.5.1' from .nauta import main
11.75
23
0.702128
8
47
3.625
1
0
0
0
0
0
0
0
0
0
0
0.076923
0.170213
47
3
24
15.666667
0.666667
0
0
0
0
0
0.106383
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
a866a48f4e17b04a44f251af048743591ba2db52
3,159
py
Python
pyecobee/objects/remote_sensor.py
kstough/Pyecobee
c7299f03ea7a4516d1b215fa9906edf1bf2bda2c
[ "MIT" ]
1
2019-04-30T00:57:48.000Z
2019-04-30T00:57:48.000Z
pyecobee/objects/remote_sensor.py
kstough/Pyecobee
c7299f03ea7a4516d1b215fa9906edf1bf2bda2c
[ "MIT" ]
null
null
null
pyecobee/objects/remote_sensor.py
kstough/Pyecobee
c7299f03ea7a4516d1b215fa9906edf1bf2bda2c
[ "MIT" ]
null
null
null
""" This module is home to the RemoteSensor class """ from pyecobee.ecobee_object import EcobeeObject class RemoteSensor(EcobeeObject): """ This class has been auto generated by scraping https://www.ecobee.com/home/developer/api/documentation/v1/objects/RemoteSensor.shtml Attribute names have been generated by converting ecobee property names from camelCase to snake_case. A getter property has been generated for each attribute. A setter property has been generated for each attribute whose value of READONLY is "no". An __init__ argument without a default value has been generated if the value of REQUIRED is "yes". An __init__ argument with a default value of None has been generated if the value of REQUIRED is "no". """ __slots__ = ['_id', '_name', '_type', '_code', '_in_use', '_capability'] attribute_name_map = {'id': 'id', 'name': 'name', 'type': 'type', 'code': 'code', 'in_use': 'inUse', 'inUse': 'in_use', 'capability': 'capability'} attribute_type_map = {'id': 'six.text_type', 'name': 'six.text_type', 'type': 'six.text_type', 'code': 'six.text_type', 'in_use': 'bool', 'capability': 'List[RemoteSensorCapability]'} def __init__(self, id=None, name=None, type=None, code=None, in_use=None, capability=None): """ Construct a RemoteSensor instance """ self._id = id self._name = name self._type = type self._code = code self._in_use = in_use self._capability = capability @property def id(self): """ Gets the id attribute of this RemoteSensor instance. :return: The value of the id attribute of this RemoteSensor instance. :rtype: six.text_type """ return self._id @property def name(self): """ Gets the name attribute of this RemoteSensor instance. :return: The value of the name attribute of this RemoteSensor instance. :rtype: six.text_type """ return self._name @property def type(self): """ Gets the type attribute of this RemoteSensor instance. :return: The value of the type attribute of this RemoteSensor instance. 
:rtype: six.text_type """ return self._type @property def code(self): """ Gets the code attribute of this RemoteSensor instance. :return: The value of the code attribute of this RemoteSensor instance. :rtype: six.text_type """ return self._code @property def in_use(self): """ Gets the in_use attribute of this RemoteSensor instance. :return: The value of the in_use attribute of this RemoteSensor instance. :rtype: bool """ return self._in_use @property def capability(self): """ Gets the capability attribute of this RemoteSensor instance. :return: The value of the capability attribute of this RemoteSensor instance. :rtype: List[RemoteSensorCapability] """ return self._capability
30.375
114
0.631212
385
3,159
5.025974
0.205195
0.134367
0.093023
0.167442
0.45478
0.45478
0.449612
0.357623
0.332817
0.29354
0
0.000438
0.277303
3,159
103
115
30.669903
0.847131
0.501108
0
0.1875
1
0
0.17282
0.021995
0
0
0
0
0
1
0.21875
false
0
0.03125
0
0.5625
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
764321817ce4163c4df0a36a4fd147e45eac94a7
237
py
Python
backend/company/admin.py
mahanfarzaneh2000/ProjeTor
bf21d2cfcd64552c8be5d97eb4ac1c7c3f7d9ca9
[ "Apache-2.0" ]
null
null
null
backend/company/admin.py
mahanfarzaneh2000/ProjeTor
bf21d2cfcd64552c8be5d97eb4ac1c7c3f7d9ca9
[ "Apache-2.0" ]
null
null
null
backend/company/admin.py
mahanfarzaneh2000/ProjeTor
bf21d2cfcd64552c8be5d97eb4ac1c7c3f7d9ca9
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin

from .models import Company, Team, Position, JoinRequest

# Register your models here.
# Expose each company-related model in the Django admin with the
# default ModelAdmin options.
for _model in (Company, Team, Position, JoinRequest):
    admin.site.register(_model)
29.625
56
0.818565
32
237
6.0625
0.4375
0.185567
0.350515
0
0
0
0
0
0
0
0
0
0.084388
237
7
57
33.857143
0.894009
0.109705
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
767273028216e6cd5149c3a068218ac53abc46f9
44
py
Python
kadro/__init__.py
koaning/kadro
cbf993e5142d1ade26ac5922d7d15784d56b3db6
[ "MIT" ]
13
2017-03-01T15:19:43.000Z
2021-09-23T21:02:27.000Z
kadro/__init__.py
koaning/kadro
cbf993e5142d1ade26ac5922d7d15784d56b3db6
[ "MIT" ]
9
2017-02-03T10:13:22.000Z
2018-05-28T21:26:10.000Z
kadro/__init__.py
koaning/kadro
cbf993e5142d1ade26ac5922d7d15784d56b3db6
[ "MIT" ]
3
2018-03-20T03:41:28.000Z
2020-07-03T06:00:16.000Z
"""Top-level kadro package: re-exports the Frame API."""
from kadro.Frame import *

# Version is a *string*, per packaging convention (PEP 396 and the
# packaging tools all parse version strings). The original float 0.1
# cannot represent versions like "0.10" and breaks version parsers.
__version__ = "0.1"
14.666667
25
0.75
7
44
4.142857
1
0
0
0
0
0
0
0
0
0
0
0.054054
0.159091
44
3
26
14.666667
0.72973
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
7675d5d7c1c163519b35110c5cdc3310dae932a7
1,602
py
Python
setup.py
mosquito/ketama-python
d805b66cac85f4ab09f7470a05a2fff717725699
[ "BSD-2-Clause" ]
3
2020-01-28T05:44:17.000Z
2022-03-25T04:01:28.000Z
setup.py
mosquito/ketama-python
d805b66cac85f4ab09f7470a05a2fff717725699
[ "BSD-2-Clause" ]
null
null
null
setup.py
mosquito/ketama-python
d805b66cac85f4ab09f7470a05a2fff717725699
[ "BSD-2-Clause" ]
null
null
null
from setuptools import Extension, setup

# Read the long description with a context manager so the file handle
# is closed deterministically; the original `open("README.rst").read()`
# relied on the GC to close the file.
with open("README.rst") as readme:
    long_description = readme.read()

setup(
    name="ketama",
    version="0.1.1",
    package_data={"": ["ketama.pyi"]},
    description="Python bindings for Ketama consistent hash library",
    long_description=long_description,
    url="https://github.com/mosquito/ketama-python",
    license="BSD License",
    # C extension wrapping the bundled ketama + md5 sources.
    ext_modules=[
        Extension(
            "ketama",
            ["ketama_python.c", "src/ketama.c", "src/md5.c"],
            extra_compile_args=["-std=gnu99"]
        )
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "Intended Audience :: System Administrators",
        "Intended Audience :: Telecommunications Industry",
        "License :: OSI Approved :: BSD License",
        "Natural Language :: English",
        "Operating System :: MacOS",
        "Operating System :: POSIX",
        "Programming Language :: C",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: Implementation :: CPython",
        "Topic :: Communications",
        "Topic :: Internet",
        "Topic :: Software Development :: Libraries",
        "Topic :: System :: Clustering",
        "Topic :: System :: Distributed Computing",
    ],
)
36.409091
71
0.584894
149
1,602
6.248322
0.543624
0.183673
0.214823
0.195489
0
0
0
0
0
0
0
0.016143
0.265293
1,602
43
72
37.255814
0.774851
0
0
0.04878
0
0
0.615481
0
0
0
0
0
0
1
0
true
0
0.02439
0
0.02439
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
7675df9c3d788d46d06a98f6d9071f389ff9c085
33
py
Python
dgm4nlp/__init__.py
uva-slpl/dgm4nlp
9c5b3a4bc3f5e9b4f971d5b9bbad70e19bb12f8c
[ "MIT" ]
9
2015-07-22T18:07:44.000Z
2021-11-08T11:21:11.000Z
dgm4nlp/__init__.py
uva-slpl/dgm4nlp
9c5b3a4bc3f5e9b4f971d5b9bbad70e19bb12f8c
[ "MIT" ]
null
null
null
dgm4nlp/__init__.py
uva-slpl/dgm4nlp
9c5b3a4bc3f5e9b4f971d5b9bbad70e19bb12f8c
[ "MIT" ]
1
2021-01-12T10:00:22.000Z
2021-01-12T10:00:22.000Z
""" :Authors: - Wilker Aziz """
6.6
23
0.515152
3
33
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.212121
33
4
24
8.25
0.653846
0.69697
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
76959ba22fdef6e774997ab65ea97469238095c2
268
py
Python
client-portal/controller/taleem_aur_hunar/taleem_aur_hunar.py
oakbani/ksdp-portal
8f44b3cb0081a7f31b9c8121883dd51945a05520
[ "MIT" ]
null
null
null
client-portal/controller/taleem_aur_hunar/taleem_aur_hunar.py
oakbani/ksdp-portal
8f44b3cb0081a7f31b9c8121883dd51945a05520
[ "MIT" ]
null
null
null
client-portal/controller/taleem_aur_hunar/taleem_aur_hunar.py
oakbani/ksdp-portal
8f44b3cb0081a7f31b9c8121883dd51945a05520
[ "MIT" ]
1
2021-09-19T10:58:17.000Z
2021-09-19T10:58:17.000Z
from flask import Blueprint, request, render_template

# Blueprint serving the "Taleem aur Hunar" pages, mounted at
# /taleem-aur-hunar by the application that registers it.
taleem_aur_hunar_api = Blueprint(
    "taleem_aur_hunar",
    __name__,
    url_prefix="/taleem-aur-hunar",
)


@taleem_aur_hunar_api.route("/")
def index():
    """Render the blueprint's landing page."""
    return render_template("taleem-aur-hunar.html")
22.333333
65
0.727612
35
268
5.142857
0.542857
0.25
0.388889
0.255556
0.311111
0
0
0
0
0
0
0
0.152985
268
11
66
24.363636
0.792952
0
0
0
0
0
0.214008
0.081712
0
0
0
0
0
1
0.142857
false
0
0.142857
0.142857
0.428571
0.285714
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
4
76c6fd2fdbf556b09fbc4165ca06eee02e313912
92
py
Python
cherche/translate/__init__.py
raphaelsty/cherche
c9db846fc80451d71056803250ff649a063af057
[ "MIT" ]
193
2022-01-08T13:21:08.000Z
2022-03-26T17:24:18.000Z
cherche/translate/__init__.py
NicolasBizzozzero/cherche
3869206438a15ff9c14d3d36468b578f193b9a57
[ "MIT" ]
3
2022-02-08T01:17:22.000Z
2022-03-17T10:32:57.000Z
cherche/translate/__init__.py
NicolasBizzozzero/cherche
3869206438a15ff9c14d3d36468b578f193b9a57
[ "MIT" ]
7
2022-01-09T11:27:29.000Z
2022-01-23T08:58:01.000Z
"""Public interface of the translate subpackage."""
from .translate import Translate, TranslateQuery

# Names re-exported by `from ...translate import *`.
__all__ = ["Translate", "TranslateQuery"]
23
48
0.782609
8
92
8.5
0.625
0.676471
0
0
0
0
0
0
0
0
0
0
0.108696
92
3
49
30.666667
0.829268
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
4f287ae69d7e0ee05a43974374cbdf7b24aabca5
209
py
Python
handlers/visualization.py
openxc/web-logging-example
830a3f1bfbb67af5ad6c5cfcde737e6faacb70f2
[ "BSD-3-Clause" ]
7
2015-09-30T13:46:32.000Z
2021-07-09T16:58:46.000Z
handlers/visualization.py
openxc/web-logging-example
830a3f1bfbb67af5ad6c5cfcde737e6faacb70f2
[ "BSD-3-Clause" ]
1
2015-09-29T04:14:41.000Z
2017-10-07T14:41:13.000Z
handlers/visualization.py
openxc/web-logging-example
830a3f1bfbb67af5ad6c5cfcde737e6faacb70f2
[ "BSD-3-Clause" ]
4
2015-01-02T14:47:12.000Z
2017-10-06T19:40:40.000Z
import logging

from handlers.base import BaseHandler

# Child of the application's "recorder" logger hierarchy so records
# propagate to the recorder root handlers.
logger = logging.getLogger('recorder.' + __name__)


class VisualizationHandler(BaseHandler):
    """Serve the data-visualization page."""

    def get(self):
        """Render the visualization template in response to GET."""
        self.render("visualization.html")
20.9
50
0.751196
22
209
6.954545
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.148325
209
9
51
23.222222
0.859551
0
0
0
0
0
0.129187
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
4f49b2f669b18985cd3870206c5ca91d9bb5b418
1,058
py
Python
probayes/named_dict.py
Bhumbra/probayes
e5ac193076e4188b9b38c0e18466223ab4d041f7
[ "BSD-3-Clause" ]
null
null
null
probayes/named_dict.py
Bhumbra/probayes
e5ac193076e4188b9b38c0e18466223ab4d041f7
[ "BSD-3-Clause" ]
null
null
null
probayes/named_dict.py
Bhumbra/probayes
e5ac193076e4188b9b38c0e18466223ab4d041f7
[ "BSD-3-Clause" ]
null
null
null
""" Provides named dictionary """ #------------------------------------------------------------------------------- import collections #------------------------------------------------------------------------------- class NamedDict(collections.OrderedDict): """ A named dictionary is an ordered dictionary with a name """ _name = None #------------------------------------------------------------------------------- def __init__(self, name, *args, **kwds): self.name = name super().__init__(*args, **kwds) #------------------------------------------------------------------------------- @property def name(self): return self._name @name.setter def name(self, name): assert isinstance(name, str), \ "Name must be a string, not {}".format(type(name)) self._name = name #------------------------------------------------------------------------------- def __repr__(self): return "{}: {}".format(self.name, super().__repr__()) #-------------------------------------------------------------------------------
34.129032
80
0.347826
72
1,058
4.847222
0.472222
0.137536
0.103152
0
0
0
0
0
0
0
0
0
0.118147
1,058
30
81
35.266667
0.374062
0.52741
0
0
0
0
0.072614
0
0
0
0
0
0.0625
1
0.25
false
0
0.0625
0.125
0.5625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
4f5a844d3800cf7f84ee672900d5706785fff199
157
py
Python
unittests/conftest.py
Philliams/ml_template
cb2616d5ca145fc3b3e8ec15f6eb4d2cd99778d0
[ "MIT" ]
null
null
null
unittests/conftest.py
Philliams/ml_template
cb2616d5ca145fc3b3e8ec15f6eb4d2cd99778d0
[ "MIT" ]
null
null
null
unittests/conftest.py
Philliams/ml_template
cb2616d5ca145fc3b3e8ec15f6eb4d2cd99778d0
[ "MIT" ]
null
null
null
"""Pytest configuration: make the project root importable."""
# PEP 8: one import per line (the original combined `import sys, os`).
import os
import sys

# This will prepend the root directory so that tests can import project
# modules without installing the package.
# NOTE(review): '..' is resolved relative to the current working
# directory, not this file — assumes pytest is launched from the
# unittests/ directory; confirm against the CI invocation.
sys.path.insert(0, os.path.abspath('..'))
39.25
61
0.732484
27
157
4.259259
0.814815
0
0
0
0
0
0
0
0
0
0
0.007634
0.165605
157
4
62
39.25
0.870229
0.592357
0
0
0
0
0.033898
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
4f65f8017e0ba8d6326deeb1a62b457afc7edce6
121
py
Python
djangoblog/pages/urls.py
rojcode/djangoblog
9835d7540266c21c9f7ac85fb9ca622b5c806e8d
[ "MIT" ]
1
2021-09-11T11:44:08.000Z
2021-09-11T11:44:08.000Z
djangoblog/pages/urls.py
rojcode/djangoblog
9835d7540266c21c9f7ac85fb9ca622b5c806e8d
[ "MIT" ]
null
null
null
djangoblog/pages/urls.py
rojcode/djangoblog
9835d7540266c21c9f7ac85fb9ca622b5c806e8d
[ "MIT" ]
null
null
null
# from django.urls import path # from .views import Home # urlpatterns = [ # path('',Home.as_view(), name='home') # ]
24.2
42
0.636364
16
121
4.75
0.6875
0
0
0
0
0
0
0
0
0
0
0
0.181818
121
5
43
24.2
0.767677
0.917355
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4f8dee4f2674495822c3e449c0f4b284cae1e54f
250
py
Python
user/utils.py
sanidhyamangal/classh
61b86942f4a26f9e36a803186451455579351e10
[ "MIT" ]
null
null
null
user/utils.py
sanidhyamangal/classh
61b86942f4a26f9e36a803186451455579351e10
[ "MIT" ]
null
null
null
user/utils.py
sanidhyamangal/classh
61b86942f4a26f9e36a803186451455579351e10
[ "MIT" ]
null
null
null
import random
from datetime import datetime


def generate_token() -> str:
    """
    Generate a 6-digit token for the miscellaneous tasks.

    The token is a zero-padded 2-digit random number followed by the
    current hour and second formatted via '%H%S'.

    NOTE(review): '%H%S' skips the minutes field — confirm whether
    '%H%M%S' was intended. Also uses ``random``, which is not suitable
    for security-sensitive tokens (use ``secrets`` for those).
    """
    # The original wrapped this in ''.join(<single string>), which is a
    # no-op; plain concatenation is equivalent.
    return str(random.randint(0, 99)).zfill(2) + datetime.now().strftime('%H%S')
22.727273
78
0.644
32
250
5
0.78125
0.1625
0
0
0
0
0
0
0
0
0
0.020202
0.208
250
10
79
25
0.787879
0.22
0
0
1
0
0.022346
0
0
0
0
0
0
1
0.2
true
0
0.4
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
4faa3ad2343870bfa31bfa4364e3b7454e9d51a3
5,102
py
Python
tests/commands/base/test_download_url.py
paulproteus/briefcase
9d00dc9300cb00ef5dcd766e3a625a94b0462e51
[ "BSD-3-Clause" ]
null
null
null
tests/commands/base/test_download_url.py
paulproteus/briefcase
9d00dc9300cb00ef5dcd766e3a625a94b0462e51
[ "BSD-3-Clause" ]
null
null
null
tests/commands/base/test_download_url.py
paulproteus/briefcase
9d00dc9300cb00ef5dcd766e3a625a94b0462e51
[ "BSD-3-Clause" ]
null
null
null
from unittest import mock

import pytest

from briefcase.exceptions import (
    BadNetworkResourceError,
    MissingNetworkResourceError
)


def test_new_download_oneshot(base_command):
    """A response with no content-length is written in one shot from
    response.content, and the filename is derived from the response URL."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.url = 'https://example.com/path/to/something.zip'
    response.status_code = 200
    # No content-length header -> download_url takes the one-shot path.
    response.headers.get.return_value = None
    response.content = b'all content'
    base_command.requests.get.return_value = response

    # Download the file
    filename = base_command.download_url(
        url='https://example.com/support?useful=Yes',
        download_path=base_command.base_path / 'downloads',
    )

    # requests.get has been invoked, but content isn't iterated
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_called_once_with('content-length')
    response.iter_content.assert_not_called()

    # The filename is derived from the URL
    assert filename == base_command.base_path / 'downloads' / 'something.zip'

    # File content is as expected
    with (base_command.base_path / 'downloads' / 'something.zip').open() as f:
        assert f.read() == 'all content'


def test_new_download_chunked(base_command):
    """A response that reports a content-length is streamed in 1 MiB
    chunks via iter_content and the chunks are concatenated on disk."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.url = 'https://example.com/path/to/something.zip'
    response.status_code = 200
    # content-length present -> download_url takes the chunked path.
    response.headers.get.return_value = '24'
    response.iter_content.return_value = iter([
        b'chunk-1;',
        b'chunk-2;',
        b'chunk-3;',
    ])
    base_command.requests.get.return_value = response

    # Download the file
    filename = base_command.download_url(
        url='https://example.com/support?useful=Yes',
        download_path=base_command.base_path
    )

    # requests.get has been invoked, and content is chunked.
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_called_once_with('content-length')
    # 1048576 == 1 MiB chunk size used by download_url.
    response.iter_content.assert_called_once_with(chunk_size=1048576)

    # The filename is derived from the URL
    assert filename == base_command.base_path / 'something.zip'

    # The downloaded file exists, and content is as expected
    assert filename.exists()
    with (base_command.base_path / 'something.zip').open() as f:
        assert f.read() == 'chunk-1;chunk-2;chunk-3;'


def test_already_downloaded(base_command):
    """If the target file already exists, the download is abandoned
    before any response content is read and the existing path is returned."""
    # Create an existing file
    existing_file = base_command.base_path / 'something.zip'
    with (existing_file).open('w') as f:
        f.write('existing content')

    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.url = 'https://example.com/path/to/something.zip'
    response.status_code = 200
    base_command.requests.get.return_value = response

    # Download the file
    filename = base_command.download_url(
        url='https://example.com/support?useful=Yes',
        download_path=base_command.base_path
    )

    # The GET request will have been made
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )

    # But the request will be abandoned without reading.
    response.headers.get.assert_not_called()

    # but the file existed, so the method returns
    assert filename == existing_file
    assert filename.exists()


def test_missing_resource(base_command):
    """A 404 response raises MissingNetworkResourceError and leaves
    no file on disk."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.status_code = 404
    base_command.requests.get.return_value = response

    # Download the file
    with pytest.raises(MissingNetworkResourceError):
        base_command.download_url(
            url='https://example.com/support?useful=Yes',
            download_path=base_command.base_path
        )

    # requests.get has been invoked, but nothing else.
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_not_called()

    # The file doesn't exist as a result of the download failure
    assert not (base_command.base_path / 'something.zip').exists()


def test_bad_resource(base_command):
    """Any other non-404 failure status (here 500) raises
    BadNetworkResourceError and leaves no file on disk."""
    base_command.requests = mock.MagicMock()
    response = mock.MagicMock()
    response.status_code = 500
    base_command.requests.get.return_value = response

    # Download the file
    with pytest.raises(BadNetworkResourceError):
        base_command.download_url(
            url='https://example.com/support?useful=Yes',
            download_path=base_command.base_path
        )

    # requests.get has been invoked, but nothing else.
    base_command.requests.get.assert_called_with(
        'https://example.com/support?useful=Yes',
        stream=True,
    )
    response.headers.get.assert_not_called()

    # The file doesn't exist as a result of the download failure
    assert not (base_command.base_path / 'something.zip').exists()
32.291139
78
0.699138
650
5,102
5.306154
0.170769
0.118005
0.069585
0.066106
0.771818
0.766889
0.731226
0.717019
0.699623
0.699623
0
0.007321
0.196786
5,102
157
79
32.496815
0.834309
0.143081
0
0.561905
0
0
0.169577
0.005515
0
0
0
0
0.2
1
0.047619
false
0
0.028571
0
0.07619
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
96ce4fa9269f2c4e36a5af7df53e424e80453510
723
py
Python
tests/pystrings/test_compress.py
BrianLusina/PyCharm
144dd4f6b2d254507237f46c8ee175c407fe053d
[ "Apache-2.0", "MIT" ]
null
null
null
tests/pystrings/test_compress.py
BrianLusina/PyCharm
144dd4f6b2d254507237f46c8ee175c407fe053d
[ "Apache-2.0", "MIT" ]
null
null
null
tests/pystrings/test_compress.py
BrianLusina/PyCharm
144dd4f6b2d254507237f46c8ee175c407fe053d
[ "Apache-2.0", "MIT" ]
null
null
null
import unittest

from pystrings.compress import compress


class CompressStringTestCase(unittest.TestCase):
    """Unit tests for pystrings.compress.compress.

    Method names are unchanged so unittest discovery picks up the same
    test set.
    """

    def test_none(self):
        """None input is passed through unchanged."""
        self.assertEqual(compress(None), None)

    def test_empty_string(self):
        """An empty string compresses to an empty string."""
        self.assertEqual(compress(""), "")

    def test_no_need_to_compress(self):
        """Input is returned unchanged when compression would not
        shorten it."""
        self.assertEqual(compress("AABBCC"), "AABBCC")

    def test_compression_AAABCCDDDDE(self):
        """Runs longer than one char get a count; singletons do not."""
        self.assertEqual(compress("AAABCCDDDDE"), "A3BC2D4E")

    def test_compression_BAAACCDDDD(self):
        """Leading singleton followed by compressible runs."""
        self.assertEqual(compress("BAAACCDDDD"), "BA3C2D4")

    def test_compression_AAABAACCDDDD(self):
        """Repeated characters in separate runs are counted per run."""
        self.assertEqual(compress("AAABAACCDDDD"), "A3BA2C2D4")


if __name__ == "__main__":
    unittest.main()
25.821429
63
0.709544
75
723
6.56
0.386667
0.085366
0.231707
0.329268
0
0
0
0
0
0
0
0.016667
0.170124
723
27
64
26.777778
0.803333
0
0
0
0
0
0.106501
0
0
0
0
0
0.352941
1
0.352941
false
0
0.117647
0
0.529412
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
96d9acfaebcdd724fa09a9a078eb21b62684ef9c
60
py
Python
src/djaiot/device_perf/urls.py
Django-AI/DjAIoT
5e56e8dffb0920d1fcee563128b98432dbc8288c
[ "MIT" ]
2
2021-12-06T08:55:11.000Z
2022-02-01T08:25:50.000Z
src/djaiot/device_perf/urls.py
Django-AI/DjAI-IoT
5e56e8dffb0920d1fcee563128b98432dbc8288c
[ "MIT" ]
null
null
null
src/djaiot/device_perf/urls.py
Django-AI/DjAI-IoT
5e56e8dffb0920d1fcee563128b98432dbc8288c
[ "MIT" ]
1
2022-01-31T10:03:58.000Z
2022-01-31T10:03:58.000Z
"""DjAIoT Device Performance module: URL configurations."""
30
59
0.766667
6
60
7.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.1
60
1
60
60
0.851852
0.883333
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
96ebc5c91d8ddcee0924af3bfa04532678eab361
199
py
Python
mysite/core/forms.py
karan-1700/Smart_OCR
0a4a3c5cdcbf211edae5e31d8fb2d75e6ac335a2
[ "MIT" ]
null
null
null
mysite/core/forms.py
karan-1700/Smart_OCR
0a4a3c5cdcbf211edae5e31d8fb2d75e6ac335a2
[ "MIT" ]
null
null
null
mysite/core/forms.py
karan-1700/Smart_OCR
0a4a3c5cdcbf211edae5e31d8fb2d75e6ac335a2
[ "MIT" ]
1
2020-10-02T16:04:08.000Z
2020-10-02T16:04:08.000Z
# Bug fix: the original referenced forms.ModelForm without importing
# `forms`, raising NameError at import time.
from django import forms
from django.db import models

from .models import History


class HistoryForm(forms.ModelForm):
    """ModelForm for creating/editing History records."""

    class Meta:
        model = History
        fields = ('uploaded_at', 'image_path', 'predicted_text')
24.875
64
0.698492
24
199
5.666667
0.791667
0
0
0
0
0
0
0
0
0
0
0
0.20603
199
8
64
24.875
0.860759
0
0
0
0
0
0.175
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
96eeafd2fcbc4c6d2e6d251e0bf0fb5b658b74c1
146
py
Python
applications/3rdAssignment/modules/try.py
jtrwallace/web2py
797cd0442a76800a9d0d442b911f274110f0af95
[ "BSD-3-Clause" ]
null
null
null
applications/3rdAssignment/modules/try.py
jtrwallace/web2py
797cd0442a76800a9d0d442b911f274110f0af95
[ "BSD-3-Clause" ]
null
null
null
applications/3rdAssignment/modules/try.py
jtrwallace/web2py
797cd0442a76800a9d0d442b911f274110f0af95
[ "BSD-3-Clause" ]
null
null
null
# Demonstration of Python's late-binding closure pitfall and its fix:
# each function binds the loop variable as a *default argument*, so it
# captures the value of ``i`` at definition time instead of the final
# loop value. (Python 2 syntax: ``print`` is a statement.)
function_list = []
for i in range(5):
    def f(x=i):  # default arg freezes the current value of i
        return x
    function_list.append(f)
    print i
for f in function_list:
    print f()  # prints 0..4 thanks to the default-argument binding
14.6
27
0.609589
26
146
3.307692
0.5
0.418605
0
0
0
0
0
0
0
0
0
0.009524
0.280822
146
9
28
16.222222
0.809524
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.25
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
8c02cd655a41b9b8d57317d1d541e655443896bf
653
py
Python
src/python/pants/backend/experimental/swift/register.py
danxmoran/pants
7fafd7d789747c9e6a266847a0ccce92c3fa0754
[ "Apache-2.0" ]
null
null
null
src/python/pants/backend/experimental/swift/register.py
danxmoran/pants
7fafd7d789747c9e6a266847a0ccce92c3fa0754
[ "Apache-2.0" ]
null
null
null
src/python/pants/backend/experimental/swift/register.py
danxmoran/pants
7fafd7d789747c9e6a266847a0ccce92c3fa0754
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import annotations from typing import Iterable from pants.backend.swift.goals import tailor from pants.backend.swift.target_types import SwiftSourcesGeneratorTarget, SwiftSourceTarget from pants.engine.rules import Rule from pants.engine.target import Target from pants.engine.unions import UnionRule def rules() -> Iterable[Rule | UnionRule]: return tailor.rules() def target_types() -> Iterable[type[Target]]: return ( SwiftSourceTarget, SwiftSourcesGeneratorTarget, )
27.208333
91
0.774885
77
653
6.493506
0.480519
0.09
0.09
0.084
0
0
0
0
0
0
0
0.01083
0.151608
653
23
92
28.391304
0.891697
0.192956
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
true
0
0.5
0.142857
0.785714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
4