hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
3a2def5897493b76f5245278f13191588cfca5bd
161
py
Python
kuwo/__init__.py
ssx12042/kwMusicer
b0425effc46246db80abe1978b2198a091a1abe3
[ "Apache-2.0" ]
null
null
null
kuwo/__init__.py
ssx12042/kwMusicer
b0425effc46246db80abe1978b2198a091a1abe3
[ "Apache-2.0" ]
null
null
null
kuwo/__init__.py
ssx12042/kwMusicer
b0425effc46246db80abe1978b2198a091a1abe3
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # @Time : 2021/5/16 22:41 # @Author : XiaYouRan # @Email : youran.xia@foxmail.com # @File : __init__.py.py # @Software: PyCharm
23
35
0.590062
22
161
4.136364
0.954545
0
0
0
0
0
0
0
0
0
0
0.094488
0.21118
161
6
36
26.833333
0.622047
0.919255
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
3a31c082eb96800c523838a4e64f1501e2abcac1
170
py
Python
amazon_scraper/constants.py
picorana/amazon-scraper
28e9c692ffd0af47a7f90355a53e41c47ddebf7f
[ "Unlicense" ]
32
2017-11-02T08:50:07.000Z
2021-11-18T14:45:00.000Z
amazon_scraper/constants.py
picorana/amazon-scraper
28e9c692ffd0af47a7f90355a53e41c47ddebf7f
[ "Unlicense" ]
11
2017-11-01T16:20:10.000Z
2019-12-26T12:56:59.000Z
amazon_scraper/constants.py
picorana/amazon-scraper
28e9c692ffd0af47a7f90355a53e41c47ddebf7f
[ "Unlicense" ]
4
2018-02-11T08:52:13.000Z
2021-12-05T19:17:40.000Z
base_product_page_url = 'https://www.amazon.com/gp/product/' base_amazon_url = 'https://www.amazon.com/' base_questions_url = 'https://www.amazon.com/ask/questions/asin/'
56.666667
65
0.764706
27
170
4.555556
0.444444
0.195122
0.268293
0.414634
0.487805
0
0
0
0
0
0
0
0.047059
170
3
65
56.666667
0.759259
0
0
0
0
0
0.578947
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3a38c2855da44034f8d8b3ce4dc039ead4091e79
318
py
Python
web/dbpatterns/documents/exporters/__init__.py
fatiherikli/dbpatterns
6936cfa3555bae9ef65296c7f31a6637c0ef5d54
[ "MIT" ]
133
2015-01-21T13:56:23.000Z
2018-06-03T03:58:37.000Z
web/dbpatterns/documents/exporters/__init__.py
fatiherikli/dbpatterns
6936cfa3555bae9ef65296c7f31a6637c0ef5d54
[ "MIT" ]
13
2015-02-24T15:47:25.000Z
2018-01-08T11:56:15.000Z
web/dbpatterns/documents/exporters/__init__.py
fatiherikli/dbpatterns
6936cfa3555bae9ef65296c7f31a6637c0ef5d54
[ "MIT" ]
26
2015-01-18T03:00:10.000Z
2018-03-10T13:31:03.000Z
class BaseExporter(object): """ The base class of all exporters. """ def __init__(self, document): self.document = document def export(self): raise NotImplementedError def as_text(self): return "\n".join(list(self.export())) class ExporterError(Exception): pass
18.705882
45
0.625786
35
318
5.542857
0.685714
0.123711
0
0
0
0
0
0
0
0
0
0
0.261006
318
17
46
18.705882
0.825532
0.100629
0
0
0
0
0.00738
0
0
0
0
0
0
1
0.333333
false
0.111111
0
0.111111
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
3a5c581d8fa7f9ea38f547c35a8d8961bb4d9f81
53
py
Python
tests/errors/test_zero_division.py
akshanshbhatt/lpython
70fef49dbbb6cbb0447f7013231171e5c8b8e5df
[ "BSD-3-Clause" ]
31
2022-01-07T23:56:33.000Z
2022-03-29T16:09:02.000Z
tests/errors/test_zero_division.py
akshanshbhatt/lpython
70fef49dbbb6cbb0447f7013231171e5c8b8e5df
[ "BSD-3-Clause" ]
197
2021-12-29T19:01:41.000Z
2022-03-31T15:58:25.000Z
tests/errors/test_zero_division.py
akshanshbhatt/lpython
70fef49dbbb6cbb0447f7013231171e5c8b8e5df
[ "BSD-3-Clause" ]
17
2022-01-06T15:34:36.000Z
2022-03-31T13:55:33.000Z
def f(): i: i32 i = 4 print(i // 0) f()
7.571429
17
0.339623
10
53
1.8
0.7
0
0
0
0
0
0
0
0
0
0
0.137931
0.45283
53
6
18
8.833333
0.482759
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.2
0.2
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
28e00a02ddda7496139895413b82003dcfd47450
126
py
Python
tigermeals/__init__.py
TigerMeals/Delivery
8512d097e3f7e78e4dc84fedeb027efd1ebb3514
[ "MIT" ]
null
null
null
tigermeals/__init__.py
TigerMeals/Delivery
8512d097e3f7e78e4dc84fedeb027efd1ebb3514
[ "MIT" ]
1
2022-02-12T04:04:58.000Z
2022-02-12T04:04:58.000Z
tigermeals/__init__.py
TigerMeals/Delivery
8512d097e3f7e78e4dc84fedeb027efd1ebb3514
[ "MIT" ]
null
null
null
from flask import Flask app = Flask(__name__) import tigermeals.delivery import tigermeals.restaurant import tigermeals.api
15.75
28
0.833333
16
126
6.3125
0.5625
0.475248
0
0
0
0
0
0
0
0
0
0
0.119048
126
7
29
18
0.90991
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.8
0
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
28e7fe13a2d78d6e79a5c8d7f73daf3e29163fd2
542
py
Python
sKoch.py
abcsds/Logo
1a9d69adc48e21df9fbf9f2a3d223cd223c8e9be
[ "MIT" ]
null
null
null
sKoch.py
abcsds/Logo
1a9d69adc48e21df9fbf9f2a3d223cd223c8e9be
[ "MIT" ]
null
null
null
sKoch.py
abcsds/Logo
1a9d69adc48e21df9fbf9f2a3d223cd223c8e9be
[ "MIT" ]
null
null
null
from turtle import * mode('logo') clearscreen() speed(0) def koch(size): if size > 5: koch(size*0.618) lt(90) koch(size*0.618) rt(90) koch(size*0.618) rt(90) koch(size*0.618) lt(90) koch(size*0.618) else: fd(size*0.618) lt(90) fd(size*0.618) rt(90) fd(size*0.618) rt(90) fd(size*0.618) lt(90) fd(size*0.618) clearscreen() lt(90) pu() fd(300) rt(90) bk(200) pd() rt(90) speed(0) koch(50)
13.897436
24
0.468635
88
542
2.886364
0.284091
0.19685
0.314961
0.23622
0.559055
0.559055
0.559055
0.559055
0.559055
0.559055
0
0.211594
0.363469
542
38
25
14.263158
0.524638
0
0
0.714286
0
0
0.00738
0
0
0
0
0
0
1
0.028571
false
0
0.028571
0
0.057143
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e91c82a7ba1a0185f0a953d7d4a7591c8fbbff56
198
py
Python
Python/IntegersComeInAllSizes.py
WinrichSy/HackerRank-Solutions
ed928de50cbbbdf0aee471630f6c04f9a0f69a1f
[ "Apache-2.0" ]
null
null
null
Python/IntegersComeInAllSizes.py
WinrichSy/HackerRank-Solutions
ed928de50cbbbdf0aee471630f6c04f9a0f69a1f
[ "Apache-2.0" ]
null
null
null
Python/IntegersComeInAllSizes.py
WinrichSy/HackerRank-Solutions
ed928de50cbbbdf0aee471630f6c04f9a0f69a1f
[ "Apache-2.0" ]
null
null
null
#Integers Come In All Sizes #https://www.hackerrank.com/challenges/python-integers-come-in-all-sizes/problem a = int(input()) b = int(input()) c = int(input()) d = int(input()) print(a**b + c**d)
19.8
80
0.671717
34
198
3.911765
0.558824
0.240602
0.210526
0.255639
0.330827
0
0
0
0
0
0
0
0.116162
198
9
81
22
0.76
0.530303
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e92f9a8ff441e9c69572dfa4d665798b467df68d
386
py
Python
INBa/2015/SHEMYAKIN_A_V/task_1_31.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
INBa/2015/SHEMYAKIN_A_V/task_1_31.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
INBa/2015/SHEMYAKIN_A_V/task_1_31.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
# Задача 1. Вариант 31. # Напишите программу, которая будет сообщать род деятельности и псевдоним под которым скрывается Эмиль Эрзог. # Shemyakin A.V. # 29.02.2016 input ("Андре Моруа, более известный как Эмиль Саломон Вильгельм Эрзог, французский писатель и член Французской академии. Примечание: впоследствии псевдоним стал его официальным именем.") input ('Press "Enter" to exit')
55.142857
188
0.787565
52
386
5.846154
0.903846
0
0
0
0
0
0
0
0
0
0
0.033233
0.142487
386
6
189
64.333333
0.885196
0.401554
0
0
0
0.5
0.88
0
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
e93578014f4676b1e5a418d5853c4edb04ee151a
162
py
Python
numcodecs/tests/__init__.py
llllllllll/numcodecs
20176a760904ad0d259fde2518a7ba73ca18f0a8
[ "MIT" ]
11
2016-10-16T14:53:00.000Z
2017-07-25T10:27:25.000Z
numcodecs/tests/__init__.py
llllllllll/numcodecs
20176a760904ad0d259fde2518a7ba73ca18f0a8
[ "MIT" ]
40
2016-09-20T20:19:40.000Z
2018-01-03T00:40:37.000Z
numcodecs/tests/__init__.py
llllllllll/numcodecs
20176a760904ad0d259fde2518a7ba73ca18f0a8
[ "MIT" ]
4
2016-10-16T14:53:11.000Z
2019-06-06T04:36:30.000Z
# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, division import pytest pytest.register_assert_rewrite('numcodecs.tests.common')
23.142857
64
0.790123
20
162
6
0.85
0
0
0
0
0
0
0
0
0
0
0.006849
0.098765
162
6
65
27
0.815068
0.12963
0
0
0
0
0.158273
0.158273
0
0
0
0
0.333333
1
0
true
0
0.666667
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
e945df9b72ac2e1363c706cecd2fa7d0b88f709a
128
py
Python
src/reinforcement_learning/env/gym_core_environments/car_racing_advanced/__init__.py
youth-quaker/auto-features-extraction-for-RL
46a541291e38144d0b7e4820b2e33399da166a10
[ "MIT" ]
5
2020-06-07T08:52:31.000Z
2021-02-06T08:07:21.000Z
src/reinforcement_learning/env/gym_core_environments/car_racing_advanced/__init__.py
youth-quaker/auto-features-extraction-for-RL
46a541291e38144d0b7e4820b2e33399da166a10
[ "MIT" ]
null
null
null
src/reinforcement_learning/env/gym_core_environments/car_racing_advanced/__init__.py
youth-quaker/auto-features-extraction-for-RL
46a541291e38144d0b7e4820b2e33399da166a10
[ "MIT" ]
2
2020-06-07T08:54:12.000Z
2021-04-23T08:49:37.000Z
from .car_dynamics import Car try: import Box2D from .car_racing import CarRacing except ImportError: Box2D = None
16
37
0.734375
17
128
5.411765
0.647059
0.152174
0
0
0
0
0
0
0
0
0
0.020202
0.226563
128
7
38
18.285714
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3a6f1c33ed992a1d5b805ae97ba47b3379f3fb75
100
py
Python
src/user_platform/apps.py
allow-cookies/demon
0a62fdbdfbcb9ab5224be747ddf45c968207b51d
[ "MIT" ]
null
null
null
src/user_platform/apps.py
allow-cookies/demon
0a62fdbdfbcb9ab5224be747ddf45c968207b51d
[ "MIT" ]
1
2021-03-31T08:12:05.000Z
2021-03-31T08:12:05.000Z
src/user_platform/apps.py
allow-cookies/demon
0a62fdbdfbcb9ab5224be747ddf45c968207b51d
[ "MIT" ]
null
null
null
from django.apps import AppConfig class UserPlatformConfig(AppConfig): name = "user_platform"
16.666667
36
0.78
11
100
7
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.15
100
5
37
20
0.905882
0
0
0
0
0
0.13
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3a83ca6c362da5f6d9943aa54176b6e1d04ba2dd
661
py
Python
twiml_generator/specificity/__init__.py
TwilioDevEd/twiml-generator
f78cb30602301b358b88c2e53763a775fedaa326
[ "MIT" ]
5
2017-05-24T11:15:23.000Z
2021-07-05T01:08:03.000Z
twiml_generator/specificity/__init__.py
TwilioDevEd/twiml-generator
f78cb30602301b358b88c2e53763a775fedaa326
[ "MIT" ]
88
2018-07-31T15:01:57.000Z
2022-03-30T08:27:16.000Z
twiml_generator/specificity/__init__.py
TwilioDevEd/twiml-generator
f78cb30602301b358b88c2e53763a775fedaa326
[ "MIT" ]
null
null
null
from typing import Type from twiml_generator.specificity.csharp import CSharp from twiml_generator.specificity.java import Java from twiml_generator.specificity.node import Node from twiml_generator.specificity.php import PHP from twiml_generator.specificity.python import Python from twiml_generator.specificity.ruby import Ruby class Specificities: """All languages specificities cleaner""" def __init__(self): self.__languages = [Java, CSharp, Node, PHP, Python, Ruby] def clean(self, generator, language): for lang in self.__languages: if language == lang.__name__.lower(): lang.clean(generator)
31.47619
66
0.748865
81
661
5.888889
0.358025
0.113208
0.226415
0.36478
0
0
0
0
0
0
0
0
0.178517
661
20
67
33.05
0.878453
0.05295
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.5
0
0.714286
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3aa7fc65f683f366bde820f65579d4c5bb2fcb74
4,340
py
Python
p1 - Analytic Queries/scripts/customers_gen.py
Nerucius/1718.databases2
d4b2e81da3cb8210066f40b29f453e0aa0cbc52a
[ "MIT" ]
null
null
null
p1 - Analytic Queries/scripts/customers_gen.py
Nerucius/1718.databases2
d4b2e81da3cb8210066f40b29f453e0aa0cbc52a
[ "MIT" ]
null
null
null
p1 - Analytic Queries/scripts/customers_gen.py
Nerucius/1718.databases2
d4b2e81da3cb8210066f40b29f453e0aa0cbc52a
[ "MIT" ]
null
null
null
#! /usr/bin/python import math, random names = [ "James","Mary","John","Patricia","Robert","Jennifer","Michael","Elizabeth","William","Linda","David","Barbara","Richard","Susan","Joseph","Jessica","Thomas","Margaret","Charles","Sarah","Christopher","Karen","Daniel","Nancy","Matthew","Betty","Anthony","Lisa","Donald","Dorothy","Mark","Sandra","Paul","Ashley","Steven","Kimberly","Andrew","Donna","Kenneth","Carol","George","Michelle","Joshua","Emily","Kevin","Amanda","Brian","Helen","Edward","Melissa","Ronald","Deborah","Timothy","Stephanie","Jason","Laura","Jeffrey","Rebecca","Ryan","Sharon","Gary","Cynthia","Jacob","Kathleen","Nicholas","Amy","Eric","Shirley","Stephen","Anna","Jonathan","Angela","Larry","Ruth","Justin","Brenda","Scott","Pamela","Frank","Nicole","Brandon","Katherine","Raymond","Virginia","Gregory","Catherine","Benjamin","Christine","Samuel","Samantha","Patrick","Debra","Alexander","Janet","Jack","Rachel","Dennis","Carolyn","Jerry","Emma","Tyler","Maria","Aaron","Heather","Henry","Diane","Douglas","Julie","Jose","Joyce","Peter","Evelyn","Adam","Frances","Zachary","Joan","Nathan","Christina","Walter","Kelly","Harold","Victoria","Kyle","Lauren","Carl","Martha","Arthur","Judith","Gerald","Cheryl","Roger","Megan","Keith","Andrea","Jeremy","Ann","Terry","Alice","Lawrence","Jean","Sean","Doris","Christian","Jacqueline","Albert","Kathryn","Joe","Hannah","Ethan","Olivia","Austin","Gloria","Jesse","Marie","Willie","Teresa","Billy","Sara","Bryan","Janice","Bruce","Julia","Jordan","Grace","Ralph","Judy","Roy","Theresa","Noah","Rose","Dylan","Beverly","Eugene","Denise","Wayne","Marilyn","Alan","Amber","Juan","Madison","Louis","Danielle","Russell","Brittany","Gabriel","Diana","Randy","Abigail","Philip","Jane","Harry","Natalie","Vincent","Lori","Bobby","Tiffany","Johnny","Alexis","Logan","Kayla" ] surnames = [ 
"Smith","Johnson","Williams","Jones","Brown","Davis","Miller","Wilson","Moore","Taylor","Anderson","Thomas","Jackson","White","Harris","Martin","Thompson","Garcia","Martinez","Robinson","Clark","Rodriguez","Lewis","Lee","Walker","Hall","Allen","Young","Hernandez","King","Wright","Lopez","Hill","Scott","Green","Adams","Baker","Gonzalez","Nelson","Carter","Mitchell","Perez","Roberts","Turner","Phillips","Campbell","Parker","Evans","Edwards","Collins","Stewart","Sanchez","Morris","Rogers","Reed","Cook","Morgan","Bell","Murphy","Bailey","Rivera","Cooper","Richardson","Cox","Howard","Ward","Torres","Peterson","Gray","Ramirez","James","Watson","Brooks","Kelly","Sanders","Price","Bennett","Wood","Barnes","Ross","Henderson","Coleman","Jenkins","Perry","Powell","Long","Patterson","Hughes","Flores","Washington","Butler","Simmons","Foster","Gonzales","Bryant","Alexander","Russell","Griffin","Diaz","Hayes","Myers","Ford","Hamilton","Graham","Sullivan","Wallace","Woods","Cole","West","Jordan","Owens","Reynolds","Fisher","Ellis","Harrison","Gibson","Mcdonald","Cruz","Marshall","Ortiz","Gomez","Murray","Freeman","Wells","Webb","Simpson","Stevens","Tucker","Porter","Hunter","Hicks","Crawford","Henry","Boyd","Mason","Morales","Kennedy","Warren","Dixon","Ramos","Reyes","Burns","Gordon","Shaw","Holmes","Rice","Robertson","Hunt","Black","Daniels","Palmer","Mills","Nichols","Grant","Knight","Ferguson","Rose","Stone","Hawkins","Dunn","Perkins","Hudson","Spencer","Gardner","Stephens","Payne","Pierce","Berry","Matthews","Arnold","Wagner","Willis","Ray","Watkins","Olson","Carroll","Duncan","Snyder","Hart","Cunningham","Bradley","Lane","Andrews","Ruiz","Harper","Fox","Riley","Armstrong","Carpenter","Weaver","Greene","Lawrence","Elliott","Chavez","Sims","Austin","Peters","Kelley","Franklin","Lawson","Fields","Gutierrez","Ryan","Schmidt","Carr","Vasquez","Castillo","Wheeler","Chapman","Oliver","Montgomery","Richards","Williamson","Johnston","Banks","Meyer","Bishop","Mccoy","Howell","Alvarez","
Morrison","Hansen","Fernandez","Garza","Harvey","Little","Burton","Stanley","Nguyen","George","Jacobs","Reid","Kim","Fuller","Lynch","Dean","Gilbert","Garrett","Romero","Welch","Larson","Frazier","Burke","Hanson","Day","Mendoza","Moreno","Bowman","Medina","Fowler" ] for i in range(1,1000): forename = random.choice(names) surname = random.choice(surnames) name = forename +" "+ surname phone = random.randint(600000000,700000000) print "%d;%s;%d"% (i, name, phone)
228.421053
2,268
0.662442
487
4,340
5.903491
0.948665
0.008348
0
0
0
0
0
0
0
0
0
0.005388
0.016359
4,340
18
2,269
241.111111
0.668072
0.003917
0
0
0
0
0.625174
0
0
0
0
0
0
0
null
null
0
0.076923
null
null
0.076923
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
3ac3f857e248a1515c341f6cb316e6c76a09207a
3,596
py
Python
gpt3_api.py
cvkumar/odqa-experiments
7adff8c7a68aebf8f334e60f2d78eae034365d1e
[ "MIT" ]
null
null
null
gpt3_api.py
cvkumar/odqa-experiments
7adff8c7a68aebf8f334e60f2d78eae034365d1e
[ "MIT" ]
null
null
null
gpt3_api.py
cvkumar/odqa-experiments
7adff8c7a68aebf8f334e60f2d78eae034365d1e
[ "MIT" ]
null
null
null
import openai from constants import OPEN_AI_API_KEY openai.api_key = OPEN_AI_API_KEY sample = "Abraham Lincoln ; February 12, 1809 April 15, 1865 was an American lawyer and statesman who served as the 16th president of the United States from 1861 until his assassination in 1865. Lincoln led the nation through the American Civil War and succeeded in preserving the Union, abolishing slavery, bolstering the federal government, and modernizing the U.S. economy. Lincoln was born into poverty in a log cabin in Kentucky and was raised on the frontier, primarily in Indiana. He was self-educated and became a lawyer, Whig Party leader, Illinois state legislator, and U.S. Congressman from Illinois. In 1849, he returned to his law practice but became vexed by the opening of additional lands to slavery as a result of the Kansas–Nebraska Act of 1854. He reentered politics in 1854, becoming a leader in the new Republican Party, and he reached a national audience in the 1858 Senate campaign debates against Stephen Douglas. Lincoln ran for President in 1860, sweeping the North to gain victory. Pro-slavery elements in the South viewed his success as a threat to slavery, and Southern states began seceding from the Union. To secure its independence, the new Confederate States fired on Fort Sumter, a U.S. fort in South Carolina, and Lincoln called up forces to suppress the rebellion and restore the Union. Lincoln, a moderate Republican, had to navigate a contentious array of factions with friends and opponents from both the Democratic and Republican parties. His allies, the War Democrats and the Radical Republicans, demanded harsh treatment of the Southern Confederates. Anti-war Democrats (called "Copperheads") despised Lincoln, and irreconcilable pro-Confederate elements plotted his assassination. He managed the factions by exploiting their mutual enmity, carefully distributing political patronage, and by appealing to the American people. 
His Gettysburg Address appealed to nationalistic, republican, egalitarian, libertarian, and democratic sentiments. Lincoln supervised the strategy and tactics in the war effort, including the selection of generals, and implemented a naval blockade of the South's trade. He suspended habeas corpus in Maryland, and he averted British intervention by defusing the Trent Affair. He engineered the end to slavery with his Emancipation Proclamation, including his order that the Army and Navy liberate, protect, and recruit former slaves. He also encouraged border states to outlaw slavery, and promoted the Thirteenth Amendment to the United States Constitution, which outlawed slavery across the country. Lincoln managed his own successful re-election campaign. He sought to heal the war-torn nation through reconciliation. On April 14, 1865, just days after the war's end at Appomattox, he was attending a play at Ford's Theatre in Washington, D.C., with his wife Mary when he was fatally shot by Confederate sympathizer John Wilkes Booth. Lincoln is remembered as a martyr and hero of the United States and is often ranked as the greatest president in American history." result = openai.Answer.create( search_model="ada", model="curie", question='test', documents='testing', examples_context=sample, examples=[["Who was the 16th president of the United States?", "Abraham Lincoln"], ["Through what major war did Abraham Lincoln serve as United States president?", "American Civil War"], ["What year was Abraham Lincoln killed?", "1865"]], max_rerank=3, max_tokens=5, stop=["\n", "<|endoftext|>"] )
179.8
3,037
0.797275
555
3,596
5.147748
0.526126
0.010501
0.021001
0.017851
0.023101
0.023101
0.023101
0
0
0
0
0.018494
0.157953
3,596
19
3,038
189.263158
0.924703
0
0
0
0
0.133333
0.902392
0
0
0
0
0
0
0
null
null
0
0.133333
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
3acf2faae3f1a75165f6d0119154abae78706006
31
py
Python
python/apps/tools/tests/__init__.py
matihost/monorepo
6822e48b3389f7977b9ba14827028275f1492c14
[ "MIT" ]
2
2020-11-13T06:58:49.000Z
2022-03-10T12:41:33.000Z
python/apps/tools/tests/__init__.py
matihost/monorepo
6822e48b3389f7977b9ba14827028275f1492c14
[ "MIT" ]
null
null
null
python/apps/tools/tests/__init__.py
matihost/monorepo
6822e48b3389f7977b9ba14827028275f1492c14
[ "MIT" ]
2
2019-02-15T11:55:42.000Z
2020-11-13T06:59:20.000Z
"""Tests for tools package."""
15.5
30
0.645161
4
31
5
1
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
1
31
31
0.740741
0.774194
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
3aeeee8c10662384c592268d3f789f4a372aac36
59
py
Python
tests/__init__.py
levibaba/pytiled_parser
c0d464359a7255e1c764a623b8b472ab1fe98cc6
[ "MIT" ]
3
2019-08-15T16:46:37.000Z
2020-05-31T03:33:51.000Z
tests/__init__.py
levibaba/pytiled_parser
c0d464359a7255e1c764a623b8b472ab1fe98cc6
[ "MIT" ]
null
null
null
tests/__init__.py
levibaba/pytiled_parser
c0d464359a7255e1c764a623b8b472ab1fe98cc6
[ "MIT" ]
null
null
null
import pytest pytest.main(["--tb=native", "-s", "tests"])
14.75
43
0.610169
8
59
4.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.101695
59
3
44
19.666667
0.679245
0
0
0
0
0
0.305085
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
c90b5beab7298cc40d7e9e55287ab2fa071828b0
210
py
Python
parts/__init__.py
aprilis/donkey_lite
ea27961410a683d6bbc2d6a3f08b2a87a3c01c3f
[ "MIT" ]
1
2018-10-22T13:22:54.000Z
2018-10-22T13:22:54.000Z
parts/__init__.py
aprilis/donkey_lite
ea27961410a683d6bbc2d6a3f08b2a87a3c01c3f
[ "MIT" ]
null
null
null
parts/__init__.py
aprilis/donkey_lite
ea27961410a683d6bbc2d6a3f08b2a87a3c01c3f
[ "MIT" ]
3
2018-09-25T16:08:25.000Z
2020-02-28T14:09:33.000Z
from .actuator import BluePill, CarStatus from .camera import PiCamera, FakeCamera from .data import TubWriter, TubReader from .pilot import KerasPilot from .timer import Timer from .web import WebStatus
30
42
0.8
27
210
6.222222
0.592593
0
0
0
0
0
0
0
0
0
0
0
0.157143
210
6
43
35
0.949153
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
c91dc2106c6349527666712095c1e9fb811cf1cd
272
py
Python
pywikc/__init__.py
RESSLab-Team/WIKC
5e54da2e402f571a8a3b14f37e3a2a4c0699d179
[ "MIT" ]
null
null
null
pywikc/__init__.py
RESSLab-Team/WIKC
5e54da2e402f571a8a3b14f37e3a2a4c0699d179
[ "MIT" ]
null
null
null
pywikc/__init__.py
RESSLab-Team/WIKC
5e54da2e402f571a8a3b14f37e3a2a4c0699d179
[ "MIT" ]
1
2022-03-07T18:21:57.000Z
2022-03-07T18:21:57.000Z
from . import imperfections from .abaqus_i_coupling_writer import AbaqusICouplingWriter from .component_reader import AbaqusInpToComponentReader from .processing import gen_aba_couples_imperfections, gen_aba_couples, gen_aba_imperfections from .dir_maker import dir_maker
45.333333
93
0.893382
34
272
6.764706
0.5
0.078261
0.113043
0
0
0
0
0
0
0
0
0
0.080882
272
5
94
54.4
0.92
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
c924f28bc1c2b2e2500f94d3dd1aabe374285975
43
py
Python
extract/__init__.py
gzhang2016/dma-v2-new
7d91277426d7b5cde677672f5b13b677ac53492d
[ "BSD-3-Clause" ]
1
2019-01-27T18:55:41.000Z
2019-01-27T18:55:41.000Z
extract/__init__.py
gzhang2016/dma-v2
0ed23e3ccefd93b710934966e4bfec02f369f469
[ "BSD-3-Clause" ]
null
null
null
extract/__init__.py
gzhang2016/dma-v2
0ed23e3ccefd93b710934966e4bfec02f369f469
[ "BSD-3-Clause" ]
null
null
null
__all__ = ['sql_from_db', 'sql_from_file']
21.5
42
0.72093
7
43
3.285714
0.714286
0.608696
0
0
0
0
0
0
0
0
0
0
0.093023
43
1
43
43
0.589744
0
0
0
0
0
0.55814
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c937a83caf826cbb87c729bf7c488a7774916931
79
py
Python
zentral/contrib/okta/__init__.py
gwhitehawk/zentral
156134aed3d7ff8a7cb40ab6f2269a763c316459
[ "Apache-2.0" ]
634
2015-10-30T00:55:40.000Z
2022-03-31T02:59:00.000Z
zentral/contrib/okta/__init__.py
gwhitehawk/zentral
156134aed3d7ff8a7cb40ab6f2269a763c316459
[ "Apache-2.0" ]
145
2015-11-06T00:17:33.000Z
2022-03-16T13:30:31.000Z
zentral/contrib/okta/__init__.py
gwhitehawk/zentral
156134aed3d7ff8a7cb40ab6f2269a763c316459
[ "Apache-2.0" ]
103
2015-11-07T07:08:49.000Z
2022-03-18T17:34:36.000Z
# django default_app_config = "zentral.contrib.okta.apps.ZentralOktaAppConfig"
26.333333
69
0.835443
9
79
7.111111
1
0
0
0
0
0
0
0
0
0
0
0
0.063291
79
2
70
39.5
0.864865
0.075949
0
0
0
0
0.647887
0.647887
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c94a7118aeed9af98ddebd20084d1505a7c8b6a1
138
py
Python
tests/test_model_db.py
donghak-shin/dp-tornado
095bb293661af35cce5f917d8a2228d273489496
[ "MIT" ]
18
2015-04-07T14:28:39.000Z
2020-02-08T14:03:38.000Z
tests/test_model_db.py
donghak-shin/dp-tornado
095bb293661af35cce5f917d8a2228d273489496
[ "MIT" ]
7
2016-10-05T05:14:06.000Z
2021-05-20T02:07:22.000Z
tests/test_model_db.py
donghak-shin/dp-tornado
095bb293661af35cce5f917d8a2228d273489496
[ "MIT" ]
11
2015-12-15T09:49:39.000Z
2021-09-06T18:38:21.000Z
# -*- coding: utf-8 -*- import uuid from . import utils def mysql(): utils.expecting_text('get', '/model/db/mysql', 'done', 200)
12.545455
63
0.608696
19
138
4.368421
0.842105
0
0
0
0
0
0
0
0
0
0
0.035714
0.188406
138
10
64
13.8
0.705357
0.152174
0
0
0
0
0.191304
0
0
0
0
0
0
1
0.25
true
0
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
0
0
0
4
c951646594a389d0103d1ea83aac58a5a58f8580
109
py
Python
cgi-bin/pydeliciouslibs/__init__.py
datadreamer/research-chronology-revisited
951bcdda19e69bdb53de16206cad0515251953a1
[ "MIT" ]
null
null
null
cgi-bin/pydeliciouslibs/__init__.py
datadreamer/research-chronology-revisited
951bcdda19e69bdb53de16206cad0515251953a1
[ "MIT" ]
null
null
null
cgi-bin/pydeliciouslibs/__init__.py
datadreamer/research-chronology-revisited
951bcdda19e69bdb53de16206cad0515251953a1
[ "MIT" ]
null
null
null
## # License: pydelicious is released under the bsd license. # See 'license.txt' for more informations. #
15.571429
58
0.715596
14
109
5.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.183486
109
6
59
18.166667
0.876404
0.889908
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
c9745715319b6fd8fc942d532a8bd7c3a2f88a20
157
py
Python
monstro/forms/__init__.py
bindlock/monstro
f7715426a0933f9ad3d0df73095ef735b20861fc
[ "MIT" ]
null
null
null
monstro/forms/__init__.py
bindlock/monstro
f7715426a0933f9ad3d0df73095ef735b20861fc
[ "MIT" ]
6
2016-08-31T09:15:55.000Z
2017-05-13T12:01:40.000Z
monstro/forms/__init__.py
pyvim/monstro
f7715426a0933f9ad3d0df73095ef735b20861fc
[ "MIT" ]
null
null
null
from monstro.utils import Choices from .fields import * # pylint: disable=W0401 from .forms import Form, ModelForm from .exceptions import ValidationError
26.166667
46
0.802548
20
157
6.3
0.7
0
0
0
0
0
0
0
0
0
0
0.02963
0.140127
157
5
47
31.4
0.903704
0.133758
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
a311198dc19da3217fd5c3c207dec550bdea8266
97
py
Python
trainer/__init__.py
kaylode/custom-template
9cea501fe0fa1b90cd468d12a6906f531aa66ab1
[ "MIT" ]
12
2021-02-06T19:27:57.000Z
2021-12-13T01:33:03.000Z
trainer/__init__.py
kaylode/custom-template
9cea501fe0fa1b90cd468d12a6906f531aa66ab1
[ "MIT" ]
6
2021-05-23T13:34:01.000Z
2022-02-12T06:06:53.000Z
trainer/__init__.py
kaylode/custom-template
9cea501fe0fa1b90cd468d12a6906f531aa66ab1
[ "MIT" ]
7
2021-04-02T06:59:03.000Z
2021-11-20T07:19:30.000Z
from .checkpoint import Checkpoint, load_checkpoint, get_epoch_iters from .trainer import Trainer
48.5
68
0.865979
13
97
6.230769
0.615385
0
0
0
0
0
0
0
0
0
0
0
0.092784
97
2
69
48.5
0.920455
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
a32fb845d18e582a9ac799b13be88333649c03ed
297
py
Python
testhelpers/__init__.py
wtsi-hgi/python-test-helpers
1118b0bd31940cde9f005f7bb3fb1aea5ea38ef4
[ "MIT" ]
null
null
null
testhelpers/__init__.py
wtsi-hgi/python-test-helpers
1118b0bd31940cde9f005f7bb3fb1aea5ea38ef4
[ "MIT" ]
null
null
null
testhelpers/__init__.py
wtsi-hgi/python-test-helpers
1118b0bd31940cde9f005f7bb3fb1aea5ea38ef4
[ "MIT" ]
null
null
null
from testhelpers.generator import TestUsingType, create_tests, get_classes_to_test, \ TEST_LATEST_ONLY_ENVIRONMENT_VARIABLE_SET_VALUE, TestUsingObject, create_tests_using_objects, \ create_tests_using_types, TypeUsedInTest, ObjectTypeUsedInTest, TEST_LATEST_ONLY_ENVIRONMENT_VARIABLE_NAME
74.25
110
0.882155
35
297
6.885714
0.685714
0.136929
0.116183
0.207469
0.273859
0
0
0
0
0
0
0
0.077441
297
3
111
99
0.879562
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
a34a2fad274c87979bbc16c0bf09500e48c77547
1,703
py
Python
openpnm/topotools/__init__.py
bryanwweber/OpenPNM
0547b5724ffedc0a593aae48639d36fe10e0baed
[ "MIT" ]
1
2021-02-19T18:16:21.000Z
2021-02-19T18:16:21.000Z
openpnm/topotools/__init__.py
kvmkrao/OpenPNM
0547b5724ffedc0a593aae48639d36fe10e0baed
[ "MIT" ]
null
null
null
openpnm/topotools/__init__.py
kvmkrao/OpenPNM
0547b5724ffedc0a593aae48639d36fe10e0baed
[ "MIT" ]
null
null
null
r""" **openpnm.topotools** ---- This module contains a selection of functions that deal specifically with network topology. """ from .topotools import add_boundary_pores from .topotools import bond_percolation from .topotools import clone_pores from .topotools import connect_pores from .topotools import extend from .topotools import find_path from .topotools import find_surface_pores from .topotools import find_neighbor_sites from .topotools import find_neighbor_bonds from .topotools import find_connected_sites from .topotools import find_connecting_bonds from .topotools import find_pore_to_pore_distance from .topotools import find_clusters from .topotools import find_complement from .topotools import generate_base_points from .topotools import iscoplanar from .topotools import isoutside from .topotools import issymmetric from .topotools import ispercolating from .topotools import istriu from .topotools import istril from .topotools import istriangular from .topotools import label_faces from .topotools import merge_networks from .topotools import merge_pores from .topotools import plot_connections from .topotools import plot_coordinates from .topotools import plot_networkx from .topotools import reduce_coordination from .topotools import reflect_base_points from .topotools import remove_isolated_clusters from .topotools import site_percolation from .topotools import stitch from .topotools import subdivide from .topotools import template_cylinder_annulus from .topotools import template_sphere_shell from .topotools import trim from .topotools import trim_occluded_throats from .topotools import vor_to_am from .topotools import tri_to_am from .topotools import conns_to_am
31.537037
73
0.856723
228
1,703
6.188596
0.337719
0.377746
0.552091
0.146704
0.180723
0
0
0
0
0
0
0
0.109806
1,703
53
74
32.132075
0.930739
0.070464
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.97619
0
0.97619
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
a359aeca97e5cbfc2b5011677a360c6fdf64178d
54
py
Python
nautobot_ssot_servicenow/tests/__init__.py
nautobot/nautobot-plugin-ssot-servicenow
9c87f40e173cf2accdcae63da5a515199ab28aaa
[ "Apache-2.0" ]
2
2022-01-25T18:37:15.000Z
2022-03-15T14:48:02.000Z
nautobot_ssot_servicenow/tests/__init__.py
nautobot/nautobot-plugin-ssot-servicenow
9c87f40e173cf2accdcae63da5a515199ab28aaa
[ "Apache-2.0" ]
1
2022-01-14T17:21:18.000Z
2022-01-14T17:21:18.000Z
nautobot_ssot_servicenow/tests/__init__.py
nautobot/nautobot-plugin-ssot-servicenow
9c87f40e173cf2accdcae63da5a515199ab28aaa
[ "Apache-2.0" ]
1
2022-03-15T14:48:03.000Z
2022-03-15T14:48:03.000Z
"""Unit tests for nautobot_ssot_servicenow plugin."""
27
53
0.777778
7
54
5.714286
1
0
0
0
0
0
0
0
0
0
0
0
0.092593
54
1
54
54
0.816327
0.87037
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
a36c0657a2cbffc975cf121f5bdc2d691d31a7f1
147
py
Python
src/testphone/Game.py
JunYinghu/appium-test-automation
848f1d7426ce14f53f656c0fe161c0c9bee44364
[ "MIT" ]
null
null
null
src/testphone/Game.py
JunYinghu/appium-test-automation
848f1d7426ce14f53f656c0fe161c0c9bee44364
[ "MIT" ]
null
null
null
src/testphone/Game.py
JunYinghu/appium-test-automation
848f1d7426ce14f53f656c0fe161c0c9bee44364
[ "MIT" ]
null
null
null
class game(object): def gamce(self): x = raw_input() (i,j) = map(int,raw_input(x).split()) print i return i
14.7
45
0.496599
21
147
3.380952
0.761905
0.225352
0
0
0
0
0
0
0
0
0
0
0.360544
147
9
46
16.333333
0.755319
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.166667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
a384685f651f833160ab5613cc71c9d41ac94b98
101
py
Python
django_todo/app/user/forms.py
OceanOver/DjangoTodo
dac88da9964bac5e2c9b8edf06436bea2fb4c104
[ "MIT" ]
1
2020-08-12T07:53:44.000Z
2020-08-12T07:53:44.000Z
django_todo/app/user/forms.py
OceanOver/DjangoTodo
dac88da9964bac5e2c9b8edf06436bea2fb4c104
[ "MIT" ]
null
null
null
django_todo/app/user/forms.py
OceanOver/DjangoTodo
dac88da9964bac5e2c9b8edf06436bea2fb4c104
[ "MIT" ]
null
null
null
from django import forms class ProfileForm(forms.Form): picture = forms.ImageField(label='图片')
16.833333
42
0.742574
13
101
5.769231
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.148515
101
5
43
20.2
0.872093
0
0
0
0
0
0.019802
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
6e60479ce7891d8b049e08e5a7f4d43465ee2a48
32
py
Python
Python/Topics/Shorthands/A part of something bigger/main.py
drtierney/hyperskill-problems
b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0
[ "MIT" ]
5
2020-08-29T15:15:31.000Z
2022-03-01T18:22:34.000Z
Python/Topics/Shorthands/A part of something bigger/main.py
drtierney/hyperskill-problems
b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0
[ "MIT" ]
null
null
null
Python/Topics/Shorthands/A part of something bigger/main.py
drtierney/hyperskill-problems
b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0
[ "MIT" ]
1
2020-12-02T11:13:14.000Z
2020-12-02T11:13:14.000Z
import re regex = r'python\B'
6.4
19
0.65625
6
32
3.5
1
0
0
0
0
0
0
0
0
0
0
0
0.21875
32
4
20
8
0.84
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
6e713041c2c8fea046d6219dfb77c0a962359d33
82
py
Python
frontend/pwfrontend/main/forms.py
Quinn-With-Two-Ns/psychic-waffle
b71ad500249158372c919da339c2664098ca69bf
[ "MIT" ]
null
null
null
frontend/pwfrontend/main/forms.py
Quinn-With-Two-Ns/psychic-waffle
b71ad500249158372c919da339c2664098ca69bf
[ "MIT" ]
null
null
null
frontend/pwfrontend/main/forms.py
Quinn-With-Two-Ns/psychic-waffle
b71ad500249158372c919da339c2664098ca69bf
[ "MIT" ]
null
null
null
from django import forms class NameForm(forms.Form): name = forms.CharField()
20.5
28
0.743902
11
82
5.545455
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.158537
82
4
28
20.5
0.884058
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
6e75282845eaadc0a987fbc3b99a3e0ee7d78e20
32
py
Python
confpy/options/__init__.py
kevinconway/confpy
edd4f1f91e491c10fb7cbc8aab60d7f59fba96a1
[ "MIT" ]
2
2018-03-14T05:05:28.000Z
2018-04-20T05:09:04.000Z
confpy/options/__init__.py
kevinconway/confpy
edd4f1f91e491c10fb7cbc8aab60d7f59fba96a1
[ "MIT" ]
1
2015-12-17T10:14:50.000Z
2019-08-25T03:02:44.000Z
confpy/options/__init__.py
kevinconway/confpy
edd4f1f91e491c10fb7cbc8aab60d7f59fba96a1
[ "MIT" ]
null
null
null
"""Validated option modules."""
16
31
0.6875
3
32
7.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.09375
32
1
32
32
0.758621
0.78125
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6eb9a4cc150a40046b9708396233c228584f160c
97
py
Python
metlink_status/parser/__init__.py
finncodes/metlink-status
5e0a08127ebf3c8a7cbbf3f9d448b52e16314492
[ "MIT" ]
null
null
null
metlink_status/parser/__init__.py
finncodes/metlink-status
5e0a08127ebf3c8a7cbbf3f9d448b52e16314492
[ "MIT" ]
null
null
null
metlink_status/parser/__init__.py
finncodes/metlink-status
5e0a08127ebf3c8a7cbbf3f9d448b52e16314492
[ "MIT" ]
null
null
null
from .api_key_parser import get_opendata_api_key from .route_parser import parse_informed_entity
32.333333
48
0.896907
16
97
4.9375
0.6875
0.151899
0
0
0
0
0
0
0
0
0
0
0.082474
97
2
49
48.5
0.88764
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
6ebcd387d30977c4472a373ff66587d93043b1f6
860
py
Python
version3/python/nist384.py
kirk-baird/amcl
d936b54c991ede110eee8a3c89bead13106168dd
[ "Apache-2.0" ]
72
2016-05-23T17:06:30.000Z
2021-12-17T16:34:32.000Z
version3/python/nist384.py
kirk-baird/amcl
d936b54c991ede110eee8a3c89bead13106168dd
[ "Apache-2.0" ]
37
2016-11-30T14:53:10.000Z
2021-05-18T16:54:36.000Z
version3/python/nist384.py
kirk-baird/amcl
d936b54c991ede110eee8a3c89bead13106168dd
[ "Apache-2.0" ]
28
2016-05-24T22:43:47.000Z
2021-11-10T17:52:36.000Z
# NIST384 curve constants from constants import * SHA = 'sha384' # hash type to use with this curve EFS = 48 # elliptic curve field size in bytes CurveType = WEIERSTRASS # field modulus p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319 r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643 # group order # elliptic curve A = -3 B = 27580193559959705877849011840389048093056905856361568521428707301988689241309860865136260764883745107765439761230575 # generator point Gx = 26247035095799689268623156744566981891852923491109213387815615900925518854738050089022388053975719786650872476732087 Gy = 8325710961489029985546751289520108179287853048861315594709205902480503199884419224438643760392947333078086511627871
43
137
0.890698
45
860
17.022222
0.866667
0.033943
0
0
0
0
0
0
0
0
0
0.748092
0.086047
860
19
138
45.263158
0.226463
0.172093
0
0
0
0
0.008523
0
0
1
0
0
0
1
0
false
0
0.1
0
0.1
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
6ebfa15aa657fadb938ea45f50e4f8ac357c7efc
63
py
Python
src/record_keeper/module/__init__.py
williamwissemann/record-keeper
b28775a348f400a98f54b6521ce57297ba538861
[ "MIT" ]
3
2019-03-08T00:03:50.000Z
2021-02-20T02:50:39.000Z
src/record_keeper/module/pvp_iv/__init__.py
williamwissemann/record-keeper
b28775a348f400a98f54b6521ce57297ba538861
[ "MIT" ]
null
null
null
src/record_keeper/module/pvp_iv/__init__.py
williamwissemann/record-keeper
b28775a348f400a98f54b6521ce57297ba538861
[ "MIT" ]
null
null
null
"""Messaging modules which handle discord command messages."""
31.5
62
0.777778
7
63
7
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
63
1
63
63
0.875
0.888889
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6ec791abcb83fad9b2c863ad7d3842276e0cb70a
125
py
Python
2760.py
gabzin/beecrowd
177bdf3f87bacfd924bd031a973b8db877379fe5
[ "MIT" ]
3
2021-12-15T20:27:14.000Z
2022-03-01T12:30:08.000Z
2760.py
gabzin/uri
177bdf3f87bacfd924bd031a973b8db877379fe5
[ "MIT" ]
null
null
null
2760.py
gabzin/uri
177bdf3f87bacfd924bd031a973b8db877379fe5
[ "MIT" ]
null
null
null
s1=input() s2=input() s3=input() print(s1+s2+s3) print(s2+s3+s1) print(s3+s1+s2) print('%s%s%s'%(s1[:10], s2[:10], s3[:10]))
15.625
43
0.592
28
125
2.642857
0.25
0.108108
0
0
0
0
0
0
0
0
0
0.181034
0.072
125
7
44
17.857143
0.456897
0
0
0
0
0
0.048
0
0
0
0
0
0
1
0
false
0
0
0
0
0.571429
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
6ee4bffef750765f74529b5fe981e8ac57cf8d13
3,622
py
Python
web/impact/impact/v1/views/__init__.py
masschallenge/impact-api
81075ced8fcc95de9390dd83c15e523e67fc48c0
[ "MIT" ]
5
2017-10-19T15:11:52.000Z
2020-03-08T07:16:21.000Z
web/impact/impact/v1/views/__init__.py
masschallenge/impact-api
81075ced8fcc95de9390dd83c15e523e67fc48c0
[ "MIT" ]
182
2017-06-21T19:32:13.000Z
2021-03-22T13:38:16.000Z
web/impact/impact/v1/views/__init__.py
masschallenge/impact-api
81075ced8fcc95de9390dd83c15e523e67fc48c0
[ "MIT" ]
1
2018-06-23T11:53:18.000Z
2018-06-23T11:53:18.000Z
# MIT License # Copyright (c) 2017 MassChallenge, Inc. from impact.v1.views.impact_view import ImpactView from impact.v1.views.allocate_applications_view import ( ALREADY_ASSIGNED_ERROR, AllocateApplicationsView, find_criterion_helpers, JUDGING_ROUND_INACTIVE_ERROR, NO_APP_LEFT_FOR_JUDGE, NO_DATA_FOR_JUDGE, ) from impact.v1.views.analyze_judging_round_view import AnalyzeJudgingRoundView from impact.v1.views.base_list_view import INVALID_IS_ACTIVE_ERROR from impact.v1.views.cancel_office_hour_reservation_view import ( CancelOfficeHourReservationView, formatted_success_notification, NO_SUCH_RESERVATION, NO_SUCH_OFFICE_HOUR, SUCCESS_NOTIFICATION, ) from impact.v1.views.clone_criteria_view import ( CloneCriteriaView, SOURCE_JUDGING_ROUND_KEY, TARGET_JUDGING_ROUND_KEY, ) from impact.v1.views.application_detail_view import ApplicationDetailView from impact.v1.views.application_list_view import ApplicationListView from impact.v1.views.credit_code_detail_view import CreditCodeDetailView from impact.v1.views.credit_code_list_view import CreditCodeListView from impact.v1.views.criterion_detail_view import CriterionDetailView from impact.v1.views.criterion_list_view import CriterionListView from impact.v1.views.criterion_option_spec_list_view import ( CriterionOptionSpecListView, ) from impact.v1.views.criterion_option_spec_detail_view import ( CriterionOptionSpecDetailView, ) from impact.v1.views.functional_expertise_detail_view import ( FunctionalExpertiseDetailView ) from impact.v1.views.functional_expertise_list_view import ( FunctionalExpertiseListView ) from impact.v1.views.industry_detail_view import IndustryDetailView from impact.v1.views.industry_list_view import IndustryListView from impact.v1.views.judging_round_criteria_header_view import ( JudgingRoundCriteriaHeaderView, ) from impact.v1.views.judging_round_detail_view import JudgingRoundDetailView from impact.v1.views.judging_round_list_view import ( INVALID_ROUND_TYPE_ERROR, JudgingRoundListView, ) from 
impact.v1.views.office_hours_calendar_view import ( ISO_8601_DATE_FORMAT, OfficeHoursCalendarView, ) from impact.v1.views.organization_detail_view import OrganizationDetailView from impact.v1.views.organization_history_view import OrganizationHistoryView from impact.v1.views.organization_list_view import OrganizationListView from impact.v1.views.organization_users_view import OrganizationUsersView from impact.v1.views.post_mixin import PostMixin from impact.v1.views.program_cycle_detail_view import ProgramCycleDetailView from impact.v1.views.program_cycle_list_view import ProgramCycleListView from impact.v1.views.program_detail_view import ProgramDetailView from impact.v1.views.program_family_detail_view import ProgramFamilyDetailView from impact.v1.views.program_family_list_view import ProgramFamilyListView from impact.v1.views.program_list_view import ProgramListView from impact.v1.views.reserve_office_hour_view import ReserveOfficeHourView from impact.v1.views.user_confidential_view import UserConfidentialView from impact.v1.views.user_detail_view import UserDetailView from impact.v1.views.user_history_view import UserHistoryView from impact.v1.views.user_list_view import UserListView from impact.v1.views.user_organizations_view import UserOrganizationsView from impact.v1.views.cancel_office_hour_session_view import ( CancelOfficeHourSessionView, ) from impact.v1.views.mentor_participation_view import ( MentorParticipationView ) from impact.v1.views.office_hour_view import ( OfficeHourViewSet )
41.632184
78
0.861126
456
3,622
6.528509
0.267544
0.141082
0.169298
0.239839
0.316426
0.157541
0.046355
0
0
0
0
0.015156
0.089177
3,622
86
79
42.116279
0.887239
0.013805
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.518519
0
0.518519
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
6ee94fc56a5583998b4c890b64ea61a3fbd895d5
281
py
Python
community_supplied/Security/CGNAT/get-float-from-percentage-string.py
vvikramb/healthbot-rules
72bdad144bebb512e9ac32d607b5924d96225334
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
community_supplied/Security/CGNAT/get-float-from-percentage-string.py
vvikramb/healthbot-rules
72bdad144bebb512e9ac32d607b5924d96225334
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
community_supplied/Security/CGNAT/get-float-from-percentage-string.py
vvikramb/healthbot-rules
72bdad144bebb512e9ac32d607b5924d96225334
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
from __future__ import division import re ''' This function returns float value of a percentage string ''' def get_float_from_percentage_string(percentage_string, **kwargs): match_value = re.search(r"(\d+\.\d+)\s+\%", percentage_string) return float(match_value.group(1))
28.1
66
0.747331
40
281
4.95
0.625
0.323232
0
0
0
0
0
0
0
0
0
0.004065
0.124555
281
9
67
31.222222
0.800813
0
0
0
0
0
0.069124
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
42db6b3daacddf6288d9810f964d412998a1912f
59
py
Python
src/westpa/westext/hamsm_restarting/__init__.py
jdrusso/westpa
676fdafe23b4ae8229d311b01df051ecde5b331c
[ "MIT" ]
null
null
null
src/westpa/westext/hamsm_restarting/__init__.py
jdrusso/westpa
676fdafe23b4ae8229d311b01df051ecde5b331c
[ "MIT" ]
null
null
null
src/westpa/westext/hamsm_restarting/__init__.py
jdrusso/westpa
676fdafe23b4ae8229d311b01df051ecde5b331c
[ "MIT" ]
null
null
null
from . import restart_driver __all__ = ['restart_driver']
14.75
28
0.762712
7
59
5.571429
0.714286
0.666667
0
0
0
0
0
0
0
0
0
0
0.135593
59
3
29
19.666667
0.764706
0
0
0
0
0
0.237288
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
42de697e7678efa885f63886f75f608009bef4de
1,638
py
Python
tests/test_greedy.py
TheJoeSmo/convertible
e1b8b727dfc3becf684f41e9067887ae0a1dc9f0
[ "MIT" ]
null
null
null
tests/test_greedy.py
TheJoeSmo/convertible
e1b8b727dfc3becf684f41e9067887ae0a1dc9f0
[ "MIT" ]
null
null
null
tests/test_greedy.py
TheJoeSmo/convertible
e1b8b727dfc3becf684f41e9067887ae0a1dc9f0
[ "MIT" ]
null
null
null
from typing import List from convertible import convert, Convertible from convertible.Convertible.Greedy import Greedy from convertible.Convert.ConvertHandler.ConvertHandler import ConvertHandler class Test(Convertible): def __repr__(self) -> str: return f"{self.__class__.__name__}()" def convert(self, argument: int) -> str: return str(argument) def test_class_arg(): class Foo: @convert(ConvertHandler(Greedy(Test()))) def test(self, args: List[str]) -> list[str]: return args assert [str(1)] == Foo().test(1) def test_class_args(): class Foo: @convert(ConvertHandler(Greedy(Test()))) def test(self, args: List[str]) -> List[str]: return args assert [str(1), str(2)] == Foo().test(1, 2) def test_class_both(): class Foo: @convert(ConvertHandler(Greedy(Test()), test=Test())) def test(self, args: List[str], test: str) -> List[str]: return [test] + args assert [str(1), str(2), str(3)] == Foo().test(2, 3, test=1) def test_function_arg(): @convert(ConvertHandler(Greedy(Test()))) def test(args: List[str]) -> List[str]: return args assert [str(1)] == test(1) def test_function_args(): @convert(ConvertHandler(Greedy(Test()))) def test(args: List[str]) -> List[str]: return args assert [str(1), str(2)] == test(1, 2) def test_function_both(): @convert(ConvertHandler(Greedy(Test()), test=Test())) def test(args: List[str], test: str) -> List[str]: return [test] + args assert [str(1), str(2), str(3)] == test(2, 3, test=1)
25.2
76
0.614164
217
1,638
4.525346
0.129032
0.08554
0.164969
0.189409
0.648676
0.564155
0.556008
0.551935
0.458248
0.458248
0
0.018809
0.221001
1,638
64
77
25.59375
0.750784
0
0
0.404762
0
0
0.016484
0.016484
0
0
0
0
0.142857
1
0.333333
false
0
0.095238
0.190476
0.714286
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
6e547da6ed4d04fcb86dad766f93cb62c0de4267
4,810
py
Python
tools/create_pkgsrc_csv.py
kiaderouiche/netbsd-branch-info
ea9b7c8e6d163caf1c0df5b0c98d7e8a1079a635
[ "MIT" ]
null
null
null
tools/create_pkgsrc_csv.py
kiaderouiche/netbsd-branch-info
ea9b7c8e6d163caf1c0df5b0c98d7e8a1079a635
[ "MIT" ]
null
null
null
tools/create_pkgsrc_csv.py
kiaderouiche/netbsd-branch-info
ea9b7c8e6d163caf1c0df5b0c98d7e8a1079a635
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # ''' http://www.netbsd.org/about/history.html https://www.netbsd.org/releases/release-map.html ''' import csv with open('pkgsrc.csv', mode='w') as pkgsrc_f: netbsd_writer = csv.writer(pkgsrc_f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) netbsd_writer.writerow(['version', 'branch', 'release', 'eos']) netbsd_writer.writerow(['2007Q1', 'pkgsrc-2007Q2', '05-01-2008', '',]) netbsd_writer.writerow(['2007Q2', 'pkgsrc-2007Q3', '05-01-2008', '',]) netbsd_writer.writerow(['2007Q3', 'pkgsrc-2007Q4', '05-01-2008', '',]) netbsd_writer.writerow(['2007Q4', 'pkgsrc-2007Q4', '05-01-2008', '',]) netbsd_writer.writerow(['2008Q1', 'pkgsrc-2008Q1', '02-01-2008', '',]) netbsd_writer.writerow(['2008Q2', 'pkgsrc-2008Q2', '02-01-2008', '',]) netbsd_writer.writerow(['2008Q3', 'pkgsrc-2008Q3', '02-01-2008', '',]) netbsd_writer.writerow(['2008Q4', 'pkgsrc-2008Q4', '02-01-2008', '',]) netbsd_writer.writerow(['2009Q1', 'pkgsrc-2009Q1', '02-01-2008', '',]) netbsd_writer.writerow(['2009Q2', 'pkgsrc-2009Q2', '02-01-2008', '',]) netbsd_writer.writerow(['2009Q3', 'pkgsrc-2009Q3', '02-01-2008', '',]) netbsd_writer.writerow(['2009Q4', 'pkgsrc-2009Q4', '02-01-2008', '',]) netbsd_writer.writerow(['2010Q1', 'pkgsrc-2010Q1', '20-04-2010', '',]) netbsd_writer.writerow(['2010Q2', 'pkgsrc-2010Q2', '02-01-2015', '',]) netbsd_writer.writerow(['2010Q3', 'pkgsrc-2010Q3', '02-01-2015', '',]) netbsd_writer.writerow(['2010Q4', 'pkgsrc-2010Q4', '02-01-2015', '',]) netbsd_writer.writerow(['2011Q1', 'pkgsrc-2011Q1', '06-04-2011', '',]) netbsd_writer.writerow(['2011Q2', 'pkgsrc-2011Q2', '06-04-2011', '',]) netbsd_writer.writerow(['2011Q3', 'pkgsrc-2011Q3', '06-04-2011', '',]) netbsd_writer.writerow(['2011Q4', 'pkgsrc-2011Q4', '03-10-2011', '',]) netbsd_writer.writerow(['2012Q1', 'pkgsrc-2012Q1', '07-04-2012', '',]) netbsd_writer.writerow(['2012Q2', 'pkgsrc-2012Q2', '03-07-2012', '',]) netbsd_writer.writerow(['2012Q3', 'pkgsrc-2012Q3', '01-10-2012', '',]) 
netbsd_writer.writerow(['2012Q4', 'pkgsrc-2012Q4', '11-01-2013', '',]) netbsd_writer.writerow(['2013Q1', 'pkgsrc-2013Q1', '01-04-2013', '',]) netbsd_writer.writerow(['2013Q2', 'pkgsrc-2013Q2', '04-07-2013', '',]) netbsd_writer.writerow(['2012Q4', 'pkgsrc-2012Q4', '11-01-2013', '',]) netbsd_writer.writerow(['2013Q1', 'pkgsrc-2013Q1', '04-10-2013', '',]) netbsd_writer.writerow(['2013Q2', 'pkgsrc-2013Q2', '04-10-2013', '',]) netbsd_writer.writerow(['2013Q3', 'pkgsrc-2013Q3', '04-10-2013', '',]) netbsd_writer.writerow(['2013Q4', 'pkgsrc-2013Q4', '04-10-2013', '',]) netbsd_writer.writerow(['2014Q1', 'pkgsrc-2014Q1', '03-07-2014', '',]) netbsd_writer.writerow(['2014Q2', 'pkgsrc-2014Q2', '03-07-2014', '',]) netbsd_writer.writerow(['2014Q3', 'pkgsrc-2014Q3', '03-07-2014', '',]) netbsd_writer.writerow(['2014Q4', 'pkgsrc-2015Q4', '02-01-2015', '',]) netbsd_writer.writerow(['2015Q1', 'pkgsrc-2015Q1', '14-04-2015', '',]) netbsd_writer.writerow(['2015Q2', 'pkgsrc-2015Q2', '06-07-2015', '',]) netbsd_writer.writerow(['2015Q3', 'pkgsrc-2015Q3', '30-09-2015', '',]) netbsd_writer.writerow(['2015Q4', 'pkgsrc-2015Q4', '30-09-2015', '',]) netbsd_writer.writerow(['2016Q1', 'pkgsrc-2016Q1', '09-05-2016', '',]) netbsd_writer.writerow(['2016Q2', 'pkgsrc-2016Q2', '09-05-2016', '',]) netbsd_writer.writerow(['2016Q3', 'pkgsrc-2016Q3', '09-05-2016', '',]) netbsd_writer.writerow(['2016Q4', 'pkgsrc-2016Q4', '04-01-2016', '',]) netbsd_writer.writerow(['2017Q1', 'pkgsrc-2017Q1', '03-04-2017', '',]) netbsd_writer.writerow(['2017Q2', 'pkgsrc-2017Q2', '03-04-2017', '',]) netbsd_writer.writerow(['2017Q3', 'pkgsrc-2017Q3', '03-04-2017', '',]) netbsd_writer.writerow(['2017Q4', 'pkgsrc-2017Q4', '03-04-2017', '',]) netbsd_writer.writerow(['2018Q1', 'pkgsrc-2018Q1', '31-12-2018', '',]) netbsd_writer.writerow(['2018Q2', 'pkgsrc-2018Q2', '31-12-2018', '',]) netbsd_writer.writerow(['2018Q3', 'pkgsrc-2018Q3', '31-12-2018', '',]) netbsd_writer.writerow(['2018Q4', 'pkgsrc-2018Q4', '31-12-2018', '',]) 
netbsd_writer.writerow(['2019Q1', 'pkgsrc-2019Q1', '03-10-2019', '',]) netbsd_writer.writerow(['2019Q2', 'pkgsrc-2019Q2', '03-10-2019', '',]) netbsd_writer.writerow(['2019Q3', 'pkgsrc-2019Q3', '03-10-2019', '',]) netbsd_writer.writerow(['2019Q4', 'pkgsrc-2019Q4', '03-10-2019', '',]) netbsd_writer.writerow(['2020Q1', 'pkgsrc-2020Q1', '30-06-2020', '',]) netbsd_writer.writerow(['2020Q2', 'pkgsrc-2020Q2', '30-06-2020', '',]) netbsd_writer.writerow(['2020Q3', 'pkgsrc-2020Q3', '09-10-2020', '',]) netbsd_writer.writerow(['2020Q4', 'pkgsrc-2020Q4', '07-01-2021', '',])
65
74
0.627027
580
4,810
5.089655
0.203448
0.247967
0.406504
0.073171
0.512873
0.512873
0.113821
0.113821
0.056911
0.056911
0
0.249063
0.112682
4,810
73
75
65.890411
0.442596
0.023077
0
0.03125
0
0
0.372495
0
0
0
0
0
0
1
0
false
0
0.015625
0
0.015625
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
280a53742fbfcdab61909f46a592f3063adda79e
685
py
Python
code/tira-flask-file-upload/test/test_software_id.py
scai-conf/SCAI-QReCC-21
7e00409b9bff28f3207d0c026abe4c8b26f211ae
[ "MIT" ]
15
2021-06-02T19:34:44.000Z
2022-02-25T08:36:40.000Z
code/tira-flask-file-upload/test/test_software_id.py
scai-conf/SCAI-QReCC-21
7e00409b9bff28f3207d0c026abe4c8b26f211ae
[ "MIT" ]
null
null
null
code/tira-flask-file-upload/test/test_software_id.py
scai-conf/SCAI-QReCC-21
7e00409b9bff28f3207d0c026abe4c8b26f211ae
[ "MIT" ]
null
null
null
from util import next_software_num def test_next_software_num_for_non_existing_user_1(): expected = 1 actual = next_software_num(vm_id='does-not-exist-1') assert expected == actual def test_next_software_num_for_non_existing_user_2(): expected = 1 actual = next_software_num(vm_id='does-not-exist-2') assert expected == actual def test_next_sotware_num_for_existing_user_1(): expected = 2 actual = next_software_num(vm_id='scai-qrecc21-simple-baseline') assert expected == actual def test_next_sotware_num_for_existing_user_2(): expected = 6 actual = next_software_num(vm_id='test-user') print(actual) assert expected == actual
28.541667
68
0.751825
104
685
4.528846
0.278846
0.178344
0.22293
0.178344
0.751592
0.751592
0.602972
0.602972
0.602972
0.433121
0
0.02087
0.160584
685
23
69
29.782609
0.798261
0
0
0.333333
0
0
0.100877
0.040936
0
0
0
0
0.222222
1
0.222222
false
0
0.055556
0
0.277778
0.055556
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
284de405382496169883b5a05a1848761fee70df
43
py
Python
keepercommander/plugins/oracle/__init__.py
Mkn-yskz/Commandy
e360306f41112534ae71102658f560fd974a1f45
[ "MIT" ]
151
2015-11-02T02:04:46.000Z
2022-01-20T00:07:01.000Z
keepercommander/plugins/oracle/__init__.py
Mkn-yskz/Commandy
e360306f41112534ae71102658f560fd974a1f45
[ "MIT" ]
145
2015-12-31T00:11:35.000Z
2022-03-31T19:13:54.000Z
keepercommander/plugins/oracle/__init__.py
Mkn-yskz/Commandy
e360306f41112534ae71102658f560fd974a1f45
[ "MIT" ]
73
2015-10-30T00:53:10.000Z
2022-03-30T03:50:53.000Z
from .oracle import * __all__ = ["rotate"]
14.333333
21
0.674419
5
43
5
1
0
0
0
0
0
0
0
0
0
0
0
0.162791
43
3
22
14.333333
0.694444
0
0
0
0
0
0.136364
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
285272b2cd37af9c26b254eef702ef8cfb407081
10,524
py
Python
Algorithm.Python/stubs/QuantConnect/Data/__Fundamental_36.py
gaoxiaojun/Lean
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
[ "Apache-2.0" ]
2
2020-12-08T11:27:20.000Z
2021-04-06T13:21:15.000Z
Algorithm.Python/stubs/QuantConnect/Data/__Fundamental_36.py
gaoxiaojun/Lean
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
[ "Apache-2.0" ]
null
null
null
Algorithm.Python/stubs/QuantConnect/Data/__Fundamental_36.py
gaoxiaojun/Lean
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
[ "Apache-2.0" ]
1
2020-12-08T11:27:21.000Z
2020-12-08T11:27:21.000Z
from .__Fundamental_37 import * import typing import System.IO import System.Collections.Generic import System import QuantConnect.Data.Fundamental.MultiPeriodField import QuantConnect.Data.Fundamental import QuantConnect.Data import QuantConnect import datetime class InvestmentContractLiabilitiesBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ Liabilities due on the insurance investment contract. InvestmentContractLiabilitiesBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentContractLiabilitiesBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentContractLiabilitiesIncurredIncomeStatement(QuantConnect.Data.Fundamental.MultiPeriodField): """ Income/Expenses due to the insurer's liabilities incurred in Investment Contracts. InvestmentContractLiabilitiesIncurredIncomeStatement(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentContractLiabilitiesIncurredIncomeStatement: pass NineMonths: float SixMonths: float ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentinFinancialAssetsBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ Represents the sum of all financial investments (trading securities, available-for-sale securities, held-to-maturity securities, etc.) 
InvestmentinFinancialAssetsBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentinFinancialAssetsBalanceSheet: pass ThreeMonths: float TwelveMonths: float TwoMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentPropertiesBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ Company's investments in properties net of accumulated depreciation, which generate a return. InvestmentPropertiesBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentPropertiesBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentsAndAdvancesBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ All investments in affiliates, real estate, securities, etc. Non-current investment, not including marketable securities. 
InvestmentsAndAdvancesBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentsAndAdvancesBalanceSheet: pass NineMonths: float OneMonth: float SixMonths: float ThreeMonths: float TwelveMonths: float TwoMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentsinAssociatesatCostBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ A stake in any company which is more than 20% but less than 50%. InvestmentsinAssociatesatCostBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentsinAssociatesatCostBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentsinJointVenturesatCostBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ A 50% stake in any company in which remaining 50% belongs to other company. 
InvestmentsinJointVenturesatCostBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentsinJointVenturesatCostBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentsInOtherVenturesUnderEquityMethodBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ This item represents the carrying amount on the company's balance sheet of its investments in common stock of an equity method. This item is typically available for the insurance industry. InvestmentsInOtherVenturesUnderEquityMethodBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentsInOtherVenturesUnderEquityMethodBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class InvestmentsinSubsidiariesatCostBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ A stake in any company which is more than 51%. 
InvestmentsinSubsidiariesatCostBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.InvestmentsinSubsidiariesatCostBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class IssuanceOfCapitalStockCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField): """ The cash inflow from offering common stock, which is the additional capital contribution to the entity during the PeriodAsByte. IssuanceOfCapitalStockCashFlowStatement(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.IssuanceOfCapitalStockCashFlowStatement: pass NineMonths: float OneMonth: float SixMonths: float ThreeMonths: float TwelveMonths: float TwoMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class IssuanceOfDebtCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField): """ The cash inflow due to an increase in long term debt. 
IssuanceOfDebtCashFlowStatement(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.IssuanceOfDebtCashFlowStatement: pass NineMonths: float OneMonth: float SixMonths: float ThreeMonths: float TwelveMonths: float TwoMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class IssueExpensesCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField): """ Cost associated with issuance of debt/equity capital in the Financing Cash Flow section. IssueExpensesCashFlowStatement(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.IssueExpensesCashFlowStatement: pass NineMonths: float SixMonths: float ThreeMonths: float TwelveMonths: float TwoMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField] class ItemsinTheCourseofTransmissiontoOtherBanksBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField): """ Carrying amount as of the balance sheet date of drafts and bills of exchange that have been accepted by the reporting bank or by others for its own account, as its liability to holders of the drafts. 
ItemsinTheCourseofTransmissiontoOtherBanksBalanceSheet(store: IDictionary[str, Decimal]) """ def GetPeriodValue(self, period: str) -> float: pass def SetPeriodValue(self, period: str, value: float) -> None: pass def __init__(self, store: System.Collections.Generic.IDictionary[str, float]) -> QuantConnect.Data.Fundamental.ItemsinTheCourseofTransmissiontoOtherBanksBalanceSheet: pass ThreeMonths: float TwelveMonths: float Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
31.414925
171
0.742968
984
10,524
7.890244
0.163618
0.086553
0.142581
0.149536
0.590804
0.584621
0.584621
0.570196
0.570196
0.564142
0
0.001384
0.176169
10,524
334
172
31.508982
0.894118
0.224439
0
0.772152
0
0
0
0
0
0
0
0
0
1
0.246835
false
0.246835
0.063291
0
0.753165
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
2858aa287f3c09574093bbf8460b93cc379a6cf6
144
py
Python
E2_4/Triangles.py
AidaNajafi/AidaNajafi.github.io
a5d86dc67a1d272794586e0a5c612a3893b75e69
[ "Apache-2.0" ]
null
null
null
E2_4/Triangles.py
AidaNajafi/AidaNajafi.github.io
a5d86dc67a1d272794586e0a5c612a3893b75e69
[ "Apache-2.0" ]
null
null
null
E2_4/Triangles.py
AidaNajafi/AidaNajafi.github.io
a5d86dc67a1d272794586e0a5c612a3893b75e69
[ "Apache-2.0" ]
null
null
null
def print_left_triangle(b): for i in range(1,b+1): print ("*"*i) print ("%"*i) print_left_triangle(20)
6.545455
27
0.486111
20
144
3.3
0.55
0.272727
0.515152
0
0
0
0
0
0
0
0
0.043478
0.361111
144
21
28
6.857143
0.673913
0
0
0
0
0
0.015385
0
0
0
0
0
0
1
0.2
false
0
0
0
0.2
0.8
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
28629d69c5787873f5f59ec5162f2115ac08fbaf
135
py
Python
codegen.py
macrat/PyIMDB
28c8f6f4aa2b8bb875ce42205ecb0ed70970d4f5
[ "MIT" ]
1
2021-09-10T01:24:31.000Z
2021-09-10T01:24:31.000Z
codegen.py
macrat/PyIMDB
28c8f6f4aa2b8bb875ce42205ecb0ed70970d4f5
[ "MIT" ]
null
null
null
codegen.py
macrat/PyIMDB
28c8f6f4aa2b8bb875ce42205ecb0ed70970d4f5
[ "MIT" ]
1
2021-09-10T01:24:32.000Z
2021-09-10T01:24:32.000Z
from grpc.tools import protoc protoc.main({ '', '-I.', '--python_out=.', '--grpc_python_out=.', './msg.proto', })
13.5
29
0.511111
15
135
4.4
0.733333
0.272727
0
0
0
0
0
0
0
0
0
0
0.237037
135
9
30
15
0.640777
0
0
0
0
0
0.348148
0
0
0
0
0
0
1
0
true
0
0.125
0
0.125
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
2878abde5ff70ffbfa85bcd4dd7af65fb50a1b15
149
py
Python
t/test.py
teddywing/git-hook-pre-commit-python-javascript-syntax-linter
7a5b8b3e0df6c236e96deb6a1fa1a82b93425f61
[ "MIT" ]
null
null
null
t/test.py
teddywing/git-hook-pre-commit-python-javascript-syntax-linter
7a5b8b3e0df6c236e96deb6a1fa1a82b93425f61
[ "MIT" ]
null
null
null
t/test.py
teddywing/git-hook-pre-commit-python-javascript-syntax-linter
7a5b8b3e0df6c236e96deb6a1fa1a82b93425f61
[ "MIT" ]
null
null
null
import datetime 'this is a long that is longer than 79 characters, or it will be whenever this sentence finishes' missing_spaces_around_operator=0
24.833333
97
0.818792
25
149
4.76
0.92
0
0
0
0
0
0
0
0
0
0
0.02381
0.154362
149
5
98
29.8
0.920635
0
0
0
0
0
0.637584
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
9539fde50213c2a37dcf060f128756b12ad6da55
117
py
Python
applications/Corpus/controllers/groundcontrol.py
jolivaresc/corpus
1d2f3885778c29cb56dd1447140376e3e7cd5831
[ "BSD-3-Clause" ]
1
2017-07-25T20:15:56.000Z
2017-07-25T20:15:56.000Z
applications/Corpus/controllers/groundcontrol.py
jolivaresc/corpus
1d2f3885778c29cb56dd1447140376e3e7cd5831
[ "BSD-3-Clause" ]
null
null
null
applications/Corpus/controllers/groundcontrol.py
jolivaresc/corpus
1d2f3885778c29cb56dd1447140376e3e7cd5831
[ "BSD-3-Clause" ]
null
null
null
def Corpus_bello(): return 'Corpus' def actionMan(s): return 'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFU'
19.5
57
0.769231
10
117
8.9
0.7
0
0
0
0
0
0
0
0
0
0
0
0.145299
117
5
58
23.4
0.89
0
0
0
0
0
0.42735
0.376068
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
9546ddc00d010b3e841308dafcfb42d282dabaf7
1,307
py
Python
blizzard/guild.py
DiegoLing33/prestij.xyz-api
69a11a2c93dd98975f9becbc4b8f596e4941a05f
[ "MIT" ]
null
null
null
blizzard/guild.py
DiegoLing33/prestij.xyz-api
69a11a2c93dd98975f9becbc4b8f596e4941a05f
[ "MIT" ]
null
null
null
blizzard/guild.py
DiegoLing33/prestij.xyz-api
69a11a2c93dd98975f9becbc4b8f596e4941a05f
[ "MIT" ]
null
null
null
# ██╗░░░░░██╗███╗░░██╗░██████╗░░░░██████╗░██╗░░░░░░█████╗░░█████╗░██╗░░██╗ # ██║░░░░░██║████╗░██║██╔════╝░░░░██╔══██╗██║░░░░░██╔══██╗██╔══██╗██║░██╔╝ # ██║░░░░░██║██╔██╗██║██║░░██╗░░░░██████╦╝██║░░░░░███████║██║░░╚═╝█████═╝░ # ██║░░░░░██║██║╚████║██║░░╚██╗░░░██╔══██╗██║░░░░░██╔══██║██║░░██╗██╔═██╗░ # ███████╗██║██║░╚███║╚██████╔╝░░░██████╦╝███████╗██║░░██║╚█████╔╝██║░╚██╗ # ╚══════╝╚═╝╚═╝░░╚══╝░╚═════╝░░░░╚═════╝░╚══════╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝ # # Developed by Yakov V. Panov (C) Ling • Black 2020 # @site http://ling.black from urllib.parse import quote from blizzard.core import default_params, blizzard_request from config import guild_name, server_slug def blizzard_guild_roster(guild: str = guild_name, data=default_params, sleep: int = 10): """ Returns the guild roster :param guild: :param data: :param sleep: :return: """ name = quote(guild.lower()) return blizzard_request(f"data/wow/guild/{server_slug}/{name}/roster", data, sleep) def blizzard_guild_info(guild: str = guild_name, data=default_params, sleep: int = 10): """ Returns the guild info :param guild: :param data: :param sleep: :return: """ name = quote(guild.lower()) return blizzard_request(f"data/wow/guild/{server_slug}/{name}", data, sleep)
34.394737
89
0.442234
123
1,307
8.089431
0.390244
0.039196
0.032161
0.034171
0.323618
0.323618
0.323618
0.323618
0.323618
0.323618
0
0.007136
0.142311
1,307
37
90
35.324324
0.494202
0.511094
0
0.222222
0
0
0.135563
0.135563
0
0
0
0
0
1
0.222222
false
0
0.333333
0
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
955c0fee510196a7645665d331c6c967f6899f74
160
py
Python
tests/conext.py
MateuszMazurkiewicz/LinkedList
7a371f3d51079d2391242245e19d369d03181b6f
[ "MIT" ]
null
null
null
tests/conext.py
MateuszMazurkiewicz/LinkedList
7a371f3d51079d2391242245e19d369d03181b6f
[ "MIT" ]
null
null
null
tests/conext.py
MateuszMazurkiewicz/LinkedList
7a371f3d51079d2391242245e19d369d03181b6f
[ "MIT" ]
null
null
null
import os import sys from pathlib import Path package_path = str((Path(__file__).parent / "..").resolve()) sys.path.insert(0, package_path) import linked_list
20
60
0.75625
24
160
4.75
0.625
0.192982
0
0
0
0
0
0
0
0
0
0.007042
0.1125
160
8
61
20
0.795775
0
0
0
0
0
0.012422
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
959179672d1b2360ae427faf9295bb6159538280
113
py
Python
py_tdlib/constructors/connected_websites.py
Mr-TelegramBot/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
24
2018-10-05T13:04:30.000Z
2020-05-12T08:45:34.000Z
py_tdlib/constructors/connected_websites.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
3
2019-06-26T07:20:20.000Z
2021-05-24T13:06:56.000Z
py_tdlib/constructors/connected_websites.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
5
2018-10-05T14:29:28.000Z
2020-08-11T15:04:10.000Z
from ..factory import Type class connectedWebsites(Type): websites = None # type: "vector<connectedWebsite>"
18.833333
52
0.752212
12
113
7.083333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.141593
113
5
53
22.6
0.876289
0.283186
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
95cd943371db9907ad397f6e28fa5a75915f003d
63
py
Python
A/A 1331 Is it rated.py
zielman/Codeforces-solutions
636f11a9eb10939d09d2e50ddc5ec53327d0b7ab
[ "MIT" ]
null
null
null
A/A 1331 Is it rated.py
zielman/Codeforces-solutions
636f11a9eb10939d09d2e50ddc5ec53327d0b7ab
[ "MIT" ]
1
2021-05-05T17:05:03.000Z
2021-05-05T17:05:03.000Z
A/A 1331 Is it rated.py
zielman/Codeforces-solutions
636f11a9eb10939d09d2e50ddc5ec53327d0b7ab
[ "MIT" ]
null
null
null
# https://codeforces.com/problemset/problem/1331/A print('NO')
21
50
0.746032
9
63
5.222222
1
0
0
0
0
0
0
0
0
0
0
0.066667
0.047619
63
3
51
21
0.716667
0.761905
0
0
0
0
0.142857
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
95e6b63bcc8a3c6311c3af04d3557f40441098f0
51
py
Python
test/hello.py
sofusalbertsen/radon-functionhub-client
abe8d19e9f225648c4feebdf5d5253564d22d3dc
[ "Apache-2.0" ]
1
2020-05-19T12:51:44.000Z
2020-05-19T12:51:44.000Z
test/hello.py
sofusalbertsen/radon-functionhub-client
abe8d19e9f225648c4feebdf5d5253564d22d3dc
[ "Apache-2.0" ]
17
2020-06-19T08:23:21.000Z
2021-06-02T01:51:10.000Z
test/example/hello.py
radon-h2020/functionHub-client
8a0954fa67f4edd52ef850357a3d28d0104b5986
[ "Apache-2.0" ]
2
2020-07-30T14:04:56.000Z
2021-01-12T16:53:04.000Z
def hello(event,context): print("hello world")
17
25
0.686275
7
51
5
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.156863
51
2
26
25.5
0.813953
0
0
0
0
0
0.215686
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
4
255b1e96ba5fff682b52f21fbeaef2f93b649d96
184
py
Python
deepdiy/utils/get_parent_path.py
IEWbgfnYDwHRoRRSKtkdyMDUzgdwuBYgDKtDJWd/diy
080ddece4f982f22f3d5cff8d9d82e12fcd946a1
[ "MIT" ]
57
2019-05-01T05:27:19.000Z
2022-03-06T12:11:55.000Z
deepdiy/utils/get_parent_path.py
markusj1201/deepdiy
080ddece4f982f22f3d5cff8d9d82e12fcd946a1
[ "MIT" ]
6
2020-01-28T22:58:35.000Z
2022-02-10T00:16:27.000Z
deepdiy/utils/get_parent_path.py
markusj1201/deepdiy
080ddece4f982f22f3d5cff8d9d82e12fcd946a1
[ "MIT" ]
13
2019-05-08T03:19:58.000Z
2021-08-02T04:24:15.000Z
import sys,os def get_parent_path(level=1): bundle_dir=os.path.abspath(__file__) for i in range(1,level): bundle_dir=os.path.dirname(bundle_dir) return bundle_dir
23
46
0.717391
31
184
3.935484
0.612903
0.295082
0.180328
0.245902
0
0
0
0
0
0
0
0.013245
0.179348
184
7
47
26.285714
0.794702
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c2576485bd0027cc5fa8d2fe7dd0c037dd23a505
103
py
Python
run.py
edubenetskiy/ProgTech-Lab6
67d355abd185231a5265c3a8bd84ceeae8e0b8b0
[ "Apache-2.0", "MIT" ]
null
null
null
run.py
edubenetskiy/ProgTech-Lab6
67d355abd185231a5265c3a8bd84ceeae8e0b8b0
[ "Apache-2.0", "MIT" ]
2
2021-03-31T19:34:38.000Z
2021-12-13T20:37:21.000Z
run.py
edubenetskiy/Retrogress
67d355abd185231a5265c3a8bd84ceeae8e0b8b0
[ "Apache-2.0", "MIT" ]
null
null
null
from app import app if __name__ == '__main__': app.run(host='127.0.0.1', port='8080', debug=True)
20.6
54
0.650485
18
103
3.277778
0.833333
0
0
0
0
0
0
0
0
0
0
0.114943
0.15534
103
4
55
25.75
0.563218
0
0
0
0
0
0.203884
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
c25957440f5decdca58411415cb7b5217c31835d
132
py
Python
Leetcode/2001-3000/2239. Find Closest Number to Zero/2239.py
Next-Gen-UI/Code-Dynamics
a9b9d5e3f27e870b3e030c75a1060d88292de01c
[ "MIT" ]
null
null
null
Leetcode/2001-3000/2239. Find Closest Number to Zero/2239.py
Next-Gen-UI/Code-Dynamics
a9b9d5e3f27e870b3e030c75a1060d88292de01c
[ "MIT" ]
null
null
null
Leetcode/2001-3000/2239. Find Closest Number to Zero/2239.py
Next-Gen-UI/Code-Dynamics
a9b9d5e3f27e870b3e030c75a1060d88292de01c
[ "MIT" ]
null
null
null
class Solution: def findClosestNumber(self, nums: List[int]) -> int: nums.sort(key=lambda x: (abs(x), -x)) return nums[0]
26.4
54
0.643939
20
132
4.25
0.75
0
0
0
0
0
0
0
0
0
0
0.009259
0.181818
132
4
55
33
0.777778
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
c26caf5f62e647716f06b6cc28622a160da05fed
185
py
Python
main/models.py
NikOneZ1/MarkovChainText
894f3de75c2a5781f95c95557e40fbfbe29ef051
[ "MIT" ]
1
2022-01-20T17:26:29.000Z
2022-01-20T17:26:29.000Z
main/models.py
NikOneZ1/MarkovChainText
894f3de75c2a5781f95c95557e40fbfbe29ef051
[ "MIT" ]
7
2022-01-11T16:24:12.000Z
2022-01-21T23:05:19.000Z
main/models.py
NikOneZ1/MarkovChainText
894f3de75c2a5781f95c95557e40fbfbe29ef051
[ "MIT" ]
null
null
null
from django.db import models class PresetText(models.Model): name = models.CharField(max_length=50) text = models.TextField() def __str__(self): return self.name
18.5
42
0.691892
24
185
5.125
0.791667
0
0
0
0
0
0
0
0
0
0
0.013699
0.210811
185
9
43
20.555556
0.828767
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.166667
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
c2878f5dfb71e87b48dfe0f3ec14a5212fe87261
252
py
Python
costflow/utils.py
StdioA/costflow
31335c0452d2a8a8b32014ef09cab62c1a4c244f
[ "MIT" ]
null
null
null
costflow/utils.py
StdioA/costflow
31335c0452d2a8a8b32014ef09cab62c1a4c244f
[ "MIT" ]
null
null
null
costflow/utils.py
StdioA/costflow
31335c0452d2a8a8b32014ef09cab62c1a4c244f
[ "MIT" ]
null
null
null
from jinja2 import Environment, meta def fetch_variables(tmpl): env = Environment() ast = env.parse(tmpl) return meta.find_undeclared_variables(ast) def check_account(account): # TODO: Check account name (utf-8 validation) pass
19.384615
49
0.718254
33
252
5.363636
0.69697
0.135593
0
0
0
0
0
0
0
0
0
0.009852
0.194444
252
12
50
21
0.862069
0.170635
0
0
0
0
0
0
0
0
0
0.083333
0
1
0.285714
false
0.142857
0.142857
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
0
1
0
0
4
c2c00b1719d2e6bcb4613dae0212ead5476fb1b2
104
py
Python
urbansim_templates/shared/__init__.py
AZMAG/urbansim_templates
723b83b4187da53a50ee03fdba4842a464f68240
[ "BSD-3-Clause" ]
19
2018-10-20T21:18:11.000Z
2021-11-15T07:11:03.000Z
urbansim_templates/shared/__init__.py
AZMAG/urbansim_templates
723b83b4187da53a50ee03fdba4842a464f68240
[ "BSD-3-Clause" ]
91
2018-03-15T17:42:44.000Z
2022-03-21T18:56:07.000Z
urbansim_templates/shared/__init__.py
AZMAG/urbansim_templates
723b83b4187da53a50ee03fdba4842a464f68240
[ "BSD-3-Clause" ]
12
2018-06-22T15:45:15.000Z
2021-10-02T00:13:36.000Z
from .core import CoreTemplateSettings from .output_column import OutputColumnSettings, register_column
34.666667
64
0.884615
11
104
8.181818
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.086538
104
2
65
52
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
c2da73528609fb752f86088c9a6e6d7b1e650d86
341
py
Python
datamunger/__init__.py
bacross/datamunger
d3a7e4c22004ee83afdc4964a86d3a96b90398ed
[ "MIT" ]
1
2018-04-16T18:24:49.000Z
2018-04-16T18:24:49.000Z
datamunger/__init__.py
bacross/datamunger
d3a7e4c22004ee83afdc4964a86d3a96b90398ed
[ "MIT" ]
1
2018-01-02T18:48:29.000Z
2018-01-02T18:48:29.000Z
datamunger/__init__.py
bacross/datamunger
d3a7e4c22004ee83afdc4964a86d3a96b90398ed
[ "MIT" ]
null
null
null
from .imputeKNN import splitDfNansNot,buildTrainingSet,kNNRegress,fillColNans,chooseNanFill,parseDf,imputeMissingDataForCol,imputeMissingDataKNN,outlierToNanCol,outlierToNanDF,imputeOutlierKNN import numpy as np import pandas as pd import random from sklearn.neighbors import KNeighborsRegressor import os from joblib import Parallel,delayed
48.714286
192
0.894428
36
341
8.472222
0.75
0
0
0
0
0
0
0
0
0
0
0
0.067449
341
7
193
48.714286
0.95912
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
c2e2edb7b075549aa4da0763de917d065fd28fc4
2,939
py
Python
tests/test_methods_zcs.py
soutys/aorn
033ad148dc9b2c50b4973a618ed394d85e514621
[ "MIT" ]
null
null
null
tests/test_methods_zcs.py
soutys/aorn
033ad148dc9b2c50b4973a618ed394d85e514621
[ "MIT" ]
null
null
null
tests/test_methods_zcs.py
soutys/aorn
033ad148dc9b2c50b4973a618ed394d85e514621
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- '''Zero-crossings method module tests ''' from __future__ import with_statement, division, absolute_import, print_function from tempfile import NamedTemporaryFile from aorn.methods import zcs from aorn.methods.antest import ANTest from aorn.samplesstore import SamplesStore from tests.generators import synth_complex, synth_noise def test_methods_zcs_init_dry_run(): assert issubclass(zcs, ANTest) tester = zcs(dry_run=True) assert tester.is_audio() is None assert tester.isnt_audio() is None def test_methods_zcs_init_no_args(): try: zcs() assert False except RuntimeError: pass def test_methods_zcs_init_float_level(): zcs(0.5) def test_methods_zcs_init_bad_float_level(): try: zcs(0.0) assert False except RuntimeError: pass try: zcs(1.0) assert False except RuntimeError: pass def test_methods_zcs_analyze_audio_ok(): tmp = NamedTemporaryFile(delete=True, prefix='sample_') data_sz = 10000 synth_complex(freqs=[440, 330, 1300, 3000, 300, 120], coefs=[1, 1, 1, 1, 1, 1], datasize=data_sz, fname=tmp.name) samples_store = SamplesStore() samples_store.load_samples(tmp.name) assert samples_store.get_samples() is not None assert len(samples_store.get_samples()) == data_sz tester = zcs(0.2) tester.analyze(samples_store) assert tester.is_audio() def test_methods_zcs_analyze_audio_fail(): tmp = NamedTemporaryFile(delete=True, prefix='sample_') data_sz = 10000 synth_noise(coef=2.0, datasize=data_sz, fname=tmp.name) samples_store = SamplesStore() samples_store.load_samples(tmp.name) assert samples_store.get_samples() is not None assert len(samples_store.get_samples()) == data_sz tester = zcs(0.2) tester.analyze(samples_store) assert not tester.is_audio() def test_methods_zcs_analyze_non_audio_ok(): tmp = NamedTemporaryFile(delete=True, prefix='sample_') data_sz = 10000 synth_noise(coef=2.0, datasize=data_sz, fname=tmp.name) samples_store = SamplesStore() samples_store.load_samples(tmp.name) assert samples_store.get_samples() is not None assert 
len(samples_store.get_samples()) == data_sz tester = zcs(0.2) tester.analyze(samples_store) assert tester.isnt_audio() def test_methods_zcs_analyze_non_audio_fail(): tmp = NamedTemporaryFile(delete=True, prefix='sample_') data_sz = 10000 synth_complex(freqs=[440, 330, 1300, 3000, 300, 120], coefs=[1, 1, 1, 1, 1, 1], datasize=data_sz, fname=tmp.name) samples_store = SamplesStore() samples_store.load_samples(tmp.name) assert samples_store.get_samples() is not None assert len(samples_store.get_samples()) == data_sz tester = zcs(0.2) tester.analyze(samples_store) assert not tester.isnt_audio() # vim: ts=4:sw=4:et:fdm=indent:ff=unix
26.241071
80
0.709085
416
2,939
4.742788
0.223558
0.121642
0.056766
0.068931
0.758236
0.724278
0.70299
0.70299
0.653827
0.603142
0
0.039026
0.18918
2,939
111
81
26.477477
0.788922
0.031984
0
0.657895
0
0
0.00987
0
0
0
0
0
0.236842
1
0.105263
false
0.039474
0.078947
0
0.184211
0.013158
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6c75e7bb6c413f898a1764ddb2769a26c115f8ec
68
py
Python
internal/__init__.py
betanzos/py-jar-modularizer
b8117c86feed924ae3047e3656a63297f7723961
[ "MIT" ]
1
2019-05-15T07:28:23.000Z
2019-05-15T07:28:23.000Z
internal/__init__.py
betanzos/py-jar-modularizer
b8117c86feed924ae3047e3656a63297f7723961
[ "MIT" ]
null
null
null
internal/__init__.py
betanzos/py-jar-modularizer
b8117c86feed924ae3047e3656a63297f7723961
[ "MIT" ]
null
null
null
from .modularizer import Modularizer from .compiler import Compiler
22.666667
36
0.852941
8
68
7.25
0.5
0
0
0
0
0
0
0
0
0
0
0
0.117647
68
2
37
34
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
6668a29f17a6cc0bb9a6af384a0d4788b5cc0eab
94
py
Python
read_later/apps.py
krzysztofzuraw/reddit-stars
525f61401e0b9be7e810e41d96466c3018e98702
[ "MIT" ]
3
2016-06-22T10:07:19.000Z
2019-03-14T09:45:19.000Z
read_later/apps.py
krzysztofzuraw/reddit-stars
525f61401e0b9be7e810e41d96466c3018e98702
[ "MIT" ]
null
null
null
read_later/apps.py
krzysztofzuraw/reddit-stars
525f61401e0b9be7e810e41d96466c3018e98702
[ "MIT" ]
null
null
null
from django.apps import AppConfig class ReadLaterConfig(AppConfig): name = 'read_later'
15.666667
33
0.765957
11
94
6.454545
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.159574
94
5
34
18.8
0.898734
0
0
0
0
0
0.106383
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
666f0f95f3afe0109e3f9f4c3be5905be42de3b5
67
py
Python
adapters/BluetoothAdapter.py
sciurolocutus/btControl
bc51991add9cb231684272cb2e6c56b712eab02c
[ "MIT" ]
null
null
null
adapters/BluetoothAdapter.py
sciurolocutus/btControl
bc51991add9cb231684272cb2e6c56b712eab02c
[ "MIT" ]
null
null
null
adapters/BluetoothAdapter.py
sciurolocutus/btControl
bc51991add9cb231684272cb2e6c56b712eab02c
[ "MIT" ]
null
null
null
class BluetoothAdapter: def list_bt_devices(self): pass
22.333333
30
0.701493
8
67
5.625
1
0
0
0
0
0
0
0
0
0
0
0
0.238806
67
3
31
22.333333
0.882353
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
66c1327ae1a01ee985ba84f663d60db26811d5c7
819
py
Python
tests/css_matcher/test_parse.py
jingyuexing/py-emmet
e3b1ecb875e0067fc9ef4f4479c7a8d4646aaa11
[ "MIT" ]
29
2019-11-12T16:15:15.000Z
2022-02-06T10:51:25.000Z
tests/css_matcher/test_parse.py
jingyuexing/py-emmet
e3b1ecb875e0067fc9ef4f4479c7a8d4646aaa11
[ "MIT" ]
3
2020-04-25T11:02:53.000Z
2021-11-25T10:39:09.000Z
tests/css_matcher/test_parse.py
jingyuexing/py-emmet
e3b1ecb875e0067fc9ef4f4479c7a8d4646aaa11
[ "MIT" ]
7
2020-04-25T09:42:54.000Z
2021-02-16T20:29:41.000Z
import unittest import sys sys.path.append('../../') from emmet.css_matcher import split_value def tokens(value: str): return [value[r[0]:r[1]] for r in split_value(value)] class TestCSSParser(unittest.TestCase): def test_split_value(self): self.assertEqual(tokens('10px 20px'), ['10px', '20px']) self.assertEqual(tokens(' 10px 20px '), ['10px', '20px']) self.assertEqual(tokens('10px, 20px'), ['10px', '20px']) self.assertEqual(tokens('20px'), ['20px']) self.assertEqual(tokens('no-repeat, 10px - 5'), ['no-repeat', '10px', '5']) self.assertEqual(tokens('url("foo bar") no-repeat'), ['url("foo bar")', 'no-repeat']) self.assertEqual(tokens('--my-prop'), ['--my-prop']) self.assertEqual(tokens('calc(100% - 80px)'), ['calc(100% - 80px)'])
37.227273
93
0.612943
105
819
4.733333
0.371429
0.241449
0.338028
0.201207
0.334004
0.265594
0.265594
0.265594
0.265594
0.265594
0
0.067647
0.169719
819
21
94
39
0.663235
0
0
0.125
0
0
0.247863
0
0
0
0
0
0.5
1
0.125
false
0
0.1875
0.0625
0.4375
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
66c1eaed8ecf5b50a9c0512a01eb037dd47f48d8
119
py
Python
CHECLabPy/utils/__init__.py
ConteFrancesco/CHECLabPy
b2d0a12cae062603b618132957a555c404a4a4c9
[ "BSD-3-Clause" ]
4
2018-04-23T09:14:21.000Z
2019-05-02T22:12:47.000Z
CHECLabPy/utils/__init__.py
watsonjj/CHECLabPy
c67bf0b190ba4b799d4da150591d602e16b1d6b0
[ "BSD-3-Clause" ]
28
2018-03-29T21:50:45.000Z
2019-11-12T07:51:01.000Z
CHECLabPy/utils/__init__.py
watsonjj/CHECLabPy
c67bf0b190ba4b799d4da150591d602e16b1d6b0
[ "BSD-3-Clause" ]
16
2018-03-23T15:29:38.000Z
2019-07-24T12:19:51.000Z
""" This module contains the various utilities that may be useful in lab anaylysis and operating on the waveforms. """
23.8
68
0.773109
18
119
5.111111
0.944444
0
0
0
0
0
0
0
0
0
0
0
0.168067
119
4
69
29.75
0.929293
0.92437
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
66efd25a26116f9ed2c65025435bd78604eb54a0
140
py
Python
src/Cafe/Tab/Shared.py
DieAntonie/eventstore-python
1f162a466bcfdf6138248b6a89b4b6df1ce67c6c
[ "BSD-3-Clause" ]
1
2021-10-02T20:59:57.000Z
2021-10-02T20:59:57.000Z
src/Cafe/Tab/Shared.py
DieAntonie/eventstore-python
1f162a466bcfdf6138248b6a89b4b6df1ce67c6c
[ "BSD-3-Clause" ]
2
2020-03-24T16:36:16.000Z
2020-03-24T16:51:03.000Z
src/Cafe/Tab/Shared.py
DieAntonie/eventstore-python
1f162a466bcfdf6138248b6a89b4b6df1ce67c6c
[ "BSD-3-Clause" ]
null
null
null
from dataclasses import dataclass @dataclass class OrderedItem: MenuNumber: int Description: str IsDrink: bool Price: float
17.5
33
0.735714
15
140
6.866667
0.933333
0
0
0
0
0
0
0
0
0
0
0
0.221429
140
8
34
17.5
0.944954
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.142857
0
0.857143
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
dd08c71fcbe436834cdbc59b78769859901aea47
183
py
Python
examples/detailed_use_cases/__init__.py
hase1128/dragonfly
4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c
[ "MIT" ]
675
2018-08-23T17:30:46.000Z
2022-03-30T18:37:23.000Z
examples/detailed_use_cases/__init__.py
hase1128/dragonfly
4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c
[ "MIT" ]
62
2018-11-30T23:40:19.000Z
2022-03-10T19:47:27.000Z
examples/detailed_use_cases/__init__.py
hase1128/dragonfly
4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c
[ "MIT" ]
349
2018-09-10T19:04:34.000Z
2022-03-31T13:10:45.000Z
""" Demos on some detailed use cases for Dragonfly. We use domains and configurations from an electrolyte design task, but use synthetic functions instead. -- kirthevasank """
26.142857
83
0.754098
24
183
5.75
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.185792
183
6
84
30.5
0.926175
0.912568
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
dd1338ab1fffea740ae40538b89faf32a2a50b81
215
py
Python
generic_api/generics/retries_handler.py
guestready/generic_api
4830995ec2f6ea77b1b3bff1d86d4152530b0942
[ "BSD-2-Clause" ]
1
2020-11-24T07:49:37.000Z
2020-11-24T07:49:37.000Z
generic_api/generics/retries_handler.py
guestready/generic_api
4830995ec2f6ea77b1b3bff1d86d4152530b0942
[ "BSD-2-Clause" ]
null
null
null
generic_api/generics/retries_handler.py
guestready/generic_api
4830995ec2f6ea77b1b3bff1d86d4152530b0942
[ "BSD-2-Clause" ]
null
null
null
class GenericRetriesHandler: def __init__(self, *args, **kwargs): self.count = 0 def is_eligible(self, response): raise NotImplementedError def increment(self): self.count += 1
21.5
40
0.637209
23
215
5.73913
0.695652
0.136364
0
0
0
0
0
0
0
0
0
0.012658
0.265116
215
9
41
23.888889
0.822785
0
0
0
0
0
0
0
0
0
0
0
0
1
0.428571
false
0
0
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
dd14e7dfb3eda95e7ed71613bdca2ecb4f5d10df
1,426
py
Python
tspetl/apache_log_tool.py
jdgwartney/tsp-etl
70540d3d13261849af512d97c153fc4f1e414bf5
[ "Apache-2.0" ]
null
null
null
tspetl/apache_log_tool.py
jdgwartney/tsp-etl
70540d3d13261849af512d97c153fc4f1e414bf5
[ "Apache-2.0" ]
null
null
null
tspetl/apache_log_tool.py
jdgwartney/tsp-etl
70540d3d13261849af512d97c153fc4f1e414bf5
[ "Apache-2.0" ]
null
null
null
# # Copyright 2016 BMC Software, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from tspetl import ETLTool class ApacheLogTool(ETLTool): def __init__(self): super(ApacheLogTool, self).__init__() @property def name(self): return 'apachelog' @property def help(self): return 'Parses apache logs for page status. (Future Release)' def add_parser(self, sub_parser): super(ApacheLogTool, self).add_parser(sub_parser) self._parser.add_argument('-f', '--file', dest='file_path', metavar="file_path", help="Path to file to import", required=False) self._parser.add_argument('-b', '--batch', dest='batch_count', metavar="batch_count", help="How measurements to send in each API call", required=False) def _handle_arguments(self, args): pass def run(self, args): self._handle_arguments(args)
33.162791
135
0.68864
194
1,426
4.938144
0.551546
0.06263
0.02714
0.033403
0
0
0
0
0
0
0
0.007124
0.212482
1,426
42
136
33.952381
0.845948
0.389201
0
0.105263
0
0
0.211944
0
0
0
0
0
0
1
0.315789
false
0.052632
0.105263
0.105263
0.578947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
dd2141c163bcbba513b81ee2ed41ca02c3bca04e
231
py
Python
jolly-jellyfish/src/page_maker/admin.py
Vthechamp22/summer-code-jam-2021
0a8bf1f22f6c73300891fd779da36efd8e1304c1
[ "MIT" ]
40
2020-08-02T07:38:22.000Z
2021-07-26T01:46:50.000Z
jolly-jellyfish/src/page_maker/admin.py
Vthechamp22/summer-code-jam-2021
0a8bf1f22f6c73300891fd779da36efd8e1304c1
[ "MIT" ]
134
2020-07-31T12:15:45.000Z
2020-12-13T04:42:19.000Z
jolly-jellyfish/src/page_maker/admin.py
Artemis21/summer-code-jam-2020
1323288cb1e75b3aa4141c2c6e378f9219cf73d0
[ "MIT" ]
101
2020-07-31T12:00:47.000Z
2021-11-01T09:06:58.000Z
from django.contrib import admin from .models import Template, Webpage, Comment, Like # Register your models here. admin.site.register(Template) admin.site.register(Webpage) admin.site.register(Comment) admin.site.register(Like)
23.1
52
0.805195
32
231
5.8125
0.4375
0.193548
0.365591
0
0
0
0
0
0
0
0
0
0.095238
231
9
53
25.666667
0.889952
0.112554
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
dd3ef78345b6915b6f39049988a8a86058f8c967
416
py
Python
mysite/weeklyreport/models.py
wssc2208/python-django1
265f215299e670e3c83ab9bbe816398a155357cb
[ "Apache-2.0" ]
null
null
null
mysite/weeklyreport/models.py
wssc2208/python-django1
265f215299e670e3c83ab9bbe816398a155357cb
[ "Apache-2.0" ]
null
null
null
mysite/weeklyreport/models.py
wssc2208/python-django1
265f215299e670e3c83ab9bbe816398a155357cb
[ "Apache-2.0" ]
null
null
null
from django.db import models # Create your models here. class weeklyUser(models.Model): username = models.CharField(max_length=20) password = models.CharField(max_length=30) email = models.EmailField() class WeeklyReportContent(models.Model): username = models.CharField(max_length=20) UpdateTime = models.DateTimeField('修改时间', auto_now=True) content = models.CharField(max_length = 100000)
34.666667
60
0.754808
51
416
6.058824
0.568627
0.194175
0.23301
0.31068
0.291262
0.291262
0.291262
0.291262
0
0
0
0.033613
0.141827
416
12
61
34.666667
0.831933
0.057692
0
0.222222
0
0
0.01023
0
0
0
0
0
0
1
0
false
0.111111
0.111111
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
dd5a6737b6ffafcb9833a5e90abc71a824bb05c0
144
py
Python
indel_analysis/i1/run_all_compile_i1.py
kaskamal/SelfTarget
c0bff0f11f4e69bafd80a1fa4d36b0f9689b9af7
[ "MIT" ]
20
2018-08-27T01:27:02.000Z
2022-03-07T07:12:56.000Z
indel_analysis/i1/run_all_compile_i1.py
kaskamal/SelfTarget
c0bff0f11f4e69bafd80a1fa4d36b0f9689b9af7
[ "MIT" ]
6
2019-01-18T19:54:52.000Z
2021-03-19T23:56:28.000Z
indel_analysis/i1/run_all_compile_i1.py
kaskamal/SelfTarget
c0bff0f11f4e69bafd80a1fa4d36b0f9689b9af7
[ "MIT" ]
14
2018-10-12T21:31:31.000Z
2021-11-08T08:32:40.000Z
from selftarget.util import runPerSubdir if __name__ == '__main__': runPerSubdir('compile_i1.py', 'out_i1', __file__, extra_args='. ')
28.8
70
0.708333
17
144
5.117647
0.882353
0
0
0
0
0
0
0
0
0
0
0.016393
0.152778
144
5
70
28.8
0.696721
0
0
0
0
0
0.2
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
dd93775fd2df91af00c5d928932cfa1784da8106
210
py
Python
dcmrtstruct2nii/adapters/output/abstractoutputadapter.py
Sikerdebaard/dcmrtstruct2mask
4f85bccd2d8ee95c34b399950ec5e528baad5e77
[ "Apache-2.0" ]
50
2019-01-23T13:32:07.000Z
2022-03-23T01:10:45.000Z
dcmrtstruct2nii/adapters/output/abstractoutputadapter.py
Sikerdebaard/dcmrtstruct2mask
4f85bccd2d8ee95c34b399950ec5e528baad5e77
[ "Apache-2.0" ]
20
2019-07-11T12:30:28.000Z
2022-03-05T09:26:55.000Z
dcmrtstruct2nii/adapters/output/abstractoutputadapter.py
Sikerdebaard/dcmrtstruct2mask
4f85bccd2d8ee95c34b399950ec5e528baad5e77
[ "Apache-2.0" ]
19
2019-07-20T08:07:12.000Z
2022-02-22T03:03:49.000Z
from abc import ABC, abstractmethod class AbstractOutputAdapter(ABC): def __init__(self): super().__init__() @abstractmethod def write(self, pixel_arrays, *args, **kwargs): pass
17.5
51
0.661905
22
210
5.909091
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.233333
210
11
52
19.090909
0.807453
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0.142857
0.142857
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
dd9da5a2ae607144855d1f439b8dd87bc788e5ee
288
py
Python
twlived/utils/__init__.py
tausackhn/twlived
e065fe5efc479ad2ec0ee0053994cba857e39ae2
[ "MIT" ]
11
2017-04-11T13:09:36.000Z
2021-11-27T22:14:34.000Z
twlived/utils/__init__.py
tausackhn/twlived
e065fe5efc479ad2ec0ee0053994cba857e39ae2
[ "MIT" ]
1
2017-09-07T10:29:53.000Z
2017-09-07T16:01:01.000Z
twlived/utils/__init__.py
tausackhn/twlived
e065fe5efc479ad2ec0ee0053994cba857e39ae2
[ "MIT" ]
1
2021-04-15T16:07:58.000Z
2021-04-15T16:07:58.000Z
from .pubsub import BaseEvent, Provider, Publisher, Subscriber from .utils import retry_on_exception, chunked, sanitize_filename, fails_in_row __all__ = ['BaseEvent', 'Provider', 'Publisher', 'Subscriber', 'retry_on_exception', 'chunked', 'sanitize_filename', 'fails_in_row']
48
116
0.756944
33
288
6.181818
0.545455
0.166667
0.254902
0.352941
0.480392
0.480392
0.480392
0.480392
0.480392
0
0
0
0.125
288
5
117
57.6
0.809524
0
0
0
0
0
0.3125
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
dd9f3b224b33240ef69fe2241ee0febc03472cdd
72
py
Python
zentral/contrib/munki/__init__.py
gwhitehawk/zentral
156134aed3d7ff8a7cb40ab6f2269a763c316459
[ "Apache-2.0" ]
634
2015-10-30T00:55:40.000Z
2022-03-31T02:59:00.000Z
zentral/contrib/munki/__init__.py
gwhitehawk/zentral
156134aed3d7ff8a7cb40ab6f2269a763c316459
[ "Apache-2.0" ]
145
2015-11-06T00:17:33.000Z
2022-03-16T13:30:31.000Z
zentral/contrib/munki/__init__.py
gwhitehawk/zentral
156134aed3d7ff8a7cb40ab6f2269a763c316459
[ "Apache-2.0" ]
103
2015-11-07T07:08:49.000Z
2022-03-18T17:34:36.000Z
default_app_config = "zentral.contrib.munki.apps.ZentralMunkiAppConfig"
36
71
0.861111
8
72
7.5
1
0
0
0
0
0
0
0
0
0
0
0
0.041667
72
1
72
72
0.869565
0
0
0
0
0
0.666667
0.666667
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
dd9f82a511afbbb5ca2e0d9e8f0a4390ba4bbccf
123
py
Python
remotelogin/devices/managers/__init__.py
filintod/pyremotelogin
e2a4df7fd69d21eccdf1aec55c33a839de9157f1
[ "MIT" ]
1
2018-11-20T17:45:20.000Z
2018-11-20T17:45:20.000Z
remotelogin/devices/managers/__init__.py
filintod/pyremotelogin
e2a4df7fd69d21eccdf1aec55c33a839de9157f1
[ "MIT" ]
3
2018-10-16T18:07:50.000Z
2018-10-16T18:10:06.000Z
remotelogin/devices/managers/__init__.py
filintod/pyremotelogin
e2a4df7fd69d21eccdf1aec55c33a839de9157f1
[ "MIT" ]
null
null
null
import logging from . import connections, files, services, users, interfaces, tunnels log = logging.getLogger(__name__)
17.571429
70
0.780488
14
123
6.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.138211
123
6
71
20.5
0.867925
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
06b3bf6054351aff4acd0c67e6ce17f991349b9c
50
py
Python
AOJ/ITP1_2_C.py
sireline/PyCode
8578467710c3c1faa89499f5d732507f5d9a584c
[ "MIT" ]
null
null
null
AOJ/ITP1_2_C.py
sireline/PyCode
8578467710c3c1faa89499f5d732507f5d9a584c
[ "MIT" ]
null
null
null
AOJ/ITP1_2_C.py
sireline/PyCode
8578467710c3c1faa89499f5d732507f5d9a584c
[ "MIT" ]
null
null
null
print(*sorted([int(n) for n in input().split()]))
25
49
0.62
9
50
3.444444
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.1
50
1
50
50
0.688889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
06b66e8bc3c22be67e02cf55bf507baf6903a81b
21
py
Python
py2app_tests/__init__.py
flupke/py2app
8eb6c618f9c63d6ac970fb145a7f7782b71bcb4d
[ "MIT" ]
193
2020-01-15T09:34:20.000Z
2022-03-18T19:14:16.000Z
py2app_tests/__init__.py
flupke/py2app
8eb6c618f9c63d6ac970fb145a7f7782b71bcb4d
[ "MIT" ]
185
2020-01-15T08:38:27.000Z
2022-03-27T17:29:29.000Z
py2app_tests/__init__.py
flupke/py2app
8eb6c618f9c63d6ac970fb145a7f7782b71bcb4d
[ "MIT" ]
23
2020-01-24T14:47:18.000Z
2022-02-22T17:19:47.000Z
""" py2app tests """
10.5
20
0.52381
2
21
5.5
1
0
0
0
0
0
0
0
0
0
0
0.058824
0.190476
21
1
21
21
0.588235
0.571429
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
06c7db1cb823df368d56a46077b557b51cdedf40
151
py
Python
Python/Effective_Python/chapter1/4.py
sunyunxian/test_lib
5e98fff1074b301960d39165aa99d60db880b262
[ "Apache-2.0" ]
1
2021-12-17T14:57:30.000Z
2021-12-17T14:57:30.000Z
Python/Effective_Python/chapter1/4.py
ok-frank/test_lib
5e98fff1074b301960d39165aa99d60db880b262
[ "Apache-2.0" ]
null
null
null
Python/Effective_Python/chapter1/4.py
ok-frank/test_lib
5e98fff1074b301960d39165aa99d60db880b262
[ "Apache-2.0" ]
null
null
null
# 插值 f-string,插值表达式 key = 'my_var' value = '1.234' formatted = f'{key}: {value}' print(formatted) formatted = f'{key} = {value}' print(formatted)
11.615385
30
0.635762
22
151
4.318182
0.545455
0.210526
0.273684
0.378947
0.673684
0.673684
0
0
0
0
0
0.031746
0.165563
151
12
31
12.583333
0.722222
0.112583
0
0.333333
0
0
0.30303
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
06cd98c018ce9ed41c3196a5cdefec30aa89cd44
190
py
Python
beautifulsoup.py
boniaditya/scraping
e53278afecc87641e891eb650b095a549bdedd70
[ "Apache-2.0" ]
null
null
null
beautifulsoup.py
boniaditya/scraping
e53278afecc87641e891eb650b095a549bdedd70
[ "Apache-2.0" ]
null
null
null
beautifulsoup.py
boniaditya/scraping
e53278afecc87641e891eb650b095a549bdedd70
[ "Apache-2.0" ]
null
null
null
from urllib.request import urlopen from bs4 import BeautifulSoup htmldata = urlopen("http://www.pythonscraping.com/pages/page1.html") Object = BeautifulSoup(htmldata.read()) print(Object.h1)
38
68
0.805263
25
190
6.12
0.76
0.27451
0
0
0
0
0
0
0
0
0
0.017045
0.073684
190
5
69
38
0.852273
0
0
0
0
0
0.240838
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0.2
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
06d6bba9cad43cf8c4a327914250c3b223b2d4ea
206
py
Python
Diena_11_modules_packages_std/m_utils.py
edzya/Python_RTU_08_20
d2921d998c611c18328dd523daf976a27ce858c1
[ "MIT" ]
8
2020-08-31T16:10:54.000Z
2021-11-24T06:37:37.000Z
Diena_11_modules_packages_std/m_utils.py
edzya/Python_RTU_08_20
d2921d998c611c18328dd523daf976a27ce858c1
[ "MIT" ]
8
2021-06-08T22:30:29.000Z
2022-03-12T00:48:55.000Z
Diena_11_modules_packages_std/m_utils.py
edzya/Python_RTU_08_20
d2921d998c611c18328dd523daf976a27ce858c1
[ "MIT" ]
12
2020-09-28T17:06:52.000Z
2022-02-17T12:12:46.000Z
import math def sum_prod(seq_a, seq_b): res1 = math.prod(seq_a) + math.prod(seq_b) return res1 def sum_prod_multi(*seqs): res2 = math.fsum([math.prod(seq) for seq in seqs]) return res2
18.727273
54
0.665049
37
206
3.513514
0.432432
0.215385
0.253846
0
0
0
0
0
0
0
0
0.024691
0.213592
206
11
55
18.727273
0.777778
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0
0.714286
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
6611a0e0ef6c781916fd4e856d451158e604bc6b
782
py
Python
pyscreener/docking/runner.py
coleygroup/pyscreener
7d3920ca7f75ee44e4f4875f90c75759ea33ac56
[ "MIT" ]
34
2021-01-08T00:32:01.000Z
2022-02-20T20:02:55.000Z
pyscreener/docking/runner.py
rafalbachorz/pyscreener
ca462f75c563d2295b63cf99dbffbdf4f8f00db1
[ "MIT" ]
24
2021-01-29T18:28:45.000Z
2022-03-22T21:48:01.000Z
pyscreener/docking/runner.py
coleygroup/pyscreener
7d3920ca7f75ee44e4f4875f90c75759ea33ac56
[ "MIT" ]
13
2021-01-09T11:07:03.000Z
2022-02-10T23:08:11.000Z
from abc import ABC, abstractmethod from typing import Optional, Sequence from pyscreener.docking.data import CalculationData from pyscreener.docking.metadata import CalculationMetadata class DockingRunner(ABC): @staticmethod @abstractmethod def prepare_receptor(data: CalculationData) -> CalculationData: pass @staticmethod @abstractmethod def prepare_ligand(data: CalculationData) -> CalculationData: pass @staticmethod @abstractmethod def run(data: CalculationData) -> Optional[Sequence[float]]: pass @staticmethod @abstractmethod def prepare_and_run(data: CalculationData) -> CalculationData: pass @staticmethod def validate_metadata(metadata: CalculationMetadata): return
26.066667
67
0.7289
70
782
8.071429
0.371429
0.184071
0.20531
0.19115
0.40885
0.237168
0.237168
0
0
0
0
0
0.207161
782
30
68
26.066667
0.91129
0
0
0.541667
0
0
0
0
0
0
0
0
0
1
0.208333
false
0.166667
0.166667
0.041667
0.458333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
662de715f7c172d0c8465ae237131b75eb01302c
66
py
Python
scripts/jsutils/__init__.py
mallarke/JSKit
de60ff1dbb37aa5973f85c6f95b99089f8c12571
[ "Apache-2.0" ]
null
null
null
scripts/jsutils/__init__.py
mallarke/JSKit
de60ff1dbb37aa5973f85c6f95b99089f8c12571
[ "Apache-2.0" ]
null
null
null
scripts/jsutils/__init__.py
mallarke/JSKit
de60ff1dbb37aa5973f85c6f95b99089f8c12571
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python from jsutils import * addPythonPath(__file__)
11
23
0.757576
8
66
5.75
1
0
0
0
0
0
0
0
0
0
0
0
0.121212
66
5
24
13.2
0.793103
0.242424
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
66464c220d3023d3ba4f1c3b5dacd83a7edb4969
68
py
Python
tkcode/__init__.py
whmsft/tkcode
4f1ddd37000b709cec856302451781f84c492e10
[ "MIT" ]
13
2021-06-21T20:28:01.000Z
2022-01-19T01:41:01.000Z
tkcode/__init__.py
whmsft/tkcode
4f1ddd37000b709cec856302451781f84c492e10
[ "MIT" ]
6
2021-06-22T18:30:12.000Z
2021-11-25T15:18:09.000Z
tkcode/__init__.py
whmsft/tkcode
4f1ddd37000b709cec856302451781f84c492e10
[ "MIT" ]
2
2021-08-05T13:56:13.000Z
2021-11-25T11:16:12.000Z
from .codeblock import CodeBlock from .codeeditor import CodeEditor
22.666667
34
0.852941
8
68
7.25
0.5
0
0
0
0
0
0
0
0
0
0
0
0.117647
68
2
35
34
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
b080ce36cf5f4b0d857110e8ed9eb3ad8b3ed0d0
104
py
Python
Clase_1/snippets/hint_q4.py
uncrayon/python-para-sistemas
fd5cf613996c02465780b3f0f058681e824f831a
[ "MIT" ]
null
null
null
Clase_1/snippets/hint_q4.py
uncrayon/python-para-sistemas
fd5cf613996c02465780b3f0f058681e824f831a
[ "MIT" ]
null
null
null
Clase_1/snippets/hint_q4.py
uncrayon/python-para-sistemas
fd5cf613996c02465780b3f0f058681e824f831a
[ "MIT" ]
null
null
null
# Recuerda la precedencia de operadores # También te recomiendo que veas de nuevo la división entera: //
52
64
0.788462
15
104
5.466667
0.866667
0
0
0
0
0
0
0
0
0
0
0
0.163462
104
2
64
52
0.942529
0.961538
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b09a76c9dc7d6d4397fd6a94b2ca033cc666f6cf
115
py
Python
ethical_framework_app/apps.py
Topflee/ethicalFramework_React
485063046decfb718797cdec9c983e94530b9783
[ "MIT" ]
null
null
null
ethical_framework_app/apps.py
Topflee/ethicalFramework_React
485063046decfb718797cdec9c983e94530b9783
[ "MIT" ]
null
null
null
ethical_framework_app/apps.py
Topflee/ethicalFramework_React
485063046decfb718797cdec9c983e94530b9783
[ "MIT" ]
null
null
null
from django.apps import AppConfig class EthicalFrameworkAppConfig(AppConfig): name = 'ethical_framework_app'
19.166667
43
0.808696
12
115
7.583333
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.130435
115
5
44
23
0.91
0
0
0
0
0
0.182609
0.182609
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b0b086a74f7686065c06ad4f1534feea0125f4b0
947
py
Python
luncho_python/test/test_luncho_api.py
HIRANO-Satoshi/luncho
caf41fc68e8c95130dcda386ebd0e61e4af3e698
[ "MIT" ]
1
2021-05-21T09:42:57.000Z
2021-05-21T09:42:57.000Z
luncho_python/test/test_luncho_api.py
HIRANO-Satoshi/luncho
caf41fc68e8c95130dcda386ebd0e61e4af3e698
[ "MIT" ]
25
2021-05-21T23:07:39.000Z
2022-03-02T11:19:15.000Z
luncho_python/test/test_luncho_api.py
HIRANO-Satoshi/luncho
caf41fc68e8c95130dcda386ebd0e61e4af3e698
[ "MIT" ]
null
null
null
""" Client library for Luncho API. Use luncho.ts and luncho.py rather than LunchoAPI.ts and others. # noqa: E501 The version of the OpenAPI document: 0.0.1 Generated by: https://openapi-generator.tech """ import unittest import luncho_python from luncho_python.api.luncho_api import LunchoApi # noqa: E501 class TestLunchoApi(unittest.TestCase): """LunchoApi unit test stubs""" def setUp(self): self.api = LunchoApi() # noqa: E501 def tearDown(self): pass def test_countries(self): """Test case for countries Countries # noqa: E501 """ pass def test_luncho_data(self): """Test case for luncho_data Lunchodata # noqa: E501 """ pass def test_luncho_datas(self): """Test case for luncho_datas Lunchodatas # noqa: E501 """ pass if __name__ == '__main__': unittest.main()
18.94
82
0.61246
114
947
4.929825
0.447368
0.085409
0.058719
0.080071
0.163701
0.088968
0
0
0
0
0
0.03139
0.293559
947
49
83
19.326531
0.80867
0.42661
0
0.25
1
0
0.017897
0
0
0
0
0
0
1
0.3125
false
0.25
0.1875
0
0.5625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
b0b15288c8c6b7e9d259ab1c7bc37dc5747f179d
113
py
Python
sameproject/version/commands.py
js-ts/fix-same-dataset-tests
d76091d1b7bac4d267caaf9b6e04dd255aef8053
[ "Apache-2.0" ]
null
null
null
sameproject/version/commands.py
js-ts/fix-same-dataset-tests
d76091d1b7bac4d267caaf9b6e04dd255aef8053
[ "Apache-2.0" ]
null
null
null
sameproject/version/commands.py
js-ts/fix-same-dataset-tests
d76091d1b7bac4d267caaf9b6e04dd255aef8053
[ "Apache-2.0" ]
null
null
null
import click @click.command() def version(): """Prints the versions for the CLI""" click.echo("0.0.1")
14.125
41
0.628319
17
113
4.176471
0.764706
0
0
0
0
0
0
0
0
0
0
0.032967
0.19469
113
7
42
16.142857
0.747253
0.274336
0
0
0
0
0.065789
0
0
0
0
0
0
1
0.25
true
0
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
b0df0bc9a015ca41c3f831fc3db0c169ed76132b
5,445
py
Python
benchmarks/benchmarks.py
alimanfoo/skallel-tensor
0019440b24de24141b046739a02ad587dc621748
[ "MIT" ]
2
2019-08-22T21:48:58.000Z
2020-02-17T15:44:23.000Z
benchmarks/benchmarks.py
alimanfoo/skallel-tensor
0019440b24de24141b046739a02ad587dc621748
[ "MIT" ]
9
2019-07-04T00:42:22.000Z
2019-10-01T18:41:13.000Z
benchmarks/benchmarks.py
alimanfoo/skallel-tensor
0019440b24de24141b046739a02ad587dc621748
[ "MIT" ]
1
2019-06-25T07:36:51.000Z
2019-06-25T07:36:51.000Z
import numpy as np import dask.array as da from numba import cuda import os from skallel_tensor import numpy_backend, dask_backend, cuda_backend cudasim = False if os.environ.get("NUMBA_ENABLE_CUDASIM", "0") == "1": cudasim = True class TimeGenotypes3D: """Timing benchmarks for genotypes 3D functions.""" def setup(self): self.data = np.random.randint(-1, 4, size=(20000, 1000, 2), dtype="i1") self.data_dask = da.from_array(self.data, chunks=(2000, 1000, 2)) if not cudasim: self.data_cuda = cuda.to_device(self.data) self.data_dask_cuda = self.data_dask.map_blocks(cuda.to_device) def time_locate_hom_numpy(self): numpy_backend.genotypes_3d_locate_hom(self.data) def time_locate_hom_dask(self): dask_backend.genotypes_3d_locate_hom(self.data_dask).compute() def time_locate_het_numpy(self): numpy_backend.genotypes_3d_locate_het(self.data) def time_locate_het_dask(self): dask_backend.genotypes_3d_locate_het(self.data_dask).compute() def time_locate_call_numpy(self): numpy_backend.genotypes_3d_locate_call( self.data, np.array([0, 1], dtype="i1") ) def time_locate_call_dask(self): dask_backend.genotypes_3d_locate_call( self.data_dask, call=np.array([0, 1], dtype="i1") ).compute() def time_count_alleles_numpy(self): numpy_backend.genotypes_3d_count_alleles(self.data, max_allele=3) def time_count_alleles_cuda(self): if not cudasim: cuda_backend.genotypes_3d_count_alleles( self.data_cuda, max_allele=3 ) cuda.synchronize() def time_count_alleles_dask(self): dask_backend.genotypes_3d_count_alleles( self.data_dask, max_allele=3 ).compute() def time_count_alleles_dask_cuda(self): if not cudasim: dask_backend.genotypes_3d_count_alleles( self.data_dask_cuda, max_allele=3 ).compute(scheduler="single-threaded") def time_to_called_allele_counts_numpy(self): numpy_backend.genotypes_3d_to_called_allele_counts(self.data) def time_to_called_allele_counts_dask(self): dask_backend.genotypes_3d_to_called_allele_counts( self.data_dask ).compute() def time_to_missing_allele_counts_numpy(self): 
numpy_backend.genotypes_3d_to_missing_allele_counts(self.data) def time_to_missing_allele_counts_dask(self): dask_backend.genotypes_3d_to_missing_allele_counts( self.data_dask ).compute() def time_to_allele_counts_numpy(self): numpy_backend.genotypes_3d_to_allele_counts(self.data, max_allele=3) def time_to_allele_counts_dask(self): dask_backend.genotypes_3d_to_allele_counts( self.data_dask, max_allele=3 ).compute() def time_to_allele_counts_melt_numpy(self): numpy_backend.genotypes_3d_to_allele_counts_melt( self.data, max_allele=3 ) def time_to_allele_counts_melt_dask(self): dask_backend.genotypes_3d_to_allele_counts_melt( self.data_dask, max_allele=3 ).compute() def time_to_major_allele_counts_numpy(self): numpy_backend.genotypes_3d_to_major_allele_counts( self.data, max_allele=3 ) def time_to_major_allele_counts_dask(self): dask_backend.genotypes_3d_to_major_allele_counts( self.data_dask, max_allele=3 ).compute() class TimeAlleleCounts2D: """Timing benchmarks for allele counts 2D functions.""" def setup(self): self.data = np.random.randint(0, 100, size=(10000000, 4), dtype="i4") self.data_dask = da.from_array(self.data, chunks=(100000, -1)) def time_to_frequencies_numpy(self): numpy_backend.allele_counts_2d_to_frequencies(self.data) def time_allelism_numpy(self): numpy_backend.allele_counts_2d_allelism(self.data) def time_max_allele_numpy(self): numpy_backend.allele_counts_2d_max_allele(self.data) def time_to_frequencies_dask(self): dask_backend.allele_counts_2d_to_frequencies(self.data_dask).compute() def time_allelism_dask(self): dask_backend.allele_counts_2d_allelism(self.data_dask).compute() def time_max_allele_dask(self): dask_backend.allele_counts_2d_max_allele(self.data_dask).compute() class TimeAlleleCounts3D: """Timing benchmarks for allele counts 3D functions.""" def setup(self): gt = np.random.randint(-1, 4, size=(10000, 1000, 2), dtype="i1") self.data = numpy_backend.genotypes_3d_to_allele_counts( gt, max_allele=3 ) self.data_dask = 
da.from_array(self.data, chunks=(1000, 200, -1)) def time_to_frequencies_numpy(self): numpy_backend.allele_counts_3d_to_frequencies(self.data) def time_to_frequencies_dask(self): dask_backend.allele_counts_3d_to_frequencies(self.data_dask).compute() def time_allelism_numpy(self): numpy_backend.allele_counts_3d_allelism(self.data) def time_allelism_dask(self): dask_backend.allele_counts_3d_allelism(self.data_dask).compute() def time_max_allele_numpy(self): numpy_backend.allele_counts_3d_max_allele(self.data) def time_max_allele_dask(self): dask_backend.allele_counts_3d_max_allele(self.data_dask).compute()
33.20122
79
0.705969
758
5,445
4.651715
0.109499
0.102099
0.071469
0.089336
0.836358
0.775666
0.718945
0.569767
0.538571
0.176687
0
0.028182
0.204959
5,445
163
80
33.404908
0.786325
0.02663
0
0.299145
0
0
0.008895
0
0
0
0
0
0
1
0.299145
false
0
0.042735
0
0.367521
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
9fdaffebf26f4d7093c0bd80841cdcf348c7c7a4
1,081
py
Python
parse/main/kanjiData.py
Tomazis/kioku
31fc433581dd63d9e973c48beb3232e99abb4ad8
[ "Apache-2.0" ]
null
null
null
parse/main/kanjiData.py
Tomazis/kioku
31fc433581dd63d9e973c48beb3232e99abb4ad8
[ "Apache-2.0" ]
null
null
null
parse/main/kanjiData.py
Tomazis/kioku
31fc433581dd63d9e973c48beb3232e99abb4ad8
[ "Apache-2.0" ]
null
null
null
from dataclasses import dataclass, field from typing import List @dataclass class Kanji: # __slots__ = ['name', 'primary', 'alternatives', 'onyomi', 'kunyomi', 'progress'] name: str = None primary: str = None alternatives: List[str] = field(default_factory=lambda: []) onyomi: List[str] = field(default_factory=lambda: []) kunyomi: List[str] = field(default_factory=lambda: []) progress: str = None level: int = None @dataclass class Sentence: # __slots__ = ['jap', 'eng'] jap: str eng: str @dataclass class Word: # __slots__ = ['name', 'primary', 'alternatives', 'reading', 'wordType', 'sentences', 'composition', 'progress'] name: str = None primary: str = None alternatives: List[str] = field(default_factory=lambda: []) reading: List[str] = field(default_factory=lambda: []) wordType: List[str] = field(default_factory=lambda: []) sentences: List[Sentence] = field(default_factory=lambda: []) composition: List[str] = field(default_factory=lambda: []) progress: str = None level: int = None
33.78125
116
0.6605
121
1,081
5.735537
0.256198
0.138329
0.21902
0.288184
0.530259
0.530259
0.391931
0.391931
0.391931
0.391931
0
0
0.189639
1,081
32
117
33.78125
0.792237
0.201665
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0
0.076923
0
0.884615
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
9fecf838b6dbc93f73afdac921c4570912ed1a04
74,738
py
Python
src/python2/sdp/model/wave/propagator.py
LeiShi/Synthetic-Diagnostics-Platform
870120d3fd14b2a3c89c6e6e85625d1e9109a2de
[ "BSD-3-Clause" ]
5
2019-08-16T22:08:19.000Z
2021-02-24T02:47:05.000Z
src/python2/sdp/model/wave/propagator.py
justthepython/Synthetic-Diagnostics-Platform
5f1cb5c29d182490acbd4f3c167f0e09ec211236
[ "BSD-3-Clause" ]
1
2016-05-11T12:58:00.000Z
2016-05-11T17:18:36.000Z
src/python2/sdp/model/wave/propagator.py
justthepython/Synthetic-Diagnostics-Platform
5f1cb5c29d182490acbd4f3c167f0e09ec211236
[ "BSD-3-Clause" ]
5
2018-04-29T12:35:59.000Z
2020-01-10T03:38:30.000Z
# -*- coding: utf-8 -*- """ Created on Tue Jan 26 15:20:15 2016 @author: lei Propagators for electromagnetic waves propagating in plasma """ from __future__ import print_function import sys from time import clock from abc import ABCMeta, abstractmethod, abstractproperty from math import cos import warnings import numpy as np from numpy.fft import fft, ifft, fftfreq from scipy.integrate import cumtrapz, quadrature, trapz from scipy.interpolate import interp1d from ...plasma.dielectensor import HotDielectric, Dielectric, \ ColdElectronColdIon, ResonanceError from ...plasma.profile import PlasmaProfile from ...settings.unitsystem import cgs class Propagator(object): __metaclass__ = ABCMeta @abstractmethod def propagate(self, omega, x_start, x_end, nx, E_start, Y1D, Z1D): pass @abstractproperty def power_flow(self): pass @property def properties(self): """Serializable data for transferring in parallel run""" return Propagator_property(self) class Propagator_property(object): def __init__(self, propagator): self.E = propagator.E self.eps0 = propagator.eps0 self.deps = propagator.deps self.dimension = propagator.dimension if(self.dimension == 1): self.x_coords = propagator.x_coords else: self.x_coords = propagator.calc_x_coords self.y_coords = propagator.y_coords self.z_coords = propagator.z_coords self.power_flow = propagator.power_flow class ParaxialPerpendicularPropagator1D(Propagator): r""" The paraxial propagator for perpendicular propagation of 1D waves. Initialization ************** ParaxialPerpendicularPropagator1D(self, plasma, dielectric_class, polarization, direction, unitsystem=cgs, tol=1e-14) :param plasma: plasma profile under study :type plasma: :py:class:`...plasma.PlasmaProfile.PlasmaProfile` object :param dielectric_class: dielectric tensor model used for calculating full epsilon :type dielectric_class: derived class from :py:class:`...plasma.DielectricTensor.Dielectric` :param polarization: specific polarization for cold plasma perpendicular waves. 
Either 'X' or 'O'. :type polarization: string, either 'X' or 'O' :param direction: propagation direction. 1 means propagating along positive x direction, -1 along negative x direction. :type direction: int, either 1 or -1. :param unitsystem: unit system used :type unitsystem: :py:class:`...settings.UnitSystem` object :param float tol: the tolerance for testing zero components and determining resonance and cutoff. Default to be 1e-14 :param int max_harmonic: highest order harmonic to keep. Only used in hot electron models. :param int max_power: highest power in lambda to keep. Only used in hot electron models. :raise AssertionError: if parameters passed in are not as expected. geometry ******** The usual coordinates system is used. z direction: The background magnetic field direction. Magnetic field is assumed no shear. x direction: The direction of the wave's main propagation. In Tokamak diagnostic case, it's usually very close to major radius direction. For near mid- plane diagnostics, it's also along the gradiant of density and temperature profile. y direction: The 3rd direction which perpendicular to both x and z. (x,y,z) should form a right-handed system Approximations ************** Paraxial approximation: wave propagates mainly along x direction. Refraction and diffraction effects are weak with in the region of interest 1D approximation: plasma perturbation is uniform in both y and z directions. Wave amplitude can be Fourier transformed along both of these directions. Method ******* Electromagnetic wave equation in plasma is solved under above approximations. WKB kind of solution is assumed, and it's phase and amplitude obtained by solving 0th and 1st order equations. The original equation is .. math:: -\nabla \times \nabla \times E + \frac{\omega^2}{c^2} \epsilon\cdot E=0 Using Paraxial approximation and the WKB solution [1]_: .. math:: E = E_0(x) \exp\left( \mathrm{i} \int\limits^x k(x')\mathrm{d}x'\right) :label: WKB_solution 1. 
The 0th order equation is then: .. math:: (\epsilon - n^2 {\bf I} + n^2\hat{x}\hat{x}) \cdot E = 0 where :math:`n \equiv ck/\omega` is the refractive index. Non-zero solution requires zero determinant of the tensor in front of E, this gives us the usual dispersion relation. There are two solutions of :math:`n`: .. math:: n^2 = \epsilon_{zz} \quad \text{(for O-mode)} n^2 = \frac{\epsilon_{yy}\epsilon_{xx}-\epsilon_{xy}\epsilon_{yx}} {\epsilon_{xx}} \quad \text{(for X-mode)} The sign of :math:`k` is then determined by direction of propagation. In our convention, :math:`k>0` means propagation along positive x, :math:`k<0` along negative x. The corresponding eigen-vectors are: .. math:: e_O = \begin{pmatrix} 0 \\ 0 \\ 1 \end{pmatrix} \;, \quad e_X =\frac{1}{\sqrt{|\epsilon_{xy}|^2+|\epsilon_{xx}|^2}} \begin{pmatrix} -\epsilon_{xy} \\ \epsilon_{xx} \\ 0 \end{pmatrix} 2. The 2nd order equation is: a. O-mode: .. math:: 2\mathrm{i}(kE_0' + \frac{1}{2}k'E_0) + \left( \frac{\partial^2} {\partial y^2}+ P \frac{\partial^2}{\partial z^2}\right) E_0 + \frac{\omega^2}{c^2}\delta \epsilon_{OO} E_0 = 0, where :math:`\delta \epsilon_{OO} \equiv e^*_{O} \cdot \delta \epsilon \cdot e_{O}` is the perturbed dielectric tensor element projected by O mode eigen vector. Since :math:`\delta\epsilon` does not depend on y and z, we can Fourier transform along y and z direction, and obtain the equation for :math:`\hat{E}_0(x, k_y, k_z)`: .. math:: 2\mathrm{i}(k\hat{E}_0' + \frac{1}{2}k'\hat{E}_0) - \left( k_y^2 + P k_z^2 \right) \hat{E}_0 + \frac{\omega^2}{c^2}\delta \epsilon_{OO} \hat{E}_0 = 0, b. X-mode: .. 
math:: 2\mathrm{i}\left[k\left(\frac{S}{(S^2+D^2)^{1/2}}E_0\right)' + \frac{1}{2}k'\left(\frac{S}{(S^2+D^2)^{1/2}}E_0\right)\right] + \left[ \frac{\partial^2}{\partial y^2} + \left( \frac{S^2+D^2}{S^2}- \frac{(S^2-D^2)D^2}{(S-P)S^2}\right) \frac{\partial^2}{\partial z^2}\right] E_0 + \frac{S^2+D^2}{S^2} \frac{\omega^2}{c^2}\delta \epsilon_{XX} E_0 = 0, Fourier transform along y, z directions, we have equation for :math:`\hat{E}_0`. .. math:: 2\mathrm{i}\left[k\left(\frac{S}{(S^2+D^2)^{1/2}}\hat{E}_0\right)' + \frac{1}{2}k'\left(\frac{S}{(S^2+D^2)^{1/2}}\hat{E}_0\right)\right] - \left[ k_y^2 + \left( \frac{S^2+D^2}{S^2}- \frac{(S^2-D^2)D^2}{(S-P)S^2}\right) k_z^2 \right] \hat{E}_0 + \frac{S^2+D^2}{S^2} \frac{\omega^2}{c^2}\delta \epsilon_{XX} \hat{E}_0 = 0, Letting :math:`F \equiv |k|^{1/2}\frac{S}{(S^2+D^2)^{1/2}}\hat{E}_0` we have .. math:: 2\mathrm{i}k F'(x, k_y, k_z) - \left[k_y^2 + C(x) k_z^2\right] F(x, k_y, k_z) + A(x) \frac{\omega^2}{c^2}\delta \epsilon_{OO/XX} F(x, k_y, k_z)= 0. A Formal solution to this equation is .. math:: F(x, k_y, k_z) =\exp\left( \mathrm{i} \int\limits_0^x \frac{1}{2k(x')}\left(A(x')\frac{\omega^2}{c^2}\delta \epsilon (x') - k_y^2 - C(x') k_z^2 \right) \mathrm{d}x'\right) F(0) :label: 2nd_order_solution where :math:`A(x')=1, C(x') = P` for O-mode, :math:`A(x')=\frac{S^2+D^2}{S^2}, C(x')=\frac{S^2+D^2}{S^2}- \frac{(S^2-D^2)D^2}{(S-P)S^2}` for X-mode. Equation :eq:`WKB_solution` and :eq:`2nd_order_solution` gives us the whole solution up to the 2nd order. 3. corrections to finite kz Since the paraxial approximation is only accurate up to :math:`o((k_z/k_0)^2)`. If :math:`k_z > k_0/10`, the error can be at a level of 1%. Since we want to extend the validity of our paraxial model into the regimes where :math:`k_z` is reasonably low, but finite, we need to find a way to remedy this error. We will give a warning when marginal kz is beyond :math:`k_0/10` to let users aware of this potential lose of accuracy. 
Another method, mainly concerning the decay of the wave field, is to correct the decay step by taking into account the propagation direction of the main ray. .. math:: ds = \frac{dx}{\cos \theta_h \cos \theta_v} where :math:`\theta_h` and :math:`\theta_v` are tilted angles in horizontal and vertical directions of the antenna respectively. When propagating the wave, pure phase part will still be advancing in :math:`dx`, while decay part will use :math:`ds`. Numerical Scheme **************** We need to numerically evaluate the phase advance for electric field with each k_y,k_z value, then we inverse Fourier transform it back to y,z space. Phase term includes two parts: 1. main phase :math:`k_0`. This part is from 0-th order equation, and the solution is the normal dispersion relation: O-Mode: .. math:: \frac{c^2k_0^2}{\omega^2} = \epsilon_{zz} X-Mode: .. math:: \frac{c^2k_0^2}{\omega^2} = \frac{\epsilon_{yy} \epsilon_{xx} - \epsilon_{xy}*\epsilon_{yx}}{\epsilon_{xx}} 2. 2nd order phase correction. This part is in 2nd order solution, and will be retained by solving for :math:`F(x)` using :eq:`2nd_order_solution`. So, two integrations over ``x`` will be evaluated numerically. Trapezoidal integration is used to have 2nd order accurancy in ``dx``. References ********** .. [1] WKB Approximation on Wikipedia. 
https://en.wikipedia.org/wiki/WKB_approximation """ def __init__(self, plasma, dielectric_class, polarization, direction, base_dielectric_class=ColdElectronColdIon, unitsystem=cgs, tol=1e-14, max_harmonic=4, max_power=4, mute=False): assert isinstance(plasma, PlasmaProfile) assert issubclass(dielectric_class, Dielectric) assert polarization in ['X','O'] assert direction in [1, -1] self.main_dielectric = base_dielectric_class(plasma) if issubclass(dielectric_class, HotDielectric): self.fluc_dielectric = dielectric_class(plasma, max_harmonic=max_harmonic, max_power=max_power) else: self.fluc_dielectric = dielectric_class(plasma) self.polarization = polarization self.direction = direction self.tol = tol self.unit_system = unitsystem self.dimension = 1 if not mute: print('Propagator 1D initialized.', file=sys.stdout) def _SDP(self, omega): # Prepare the cold plasma dielectric components x_fine = self.main_dielectric.plasma.grid.X1D eps0_fine = self.main_dielectric.epsilon([x_fine], omega, True) S = np.real(eps0_fine[0,0]) D = np.imag(eps0_fine[1,0]) P = np.real(eps0_fine[2,2]) self._S = interp1d(x_fine, S) self._D = interp1d(x_fine, D) self._P = interp1d(x_fine, P) def _k0(self, x): """ evaluate main wave vector at specified x locations This function is mainly used to carry out the main phase integral with increased accuracy. """ c = cgs['c'] if self.polarization == 'O': try: n2 = self._P(x) except ValueError as e: print('x out of boundary. Please provide a plasma Profile \ containing larger plasma area.') raise e else: try: S = self._S(x) D = self._D(x) except ValueError as e: print('x out of boundary. Please provide a plasma Profile \ containing larger plasma area.', file=sys.stderr) raise e try: n2 = (S*S - D*D)/S except ZeroDivisionError as e: raise ResonanceError('Cold X-mode resonance encountered. \ Paraxial Propagators can not handle this situation properly. Please try to \ avoid this.') if np.any( n2 <= 0): raise ResonanceError('Cold cutoff encountered. 
Paraxial \ Propagators can not handle this situation properly. Please try to avoid this.') return self.direction * np.sqrt(n2)*self.omega/c def _generate_main_phase(self, mute=True): r""" Integrate k_0 along x, and return the phase at self.x_coordinates """ tstart = clock() try: omega = self.omega self._SDP(omega) self.main_phase = np.empty_like(self.x_coords) self._main_phase_err = np.empty_like(self.x_coords) # Initial phase is set to 0 self.main_phase[0] = 0 self._main_phase_err[0] = 0 # The rest of the phases are numerically integrated over k_0 for i, xi in enumerate(self.x_coords[:-1]): xi_n = self.x_coords[i+1] self.main_phase[i+1], self._main_phase_err[i+1] = \ quadrature(self._k0, xi, xi_n) self.main_phase[i+1] += self.main_phase[i] self._main_phase_err[i+1] += self._main_phase_err[i] except AttributeError as e: print('Main phase function can only be called AFTER propagate \ function is called.', file=sys.stderr) raise e tend = clock() if not mute: print('Main phase generated. Time used: {:.3}'.format(tend-tstart)) def _generate_epsilon0(self, mute=True): r"""Generate main dielectric :math:`\epsilon_0` along the ray The main ray is assumed along x direction. Main dielectric tensor uses Cold Electron Cold Ion model Needs Attribute: self.omega self.x_coords self.main_dielectric Create Attribute: self.eps0 """ tstart = clock() omega = self.omega x_coords = self.x_coords self.eps0 = self.main_dielectric.epsilon([x_coords], omega, True) tend = clock() if not mute: print('Epsilon0 generated. Time used: {:.3}s'.format(tend-tstart), file=sys.stdout) def _generate_k(self, mask_order=4, mute=True): """Calculate k_0 along the reference ray path :param mask_order: the decay order where kz will be cut off. If |E_k| peaks at k0, then we pick the range (k0-dk, k0+dk) to use in calculating delta_epsilon. dk is determined by the standard deviation of |E_k| times the mask_order. i.e. the masked out part have |E_k| less than exp(-mask_order**2/2)*|E_k,max|. 
""" tstart = clock() omega = self.omega c=self.unit_system['c'] eps0 = self.eps0 if self.polarization == 'O': P = np.real(eps0[2,2,:]) if np.any(P < self.tol): raise ResonanceError('Cutoff of O mode occurs. Paraxial \ propagator is not appropriate in this case. Use full wave solver instead.') self.k_0 = self.direction*omega/c * np.sqrt(P) else: S = np.real(eps0[0,0,:]) D = np.imag(eps0[1,0,:]) numerator = S*S - D*D if np.any(S < self.tol): raise ResonanceError('Cold Resonance of X mode occurs. Change \ to full wave solver with Relativistic Dielectric Tensor to overcome this.') if np.any(numerator < self.tol): raise ResonanceError('Cutoff of X mode occrus. Use full wave \ solver instead of paraxial solver.') self.k_0 = self.direction*omega/c * np.sqrt(numerator/S) # generate wave vector arrays # Fourier transform E along y and z self.E_k_start = np.fft.fft2(self.E_start) self.nz = len(self.z_coords) self.dz = self.z_coords[1] - self.z_coords[0] self.kz = 2*np.pi*np.fft.fftfreq(self.nz, self.dz) self.ny = len(self.y_coords) self.dy = self.y_coords[1] - self.y_coords[0] self.ky = 2*np.pi*np.fft.fftfreq(self.ny, self.dy) # we need to mask kz in order to avoid non-physical zero k_parallel # components # find out the peak location marg = np.argmax(np.abs(self.E_k_start)) # find the y index of the peak myarg = marg % self.ny Ekmax = np.max(np.abs(self.E_k_start)) E_margin = Ekmax*np.exp(-mask_order**2/2.) # create the mask for components greater than our marginal E, they will # be considered as significant mask = np.abs(self.E_k_start[:,myarg]) > E_margin self.central_kz_idx = marg // self.ny self.central_kz = self.kz[self.central_kz_idx] # choose the largest kz in kept part as the marginal kz kz_margin = np.max(np.abs(self.kz[mask])) self.delta_kz = kz_margin - self.central_kz if not self._optimize_z: # No kz optimization, all kz fields will be propagated. But with a # filtered value to avoid false alarm of kz too small. 
# fill all outside kz with the marginal kz self.masked_kz = np.copy(self.kz) self.masked_kz[~mask] = kz_margin else: # with kz optimization, E_k_start and kz arrays will shunk to the # minimum size contatining only the significant components of # wave-vectors. They will be restored back into spatial space after # propagation. self._mask_z = mask # keep a record of the original E_k_start, for restoration after # propagation self._E_k_origin = self.E_k_start self._nz_origin = self.nz self.E_k_start = self.E_k_start[mask, :] self.masked_kz = self.kz[mask] self.nz = self.masked_kz.shape[0] tend = clock() if not mute: print('k0, ky, kz generated. Time used: {:.3}s'.format\ (tend-tstart), file=sys.stdout) def _generate_delta_epsilon(self, mute=True): r"""Generate fluctuated dielectric :math:`\delta\epsilon` on full mesh Fluctuated dielectric tensor may use any dielectric model. Needs Attribute:: self.omega self.x_coords self.k_0 self.kz self.eps0 Create Attribute:: self.deps """ tstart = clock() omega = self.omega x_coords = self.x_coords k_perp = self.k_0 k_para = self.masked_kz time = self.time self.deps = np.empty((3,3,len(k_para),len(x_coords)),dtype='complex') for i,x in enumerate(x_coords): self.deps[... ,i] = self.fluc_dielectric.epsilon([x],omega, k_para, k_perp[i], self.eq_only, time)-\ self.eps0[:,:,np.newaxis,i] # add one dimension for ky, between kz, and spatial coordinates. 
self.deps = self.deps[..., np.newaxis, :] tend = clock() if not mute: print('Delta_epsilon generated.Time used: {:.3}s'.format(tend-\ tstart), file=sys.stdout) def _generate_eOX(self, mute=True): """Create unit polarization vectors along the ray """ tstart = clock() if self.polarization == 'O': self.e_x = 0 self.e_y = 0 self.e_z = 1 else: exx = self.eps0[0, 0, :] # eyy = self.eps0[1, 1, :] exy = self.eps0[0, 1, :] # eyx = self.eps0[1, 0, :] exy_mod = np.abs(exy) exx_mod = np.abs(exx) norm = 1/np.sqrt(exy_mod*exy_mod + exx_mod*exx_mod) self.e_x = -exy * norm self.e_y = exx * norm self.e_z = 0 tend = clock() if not mute: print('Polarization eigen-vector generated. Time used: {:.3}s'.\ format(tend-tstart), file=sys.stdout) def _generate_F(self, mute=True): """integrate the phase term to get F. Note: F=k^(1/2) E """ tstart = clock() ny=self.ny nz=self.nz ky = self.ky[np.newaxis, :, np.newaxis] kz = self.masked_kz[:, np.newaxis, np.newaxis] omega2 = self.omega*self.omega c = self.unit_system['c'] c2 = c*c S = np.real(self.eps0[0,0]) D = np.imag(self.eps0[1,0]) P = np.real(self.eps0[2,2]) if self.polarization == 'O': de_O = self.deps[2, 2, ... ] F_k0 = self.E_k_start * np.sqrt(np.abs(self.k_0[0])) if(self._debug): self.dphi_eps = cumtrapz(omega2/c2*de_O/(2*self.k_0), x=self.x_coords, initial=0) self.dphi_ky = cumtrapz(-ky*ky/(2*self.k_0), x=self.x_coords, initial=0) self.dphi_kz = cumtrapz(-P*kz*kz/(2*self.k_0), x=self.x_coords, initial=0) self.delta_phase = self.dphi_eps + self.dphi_ky + self.dphi_kz else: self.delta_phase = cumtrapz((omega2/c2*de_O-ky*ky- \ P*kz*kz)/(2*self.k_0), x=self.x_coords, initial=0) self.E_k0 = np.exp(1j*self.delta_phase)*F_k0[..., np.newaxis] /\ np.sqrt(np.abs(self.k_0)) else: dexx = self.deps[0, 0, ...] dexy = self.deps[0, 1, ...] deyx = self.deps[1, 0, ...] deyy = self.deps[1, 1, ...] S2 = S*S D2 = D*D # vacuum case needs special attention. C coefficient has a 0/0 part # the limit gives C=1, which is correct for vacuum. 
vacuum_idx = np.abs(D) < self.tol non_vacuum = np.logical_not(vacuum_idx) C = np.empty_like(self.x_coords) C[vacuum_idx] = 1 C[non_vacuum] = (S2+D2)/S2 - (S2-D2)*D2/(S2*(S-P)) ex = self.e_x ey = self.e_y ex_conj = np.conj(ex) ey_conj = np.conj(ey) ey_mod = np.sqrt(ey*ey_conj) de_X = ex_conj*dexx*ex + ex_conj*dexy*ey + ey_conj*deyx*ex + \ ey_conj*deyy*ey de_X = de_X * np.ones((nz,ny,1)) F_k0 =self.E_k_start * np.sqrt(np.abs(self.k_0[0])) * ey_mod[0] if self._oblique_correction: oblique_coeff = np.abs(cos(self.tilt_h)*cos(self.tilt_v)) else: oblique_coeff = 1 if(self._debug): self.pe = (S2+D2)/S2* omega2/c2 *de_X /\ (2*self.k_0) # decay step size needs to be corrected for finite tilted angle self.dphi_eps = cumtrapz(np.real(self.pe), x=self.x_coords, initial=0) + \ 1j*cumtrapz(np.imag(self.pe), x=self.x_coords, initial=0) / oblique_coeff self.dphi_ky = cumtrapz(-ky*ky/(2*self.k_0), x=self.x_coords, initial=0) self.dphi_kz = cumtrapz(-C*kz*kz/(2*self.k_0), x=self.x_coords, initial=0) self.delta_phase = self.dphi_eps + self.dphi_ky + self.dphi_kz else: self.delta_phase = cumtrapz(((S2+D2)/S2* omega2/c2 *\ np.real(de_X) -\ ky*ky-C*kz*kz)/(2*self.k_0), x=self.x_coords, initial=0) +\ 1j*cumtrapz(((S2+D2)/S2* omega2/c2 *np.imag(de_X)),\ x=self.x_coords, initial=0) / oblique_coeff self.E_k0 = np.exp(1j*self.delta_phase)*F_k0[..., np.newaxis] / \ np.sqrt(np.abs(self.k_0)) / ey_mod tend = clock() if not mute: print('F function calculated. 
Time used: {:.3}s'.format\ (tend-tstart), file=sys.stdout) def _generate_E(self, mute=True): """Calculate the total E including the main phase advance """ tstart = clock() if self._include_main_phase: self._generate_main_phase(mute=mute) self.E_k = self.E_k0 * np.exp(1j*self.main_phase) else: self.E_k = self.E_k0 if self._optimize_z: # restore to the original shape in z self.nz = self._nz_origin self._Ek_calc = self.E_k self.E_k = np.zeros((self.nz, self.ny, self.nx), dtype='complex') self.E_k[self._mask_z] = self._Ek_calc if self._keepFFTz: self.E = self.E_k else: self.E = np.fft.ifft2(self.E_k, axes=(0,1)) tend = clock() if not mute: print('E field calculated. Time used: {:.3}s'.format(tend-tstart), file=sys.stdout) def propagate(self, omega, x_start, x_end, nx, E_start, y_E, z_E, x_coords=None, time=None, tilt_v=0, tilt_h=0, mute=True, debug_mode=False, include_main_phase=False, keepFFTz=False, normalize_E=False, kz_mask_order=4, oblique_correction=True, tolrel=1e-3, optimize_z=True): r"""propagate(self, omega, x_start, x_end, nx, E_start, y_E, z_E, x_coords=None, regular_E_mesh=True, time=None) Propagate electric field from x_start to x_end The propagation is assumed mainly in x direction. The (ray_y,ray_z) is the (y,z) coordinates of the reference ray path, on which the equilibrium dielectric tensor is taken to be :math:`\epsilon_0`. See :py:class:`ParaxialPerpendicularPropagator1D` for detailed description of the method and assumptions. :param float omega: angular frequency of the wave, omega must be positive. 
:param E_start: complex amplitude of the electric field at x_start, :type E_start: ndarray of complex with shape (nz, ny), :param float x_start: starting point for propagation :param float x_end: end point for propagation :param int nx: number of intermediate steps to use for propagation :param y_E: y coordinates of the E_start mesh, uniformly placed :type y_E: 1D array of float :param z_E: z coordinates of E_start mesh, uniformly placed :type z_E: 1D array of float :param x_coords: *Optional*, x coordinates to use for propagation, if given, *x_start*, *x_end*, and *nx* are ignored. :type x_coords: 1d array of float. Must be monotonic. :param int time: chosen time step of perturbation in plasma. If None, only equilibrium plasma is used. :param float tilt_v: tilted angle of the main ray in vertical direction , in radian. Positive means tilted upwards. :param float tilt_h: tilted angle of the main ray in horizontal direction, in radian. Positive means tilted towards positive Z direction. :param bool mute: if True, no intermediate outputs for progress. :param bool debug_mode: if True, additional detailed information will be saved for later inspection. :param bool include_main_phase: if True, the calculated E field will have contribution from eikonal phase term :math:`\exp(i\int k_0 dx)`. Default to be False. :param bool keepFFTz: if True, result E field won't take inverse fft in z direction, thus still represent components in kz space. Default is False. :param bool normalize_E: if True, maximum incidental E field will be normalized to 1 before propagation, and be rescaled back afterwards. This may be good for extreme amplitude incidental waves. Default is False. :param kz_mask_order: mask order to pass into _generate_k. After taking FFT on E0, a Gaussian-like intensity is expected in kz space. In order to avoid numerical difficulties around high kz and/or zero kz, we mask out kz components that are very small compared to the central kz component. 
kz_mask_order controls how small components are cut off. In unit of standard deviation, e.g. kz_mask_order=4 means kzs farther than 4 standard deviation away from central kz will be masked out . Default is 4, which means kzs where E(kz) < 3e-4 Emax will be ignored. :param oblique_correction: if True, correction to oblique incident wave will be added. The decay part will have :math:`\cos(\theta_h)\cos(\theta_v)` term. Default is True. :type oblique_correction: bool :param bool optimize_z: if True, an optimization in Z direction will be applied. A filter in kz space will be set, wave vectors outside a certain margin from the central wave vector will be masked out, and won't propagate. In oblique cases, this optimization may provide a maximum 10 times speed boost. Default is True. """ tstart = clock() assert omega > 0 assert E_start.ndim==2, 'Initial E field must be specified on a Z-Y \ plane' assert E_start.shape[1] == y_E.shape[0] assert E_start.shape[0] == z_E.shape[0] if time is None: self.eq_only = True self.time=None else: self.eq_only = False self.time = time self._debug = debug_mode self._include_main_phase = include_main_phase self._keepFFTz = keepFFTz self._normalize_E = normalize_E self._oblique_correction = oblique_correction self._optimize_z = optimize_z self.omega = omega self.tilt_v = tilt_v self.tilt_h = tilt_h if (abs(cos(tilt_v)*cos(tilt_h)-1) > tolrel): if self._oblique_correction: warnings.warn('Tilted angle beyond relative error tolerance! \ {0:.3}, The phase of the result won\'t be as accurate as expected. However, \ the decay of the wave is corrected.'.format(tolrel)) else: warnings.warn('Tilted angle beyond relative error tolerance \ {0:.3}! 
The phase and amplitude of the result won\'t be as accurate as \ expected.'.format(tolrel)) if self._normalize_E: self.E_norm = np.max(np.abs(E_start)) self.E_start = E_start/self.E_norm else: self.E_start = E_start self.y_coords = np.copy(y_E) self.z_coords = np.copy(z_E) if (x_coords is None): self.x_coords = np.linspace(x_start, x_end, nx+1) else: self.x_coords = x_coords self.nx = len(self.x_coords) self._generate_epsilon0(mute=mute) self._generate_k(mute=mute, mask_order=kz_mask_order) self._generate_delta_epsilon(mute=mute) self._generate_eOX(mute=mute) self._generate_F(mute=mute) self._generate_E(mute=mute) if self._normalize_E: self.E *= self.E_norm tend = clock() if not mute: print('1D Propogation Finish! Check the returned E field. More \ infomation is available in Propagator object.\nTotal Time used: {:.3}s\n'.\ format(tend-tstart), file=sys.stdout) return self.E @property def power_flow(self): """Calculates the total power flow going through y-z plane. Normalized with the local velocity, so the value should be conserved in lossless plasma region. """ E2 = np.real(np.conj(self.E) * self.E) c = cgs['c'] E2_integrate_z = trapz(E2, x=self.z_coords, axis=0) E2_integrate_yz = trapz(E2_integrate_z,x=self.y_coords, axis=0) power_norm = c/(8*np.pi)*E2_integrate_yz * (c*self.k_0/self.omega) *\ (self.e_y*np.conj(self.e_y) + self.e_z*np.conj(self.e_z)) return power_norm class ParaxialPerpendicularPropagator2D(Propagator): r""" The paraxial propagator for perpendicular propagation of 2D waves. 1. Initialization :param plasma: plasma profile under study :type plasma: :py:class:`...plasma.PlasmaProfile.PlasmaProfile` object :param dielectric_class: dielectric tensor model used for calculating full epsilon :type dielectric_class: derived class from :py:class:`...plasma.DielectricTensor.Dielectric` :param polarization: specific polarization for cold plasma perpendicular waves. Either 'X' or 'O'. 
:type polarization: string, either 'X' or 'O' :param direction: propagation direction. 1 means propagating along positive x direction, -1 along negative x direction. :type direction: int, either 1 or -1. :param float ray_y: y coordinate of central ray. :param unitsystem: Unit System to be used. Optional, for now, only cgs is supported. :param float tol: the tolerance for testing zero components and determining resonance and cutoff. Default to be 1e-14 :param int max_harmonic: highest order harmonic to keep. Only used in hot electron models. :param int max_power: highest power in lambda to keep. Only used in hot electron models. :raise AssertionError: if parameters passed in are not as expected. 2. geometry The usual coordinates system is used. z direction: The background magnetic field direction. Magnetic field is assumed no shear. x direction: The direction of the wave's main propagation. In Tokamak diagnostic case, it's usually very close to major radius direction. For near mid- plane diagnostics, it's also along the gradiant of density and temperature profile. y direction: The 3rd direction which perpendicular to both x and z. (x,y,z) should form a right-handed system 3. Approximations Paraxial approximation: wave propagates mainly along x direction. Refraction and diffraction effects are weak with in the region of interest 2D approximation: Plasma perturbations are assumed uniform along magnetic field lines, so the perturbed dielectric tensor is not a function of z. So we can Fourier transform the wave amplitude in z direction and analyze each k_parallel component separately. 4. Ordering We assume the length scales in the problem obey the following ordering: .. math:: \frac{\lambda}{E}\frac{\partial E}{\partial y} \sim \delta .. 
math:: \frac{\delta\epsilon}{\epsilon_0} \sim \delta^2 where :math:`\epsilon_0` is chosen to be the equilibrium dielectric tensor along main light path, normally use Cold or Warm formulation, and :math:`\delta\epsilon` the deviation of full dielectric tensor from :math:`\epsilon_0` due to fluctuations, away from main light path, and/or relativistic kinetic effects. 5. Method Electromagnetic wave equation in plasma is solved under above approximations. WKB kind of solution is assumed, and it's phase and amplitude obtained by solving 0th and 2nd order equations. The original equation is .. math:: -\nabla \times \nabla \times E + \frac{\omega^2}{c^2} \epsilon\cdot E=0 Using Paraxial approximation and the WKB solution [1]_: .. math:: E = E_0(x,y,z) \exp\left( \mathrm{i} \int\limits^x k(x')\mathrm{d}x' \right) :label: WKB_solution a. The 0th order equation .. math:: (\epsilon_0 - n^2 {\bf I} + n^2\hat{x}\hat{x}) \cdot E = 0 where :math:`n \equiv ck/\omega` is the refractive index. Non-zero solution requires zero determinant of the tensor in front of E, this gives us the usual dispersion relation. There are two solutions of :math:`n`: .. math:: n^2 = \epsilon_{zz} \quad \text{(for O-mode)} n^2 = \frac{\epsilon_{yy}\epsilon_{xx}-\epsilon_{xy}\epsilon_{yx}} {\epsilon_{xx}} \quad \text{(for X-mode)} The corresponding eigen-vectors are: .. math:: e_O = \begin{pmatrix} 0 \\ 0 \\ 1 \end{pmatrix} \;, \quad e_X =\frac{1}{\sqrt{|\epsilon_{xy}|^2+|\epsilon_{xx}|^2}} \begin{pmatrix} -\epsilon_{xy} \\ \epsilon_{xx} \\ 0 \end{pmatrix} *The 1st order equation is natually satisfied.* b. The 2nd order equation 2nd order equations are different for O-mode and X-mode (i) O-mode .. math:: 2\mathrm{i}(kE_0' + \frac{1}{2}k'E_0) + \frac{\partial^2 E_0}{\partial y^2} + P\frac{\partial^2 E_0}{\partial z^2} + \frac{\omega^2}{c^2}e_O^* \cdot \delta\epsilon \cdot e_O E_0 = 0. Letting :math:`F \equiv k^{1/2}E_0`, we have .. 
math:: 2\mathrm{i}k \frac{\partial F(x,y,k_z)}{\partial x} + \frac{\partial^2}{\partial y^2} F(x,y,k_z) - P k_z^2 F(x,y,k_z) +\frac{\omega^2}{c^2}\delta \epsilon_{OO} F(x,y,k_z) = 0, where :math:`\delta\epsilon_{OO} \equiv e_O^* \cdot\delta\epsilon\cdot e_O = \delta \epsilon_{zz}`, and :math:`P \equiv \epsilon_{0,zz}`. (ii) X-mode .. math:: 2\mathrm{i}\left[k\left(\frac{S}{(S^2+D^2)^{1/2}}E_0\right)' + \frac{1}{2}k'\left(\frac{S}{(S^2+D^2)^{1/2}}E_0\right)\right] + \left[ \frac{\partial^2}{\partial y^2} + \left( \frac{S^2+D^2}{S^2}- \frac{(S^2-D^2)D^2}{(S-P)S^2}\right) \frac{\partial^2}{\partial z^2}\right] E_0 + \frac{S^2+D^2}{S^2} \frac{\omega^2}{c^2}\delta \epsilon_{XX} E_0 = 0, Letting :math:`F \equiv k^{1/2}\frac{S}{(S^2+D^2)^{1/2}} E_0`, and Fourier transform along z direction, we have .. math:: 2\mathrm{i}k F'(x, y, k_z) + \frac{\partial^2}{\partial y^2}F(x,y,k_z) -\left( \frac{S^2+D^2}{S^2}- \frac{(S^2-D^2)D^2}{(S-P)S^2}\right) k_z^2 F(x, y, k_z) + \frac{S^2+D^2}{S^2} \frac{\omega^2}{c^2}\delta \epsilon_{XX} F(x, y, k_z)= 0. where :math:`S \equiv \epsilon_{0,xx}` and :math:`D \equiv \mathrm{i} \epsilon_{0,xy}` are notations adopted from Cold Plasma Dielectric tensor, and :math:`\delta \epsilon_{XX} \equiv e_X^* \cdot \delta \epsilon \cdot e_X` is tensor element projected on X-mode eigen-vector. The O-mod and X-mode equations need to be solved numerically because they contain partial derivatives respect to y, and dielectric tensor depends on y. The scheme is described in the next section. c. corrections to finite kz Since the paraxial approximation is only accurate up to :math:`o((k_z/k_0)^2)`. If :math:`k_z > k_0/10`, the error can be at a level of 1%. Since we want to extend the validity of our paraxial model into the regimes where :math:`k_z` is reasonably low, but finite, we need to find a way to remedy this error. We will give a warning when marginal kz is beyond :math:`k_0/10` to let users aware of this potential lose of accuracy. 
Another method, mainly concerning the decay of the wave field, is to correct the decay step by taking into account the propagation direction of the main ray. .. math:: ds = \frac{dx}{\cos \theta_h \cos \theta_v} where :math:`\theta_h` and :math:`\theta_v` are tilted angles in horizontal and vertical directions of the antenna respectively. When propagating the wave, pure phase part will still be advancing in :math:`dx`, while decay part will use :math:`ds`. 6. Numerical Scheme The full solution includes a main phase part and an amplitude part. a. Main phase As in 1D case, the main phase is integration of :math:`k_0` over x. :math:`k_0` is obtained through dispersion relation which is the solvability condition for 0th order equation. O-mode: .. math:: k_0^2 = \frac{\omega^2}{c^2} \epsilon_{0,zz} X-mode: .. math:: k_0^2 = \frac{\omega^2}{c^2}\frac{\epsilon_{0,yy} \epsilon_{0,xx} - \epsilon_{0,xy}\epsilon_{0,yx}}{\epsilon_{0,xx}} The sign of :math:`k_0` is determined by direction of the propagation. b. Amplitude The amplitude equation is more complicated than that in 1D, because now perturbed dielectric tensor depends on y, we can no longer Fourier transform in y direction. The equation now has a general form of .. math:: 2\mathrm{i}k \frac{\partial F}{\partial x} + \frac{\partial^2 F}{\partial y^2} + C(y) F = 0, We notice that :math:`B\equiv \partial^2/\partial y^2` operator does not commute with :math:`C(y)`, so there is not a single eigen state :math:`F` for both operators. A numerical technique to solve this equation is that we propagate F along x with very small steps. Within each step, we propagate operator :math:`B` and :math:`C` separately, so we can use their own eigen state in their substeps. The scheme is like .. 
math::

        F(x+\delta x, y, k_z) = \exp\left( \frac{\mathrm{i}}{2k}
        \frac{C\delta x}{2} \right) \cdot
        \exp \left(\frac{\mathrm{i}}{2k} B \delta x\right) \cdot
        \exp \left( \frac{\mathrm{i}}{2k} \frac{C\delta x}{2} \right) F(x),

    We can show that this scheme evolves the phase with an accuracy of
    :math:`o(\delta x^2)`. Since the original equation is an order one
    differential equation in x, the Magnus expansion theorem [2]_ tells us
    the exact solution to the equation goes like

    .. math::

        F(x') = \exp(\Omega_1 + \Omega_2 + ...)F(x).

    where

    .. math::

        \Omega_1 = \int\limits_x^{x'} A(x_1) dx_1

    .. math::

        \Omega_2 = \int\limits_x^{x'}\int\limits_{x}^{x_1}
                   [A(x_1),A(x_2)] dx_1 dx_2

    and

    .. math::

        A = \frac{i}{2k(x)} (B+C(x))

    .. math::

        [A(x_1), A(x_2)] &= A(x_1)A(x_2) - A(x_2)A(x_1) \\
                         &= -\frac{1}{4k^2} ([B, C(x_2)]-[B, C(x_1)])

    if we only propagate x for a small step :math:`\delta x`, we can see that
    :math:`\Omega_1 \sim \delta x`, but :math:`\Omega_2 \sim \delta x^3`.
    We write

    .. math::

        F(x+\delta x) &= \exp( A(x_1) \delta x + o(\delta x^3)) F(x) \\
                      &= \exp\left( \frac{i\delta x}{2k}(B+C) +
                         o(\delta x^3)\right) F(x).

    Then using the Baker-Campbell-Hausdorff formula [3]_, we can show:

    .. math::

        \exp\left( \frac{\mathrm{i}}{2k} \frac{C\delta x}{2} \right) \cdot
        \exp \left(\frac{\mathrm{i}}{2k} B \delta x\right) \cdot
        \exp \left( \frac{\mathrm{i}}{2k} \frac{C\delta x}{2} \right) =
        \exp\left( \frac{i\delta x}{2k}(B+C) + o(\delta x^3)\right)

    So, finally, we show that our scheme gives a :math:`F(x+\delta x)` with a
    phase error of :math:`o(\delta x^3)`. Since the total step goes as
    :math:`1/\delta x`, we finally get a :math:`F(x)` with phase error
    :math:`\sim o(\delta x^2)`.

    7. References

    .. [1] WKB Approximation on Wikipedia.
           https://en.wikipedia.org/wiki/WKB_approximation
    .. [2] https://en.wikipedia.org/wiki/Magnus_expansion
    ..
[3] https://en.wikipedia.org/wiki/ Baker-Campbell-Hausdorff_formula """ def __init__(self, plasma, dielectric_class, polarization, direction, ray_y, unitsystem=cgs, base_dielectric_class=ColdElectronColdIon, tol=1e-14, max_harmonic=4, max_power=4, mute=False): assert isinstance(plasma, PlasmaProfile) assert issubclass(dielectric_class, Dielectric) assert polarization in ['X','O'] assert direction in [1, -1] self.main_dielectric = base_dielectric_class(plasma) self.ray_y = ray_y if issubclass(dielectric_class, HotDielectric): self.fluc_dielectric = dielectric_class(plasma, max_harmonic=max_harmonic, max_power=max_power) else: self.fluc_dielectric = dielectric_class(plasma) self.polarization = polarization self.direction = direction self.tol = tol self.unit_system = unitsystem self.dimension = 2 if not mute: print('Propagator 2D initialized.', file=sys.stdout) def _SDP(self, omega): # Prepare the cold plasma dielectric components x_fine = self.main_dielectric.plasma.grid.R1D y_fine = self.ray_y + np.zeros_like(x_fine) eps0_fine = self.main_dielectric.epsilon([y_fine, x_fine], omega, True) S = np.real(eps0_fine[0,0]) D = np.imag(eps0_fine[1,0]) P = np.real(eps0_fine[2,2]) self._S = interp1d(x_fine, S) self._D = interp1d(x_fine, D) self._P = interp1d(x_fine, P) def _k0(self, x): """ evaluate main wave vector at specified x locations This function is mainly used to carry out the main phase integral with increased accuracy. """ c = cgs['c'] if self.polarization == 'O': try: n2 = self._P(x) except ValueError as e: print('x out of boundary. Please provide a plasma Profile \ containing larger plasma area.') raise e else: try: S = self._S(x) D = self._D(x) except ValueError as e: print('x out of boundary. Please provide a plasma Profile \ containing larger plasma area.', file=sys.stderr) raise e try: n2 = (S*S - D*D)/S except ZeroDivisionError as e: raise ResonanceError('Cold X-mode resonance encountered. \ Paraxial Propagators can not handle this situation properly. 
Please try to \ avoid this.') if np.any( n2 <= 0): raise ResonanceError('Cold cutoff encountered. Paraxial \ Propagators can not handle this situation properly. Please try to avoid this.') return self.direction * np.sqrt(n2)*self.omega/c def _generate_main_phase(self, mute=True): r""" Integrate k_0 along x, and return the phase at self.x_coordinates """ tstart = clock() try: omega = self.omega self._SDP(omega) self.main_phase = np.empty_like(self.calc_x_coords) self._main_phase_err = np.empty_like(self.calc_x_coords) # Initial phase is set to 0 self.main_phase[0] = 0 self._main_phase_err[0] = 0 # The rest of the phases are numerically integrated over k_0 for i, xi in enumerate(self.calc_x_coords[:-1]): xi_n = self.calc_x_coords[i+1] self.main_phase[i+1], self._main_phase_err[i+1] = \ quadrature(self._k0, xi, xi_n) self.main_phase[i+1] += self.main_phase[i] self._main_phase_err[i+1] += self._main_phase_err[i] except AttributeError as e: print('Main phase function can only be called AFTER propagate \ function is called.', file=sys.stderr) raise e tend = clock() if not mute: print('Main phase generated. Time used: {:.3}'.format(tend-tstart)) def _generate_epsilon(self, mute=True): r"""Generate main dielectric :math:`\epsilon_0` along the ray The main ray is assumed along x direction. Main dielectric tensor uses Cold Electron Cold Ion model Needs Attribute: self.omega self.x_coords self.main_dielectric Create Attribute: self.eps0 """ tstart = clock() omega = self.omega # x_coords needs to be enlarged twice since we need to split each step # into two steps to evolve the two operators self.nx_calc = len(self.x_coords)*2-1 self.calc_x_coords = np.empty((self.nx_calc)) self.calc_x_coords[::2] = self.x_coords self.calc_x_coords[1::2] = (self.x_coords[:-1]+self.x_coords[1:])/2. self.eps0 = self.main_dielectric.epsilon\ ([np.ones_like(self.calc_x_coords)*self.ray_y, self.calc_x_coords], omega, True) tend = clock() if not mute: print('Epsilon0 generated. 
Time used: {:.3}'.format(tend-tstart), file=sys.stdout) def _generate_k(self, mute=True, mask_order=4): """Calculate k_0 along the reference ray path Need Attributes: self.omega self.eps0 self.polarization self.tol self.direction self.y_coords self.ny self.z_coords self.nz self.E_start Create Attributes: self.k_0 self.ky self.kz self.dy self.dz self.masked_kz self.E_k_start self.margin_kz: index of the marginal kz kept in self.kz self.central_kz: index of the central kz in self.kz """ tstart = clock() omega = self.omega c=self.unit_system['c'] eps0 = self.eps0 if self.polarization == 'O': P = np.real(eps0[2,2,:]) if np.any(P < self.tol): raise ResonanceError('Cutoff of O mode occurs. Paraxial \ propagator is not appropriate in this case. Use full wave solver instead.') self.k_0 = self.direction*omega/c * np.sqrt(P) else: S = np.real(eps0[0,0,:]) D = np.imag(eps0[1,0,:]) numerator = S*S - D*D if np.any(S < self.tol): raise ResonanceError('Cold Resonance of X mode occurs. Change \ to full wave solver with Relativistic Dielectric Tensor to overcome this.') if np.any(numerator < self.tol): raise ResonanceError('Cutoff of X mode occrus. Use full wave \ solver instead of paraxial solver.') self.k_0 = self.direction*omega/c * np.sqrt(numerator/S) # Fourier transform E along z self.E_k_start = np.fft.fft(self.E_start, axis=0) self.nz = len(self.z_coords) self.dz = self.z_coords[1] - self.z_coords[0] self.kz = 2*np.pi*np.fft.fftfreq(self.nz, self.dz)[:, np.newaxis, np.newaxis] self.ny = len(self.y_coords) self.dy = self.y_coords[1] - self.y_coords[0] self.ky = 2*np.pi*np.fft.fftfreq(self.ny, self.dy)[np.newaxis, :, np.newaxis] # we need to mask kz in order to avoid non-physical zero k_parallel # components # find out the peak location marg = np.argmax(np.abs(self.E_k_start)) # find the y index of the peak myarg = marg % self.ny Ekmax = np.max(np.abs(self.E_k_start)) E_margin = Ekmax*np.exp(-mask_order**2/2.) 
# create the mask for components greater than our marginal E, they will # be considered as significant mask = np.abs(self.E_k_start[:,myarg]) > E_margin self.central_kz_idx = marg // self.ny self.central_kz = self.kz[self.central_kz_idx] # choose the largest kz in kept part as the marginal kz kz_margin = np.max(np.abs(self.kz[mask])) self.delta_kz = kz_margin - self.central_kz if not self._optimize_z: # No kz optimization, all kz fields will be propagated. But with a # filtered value to avoid false alarm of kz too small. # fill all outside kz with the marginal kz self.masked_kz = np.copy(self.kz) self.masked_kz[~mask] = kz_margin else: # with kz optimization, E_k_start and kz arrays will shunk to the # minimum size contatining only the significant components of # wave-vectors. They will be restored back into configuration space # after propagation. self._mask_z = mask # keep a record of the original E_k_start, for restoration after # propagation self._E_k_origin = self.E_k_start self._nz_origin = self.nz self.E_k_start = self.E_k_start[mask, :] self.masked_kz = self.kz[mask] self.nz = self.masked_kz.shape[0] tend = clock() if not mute: print('k0, kz generated. Time used: {:.3}'.format(tend-tstart), file=sys.stdout) def _generate_delta_epsilon(self, mute=True): r"""Generate fluctuated dielectric :math:`\delta\epsilon` on full mesh Fluctuated dielectric tensor may use any dielectric model. 
Needs Attribute:: self.omega self.x_coords self.y_coords self.k_0 self.kz self.eps0 self.time Create Attribute:: self.deps """ tstart = clock() omega = self.omega time = self.time k_perp = self.k_0 k_para = self.masked_kz[:,0,0] y1d = self.y_coords self.deps = np.empty((3,3,self.nz, self.ny, self.nx_calc), dtype='complex') for i,x in enumerate(self.calc_x_coords): x1d = np.zeros_like(y1d) + x self.deps[..., i] = self.fluc_dielectric.epsilon([y1d, x1d], omega, k_para, k_perp[i], self.eq_only, time)-\ self.eps0[:,:,np.newaxis,np.newaxis,i] tend = clock() if not mute: print('Delta epsilon generated. Time used: {:.3}'.\ format(tend-tstart), file=sys.stdout) def _generate_eOX(self, mute=True): """Create unit polarization vectors along the ray Need Attributes:: self.polarization self.eps0 Create Attributes:: self.e_x self.e_y self.e_z """ tstart = clock() if self.polarization == 'O': self.e_x = 0 self.e_y = 0 self.e_z = 1 else: exx = self.eps0[0, 0, :] # eyy = self.eps0[1, 1, :] exy = self.eps0[0, 1, :] # eyx = self.eps0[1, 0, :] exy_mod = np.abs(exy) exx_mod = np.abs(exx) norm = 1/np.sqrt(exy_mod*exy_mod + exx_mod*exx_mod) self.e_x = -exy * norm self.e_y = exx * norm self.e_z = 0 self._ey_mod = np.sqrt(self.e_y * np.conj(self.e_y)) tend = clock() if not mute: print('Polarization eigen-vector generated. 
Time used: {:.3}'.\ format(tend-tstart), file=sys.stdout) def _generate_C(self, mute=True): """prepare C operator for refraction propagation C = omega^2 / c^2 * deps[2,2] for O mode C = omega^2/c^2 (D^2 deps[0,0] + iDS (deps[1,0]-deps[0,1]) + S^2 deps[1,1]) /S^2 for X mode Need Attributes:: self.omega self.unit_system self.nx self.ny self.deps self.eps0 Create Attributes:: self.C """ tstart = clock() omega = self.omega c = self.unit_system['c'] self.C = np.empty((self.ny, self.nx), dtype='complex') if self.polarization == 'O': self.C = omega*omega/(c*c) * self.deps[2,2] else: S = np.real(self.eps0[0,0]) D = np.imag(self.eps0[1,0]) S2 = S*S D2 = D*D self.C = omega*omega/(c*c) * ( D2*self.deps[0,0] + \ 1j*D*S*(self.deps[1,0]-self.deps[0,1]) + S2*self.deps[1,1] ) / S2 tend = clock() if not mute: print('Operator C generated. Time used: {:.3}'.format(tend-tstart), file=sys.stdout) def _generate_F(self, mute=True): """Prepare F0(x0,y,kz). Note: F=k^(1/2) E_z for O-mode F=k^(1/2) E_y for X-mode In order to increase efficiency, we change the axis order into [X,Y,Z] for solving F. Afterwards, we'll change back to [Z, Y, X]. Need Attributes:: self.E_k_start self.k_0 self.nz, self.ny, self.nx_calc Create Attributes:: self.F_k_start self.Fk """ tstart = clock() if self.polarization == 'O': self.F_k_start = np.sqrt(np.abs(self.k_0[0])) * self.E_k_start else: self.F_k_start = np.sqrt(np.abs(self.k_0[0])) * self._ey_mod[0] *\ self.E_k_start self.Fk = np.empty((self.nz, self.ny, self.nx_calc), dtype='complex') self.Fk[:,:,0] = self.F_k_start # Now we integrate over x using our scheme, taking care of B,C operator self._generate_C() if self._debug: # in debug mode, we want to store the phase advances due to # diffraction and refractions. 
self.dphi_eps = np.empty_like(self.C[..., ::2]) self.dphi_ky = np.empty_like(self.C[..., ::2]) self.dphi_eps[0] = 0 self.dphi_ky[0] = 0 self._counter = 1 i=0 while(i < self.nx_calc-1): F = self.Fk[:,:,i] self.Fk[:,:,i+1] = self._refraction(F, i, forward=True) i = i + 1 F = self.Fk[:,:,i] self.Fk[:,:,i+1] = self._diffraction_y(F, i) i = i + 1 F = self.Fk[:,:,i] self.Fk[:,:,i] = self._refraction(F, i, forward=False) tend = clock() if not mute: print('F field calculated. Time used: {:.3}'.format(tend-tstart), file=sys.stdout) def _refraction(self, F, i, forward=True): """ propagate the phase step with operator C advance F with dx using dielectric data at self.calc_x_coords[i] if forward==True, dx = calc_x_coords[i+1]-calc_x_coords[i] otherwise, dx = calc_x_coords[i]-calc_x_coords[i-1] refraction propagation always happens at knots. Need Attributes:: self.calc_x_coords self.k_0 self.C Create Attributes:: None """ if forward: dx = self.calc_x_coords[i+1]-self.calc_x_coords[i] else: dx = self.calc_x_coords[i]-self.calc_x_coords[i-1] C = self.C[...,i] if self._oblique_correction: oblique_coeff = np.abs(cos(self.tilt_h)*cos(self.tilt_v)) else: oblique_coeff = 1 phase = dx* (np.real(C) + \ 1j*np.imag(C)/oblique_coeff) / \ (2*self.k_0[i]) if self._debug: if forward: self._temp_dphi_eps = phase else: self.dphi_eps[..., self._counter] = \ self.dphi_eps[..., self._counter-1]+\ self._temp_dphi_eps + phase self._counter += 1 return np.exp(1j*phase)*F def _diffraction_y(self, F, i): """propagate the phase step with operator B advance F with dx = calc_x_coords[i+1] - calc_x_coords[i-1] Fourier transform along y, and the operator B becomes: B(ky) = -ky^2 diffraction propagation always happens at center between two knots Need Attributes:: self.calc_x_coords self.ky self.k_0 Create Attributes:: None """ dx = self.calc_x_coords[i+1]-self.calc_x_coords[i-1] ky = self.ky[0,:,0] B = -ky*ky phase = B*dx/(2*self.k_0[i]) if self._debug: self.dphi_ky[..., self._counter] = \ self.dphi_ky[..., 
self._counter-1] + phase Fk = np.exp(1j * phase) * fft(F) return ifft(Fk) def _generate_phase_kz(self, mute=True): """ Propagate the phase due to kz^2 a direct integration can be used Need Attributes:: self.polarization self.eps0 self.kz self.calc_x_coords self.tol Create Attributes:: self.phase_kz """ tstart = clock() if self.polarization == 'O': P = np.real(self.eps0[2,2]) self.phase_kz = cumtrapz(-P*self.masked_kz*self.masked_kz/ \ (2*self.k_0), x=self.calc_x_coords, initial=0) else: S = np.real(self.eps0[0,0]) D = np.imag(self.eps0[1,0]) P = np.real(self.eps0[2,2]) # vacuum case needs special attention. C coefficient has a 0/0 part # the limit gives C=1, which is correct for vacuum. vacuum_idx = np.abs(D) < self.tol non_vacuum = np.logical_not(vacuum_idx) S2 = (S*S)[non_vacuum] D2 = (D*D)[non_vacuum] C = np.empty_like(self.calc_x_coords) C[vacuum_idx] = 1 C[non_vacuum] = (S2+D2)/S2 - (S2-D2)*D2/\ (S2*(S[non_vacuum]-P[non_vacuum])) self.phase_kz = cumtrapz(- C*self.masked_kz*self.masked_kz / \ (2*self.k_0), x=self.calc_x_coords, initial=0) tend = clock() if not mute: print('Phase related to kz generated. 
Time used: {:.3}'.\ format(tend-tstart), file=sys.stdout) def _generate_E(self, mute=True): """Calculate the total E including the main phase advance Need Attributes: self.k_0 self.calc_x_coords self.Fk self.phase_kz self.k_0 Create Attributes:: self.main_phase self.F self.E """ tstart = clock() self._generate_phase_kz() if self._include_main_phase: self._generate_main_phase(mute=mute) self.Fk = self.Fk * np.exp(1j * self.main_phase) self.Fk = self.Fk * np.exp(1j * self.phase_kz) if self._optimize_z: # restore to the original shape in z self.nz = self._nz_origin self._Fk_calc = self.Fk self.Fk = np.zeros((self.nz, self.ny, self.nx_calc), dtype='complex') self.Fk[self._mask_z] = self._Fk_calc if self._keepFFTz: self.F = self.Fk else: self.F = np.fft.ifft(self.Fk, axis=0) if self.polarization == 'O': self.E = self.F / (np.sqrt(np.abs(self.k_0))) else: self.E = self.F / (np.sqrt(np.abs(self.k_0)) * self._ey_mod) tend = clock() if not mute: print('E field calculated. Time used: {:.3}'.format(tend-tstart), file=sys.stdout) def propagate(self, omega, x_start, x_end, nx, E_start, y_E, z_E, x_coords=None, time=None, tilt_v=0, tilt_h=0, regular_E_mesh=True, mute=True, debug_mode=False, include_main_phase=False, keepFFTz=False, normalize_E=True, kz_mask_order=4, oblique_correction=True, tolrel=1e-3, optimize_z=True): r"""propagate(self, time, omega, x_start, x_end, nx, E_start, y_E, z_E, x_coords=None) Propagate electric field from x_start to x_end The propagation is assumed mainly in x direction. The (ray_y,ray_z) is the (y,z) coordinates of the reference ray path, on which the equilibrium dielectric tensor is taken to be :math:`\epsilon_0`. See :py:class:`ParaxialPerpendicularPropagator1D` for detailed description of the method and assumptions. :param float omega: angular frequency of the wave, omega must be positive. 
:param E_start: complex amplitude of the electric field at x_start, :type E_start: ndarray of complex with shape (nz, ny), :param float x_start: starting point for propagation :param float x_end: end point for propagation :param int nx: number of intermediate steps to use for propagation :param y_E: y coordinates of the E_start mesh, uniformly placed :type y_E: 1D array of float :param z_E: z coordinates of E_start mesh, uniformly placed :type z_E: 1D array of float :param x_coords: *Optional*, x coordinates to use for propagation, if given, *x_start*, *x_end*, and *nx* are ignored. :type x_coords: 1d array of float. Must be monotonic. :param int time: chosen time step of perturbation in plasma. If None, only equilibrium plasma is used. :param float tilt_v: tilted angle of the main ray in vertical direction , in radian. Positive means tilted upwards. :param float tilt_h: tilted angle of the main ray in horizontal direction, in radian. Positive means tilted towards positive Z direction. :param bool mute: if True, no intermediate outputs for progress. :param bool debug_mode: if True, additional detailed information will be saved for later inspection. :param bool include_main_phase: if True, the calculated E field will have contribution from eikonal phase term :math:`\exp(i\int k_0 dx)`. Default to be False. :param bool keepFFTz: if True, the result E field will keep Fourier components in z-direction, both in returned value , and stored self.E attribute. Default is False. :param bool normalize_E: if True, incidental E field will be normalized so that the maximum amplitude is 1, before propagation, and rescaled back after propagation. Default is True. :param kz_mask_order: mask order to pass into _generate_k. After taking FFT on E0, a Gaussian-like intensity is expected in kz space. In order to avoid numerical difficulties around high kz and/or zero kz, we mask out kz components that are very small compared to the central kz component. 
kz_mask_order controls how small components are cut off. In unit of standard deviation, e.g. kz_mask_order=4 means kzs farther than 4 standard deviation away from central kz will be masked out . Default is 4, which means kzs where E(kz) < 3e-4 Emax will be ignored. :type kz_mask_order: int :param oblique_correction: if True, correction to oblique incident wave will be added. The decay part will have :math:`\cos(\theta_h)\cos(\theta_v)` term. Default is True. :type oblique_correction: bool :param float tolrel: Optional, a relative error tolarence for oblique effect. If (kz*ky/k0)^2 exceeds tolrel, a warning will be generated. :param bool optimize_z: if True, an optimization in Z direction will be applied. A filter in kz space will be set, wave vectors outside a certain margin from the central wave vector will be masked out, and won't propagate. In oblique cases, this optimization may provide a maximum 10 times speed boost. Default is True. """ tstart = clock() assert omega > 0, 'positive omega is required.' assert E_start.ndim==2, 'Initial E field must be specified on a Z-Y \ plane' assert E_start.shape[1] == y_E.shape[0], 'y coordinates do not match.' assert E_start.shape[0] == z_E.shape[0], 'z coordinates do not match.' if time is None: self.eq_only = True self.time = None else: self.eq_only = False self.time = time self._debug = debug_mode self._include_main_phase = include_main_phase self._keepFFTz = keepFFTz self._normalize_E = normalize_E self._oblique_correction = oblique_correction self._optimize_z = optimize_z self.omega = omega self.tilt_h = tilt_h self.tilt_v = tilt_v if (abs(cos(tilt_v)*cos(tilt_h)-1) > tolrel): if self._oblique_correction: warnings.warn('Tilted angle beyond relative error tolerance! \ {0:.3}, The phase of the result won\'t be as accurate as expected. However, \ the decay of the wave is corrected.'.format(tolrel)) else: warnings.warn('Tilted angle beyond relative error tolerance \ {0:.3}! 
The phase and amplitude of the result won\'t be as accurate as \ expected.'.format(tolrel)) if (self._normalize_E): self._E_norm = np.max(np.abs(E_start)) self.E_start = E_start/self._E_norm else: self.E_start = E_start self.y_coords = np.copy(y_E) self.ny = len(self.y_coords) self.z_coords = np.copy(z_E) self.nz = len(self.z_coords) if (x_coords is None): self.x_coords = np.linspace(x_start, x_end, nx+1) else: self.x_coords = x_coords self.nx = len(self.x_coords) self._generate_epsilon(mute=mute) self._generate_k(mute=mute, mask_order=kz_mask_order) self._generate_delta_epsilon(mute=mute) self._generate_eOX(mute=mute) self._generate_F(mute=mute) self._generate_E(mute=mute) if(self._normalize_E): self.E *= self._E_norm tend = clock() if not mute: print('2D Propagation Finish! Check the returned E field. More \ infomation is available in Propagator object. Total time used: {:.3}'.\ format(tend-tstart), file=sys.stdout) return self.E[...,::2] @property def power_flow(self): r"""Calculates the total power flow going through y-z plane. Normalized with the local velocity, so the value should be conserved in lossless plasma region. Poynting flux is shown to be [stix92]_: .. math:: P_x = \frac{c^2k}{8\pi\omega} (|E_y|^2 + |E_z|^2) .. [stix92] Waves in Plamsas, T.H.Stix, American Physics Inst. """ e2 = np.real(np.conj(self.e_y)*self.e_y + np.conj(self.e_z)*self.e_z) E2 = np.real(np.conj(self.E) * self.E) c = cgs['c'] if self._keepFFTz: dz = self.z_coords[1]-self.z_coords[0] E2_integrate_z = trapz(np.fft.fftshift(E2, axes=0), x=np.fft.fftshift(self.kz[:,0,0]), axis=0)\ * dz*dz/(2*np.pi) else: E2_integrate_z = trapz(E2, x=self.z_coords, axis=0) E2_integrate_yz = trapz(E2_integrate_z,x=self.y_coords, axis=0) power_norm = c/(8*np.pi)*E2_integrate_yz * (c*self.k_0/self.omega) *e2 return power_norm
35.589524
79
0.561803
10,398
74,738
3.916619
0.079823
0.010804
0.005304
0.00884
0.778048
0.744383
0.722529
0.709466
0.690681
0.672535
0
0.020195
0.333472
74,738
2,099
80
35.606479
0.79733
0.467433
0
0.669173
0
0
0.023684
0
0
0
0
0
0.02005
1
0.0401
false
0.002506
0.016291
0
0.073935
0.033835
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
9ff41203d6fb38e9cc89949b9a9b2920aa2d94e0
1,819
py
Python
documents/migrations/0002_add_indexes_to_document.py
City-of-Helsinki/atv
dca73dab09ab0f3a051a9f691aec5674c6369bde
[ "MIT" ]
null
null
null
documents/migrations/0002_add_indexes_to_document.py
City-of-Helsinki/atv
dca73dab09ab0f3a051a9f691aec5674c6369bde
[ "MIT" ]
34
2021-05-28T06:23:38.000Z
2022-03-08T12:42:01.000Z
documents/migrations/0002_add_indexes_to_document.py
City-of-Helsinki/atv
dca73dab09ab0f3a051a9f691aec5674c6369bde
[ "MIT" ]
1
2021-05-27T10:37:42.000Z
2021-05-27T10:37:42.000Z
# Generated by Django 3.2.3 on 2021-06-28 12:06 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("documents", "0001_initial"), ] operations = [ migrations.AddIndex( model_name="document", index=models.Index(fields=["created_at"], name="document_created_at_idx"), ), migrations.AddIndex( model_name="document", index=models.Index(fields=["updated_at"], name="document_updated_at_idx"), ), migrations.AddIndex( model_name="document", index=models.Index(fields=["business_id"], name="document_business_id_idx"), ), migrations.AddIndex( model_name="document", index=models.Index( fields=["transaction_id"], name="document_transaction_id_idx" ), ), migrations.AddIndex( model_name="document", index=models.Index(fields=["draft"], name="document_draft_idx"), ), migrations.AddIndex( model_name="document", index=models.Index( fields=["locked_after"], name="document_locked_after_idx" ), ), migrations.AddIndex( model_name="document", index=django.contrib.postgres.indexes.GinIndex( fields=["metadata"], name="document_metadata_idx" ), ), migrations.AddIndex( model_name="document", index=models.Index(fields=["status"], name="document_status_idx"), ), migrations.AddIndex( model_name="document", index=models.Index(fields=["type"], name="document_type_idx"), ), ]
31.912281
88
0.570643
171
1,819
5.847953
0.263158
0.216
0.207
0.243
0.528
0.528
0.528
0.485
0.485
0.428
0
0.014984
0.302914
1,819
56
89
32.482143
0.773659
0.024739
0
0.64
1
0
0.208804
0.0807
0
0
0
0
0
1
0
false
0
0.04
0
0.1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b01ea0cd056de8764118cedd8bf9e8f34d1f801c
306
py
Python
Campionato/admin.py
GiadaTrevisani/CampionatoDjango
189a87339acc27125ac256db78bac4024947fb2c
[ "MIT" ]
1
2020-03-25T14:07:24.000Z
2020-03-25T14:07:24.000Z
Campionato/admin.py
GiadaTrevisani/CampionatoDjango
189a87339acc27125ac256db78bac4024947fb2c
[ "MIT" ]
null
null
null
Campionato/admin.py
GiadaTrevisani/CampionatoDjango
189a87339acc27125ac256db78bac4024947fb2c
[ "MIT" ]
null
null
null
from django.contrib import admin # Register your models here. from .models import Campionato admin.site.register(Campionato) from .models import Giornata admin.site.register(Giornata) from .models import Squadra admin.site.register(Squadra) from .models import Partita admin.site.register(Partita)
16.105263
32
0.803922
41
306
6
0.341463
0.162602
0.260163
0
0
0
0
0
0
0
0
0
0.120915
306
19
33
16.105263
0.914498
0.084967
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.555556
0
0.555556
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
b029fc05557b5c943eddddb07e500dd8477d11cf
1,456
py
Python
src/maypy/distributions/__init__.py
MVilstrup/maypy
a246da085ac22f8680d82be334cab39c7b5a454a
[ "MIT" ]
null
null
null
src/maypy/distributions/__init__.py
MVilstrup/maypy
a246da085ac22f8680d82be334cab39c7b5a454a
[ "MIT" ]
null
null
null
src/maypy/distributions/__init__.py
MVilstrup/maypy
a246da085ac22f8680d82be334cab39c7b5a454a
[ "MIT" ]
null
null
null
"""Public surface of maypy.distributions.

Re-exports every concrete distribution class plus the Distribution base,
and collects the concrete classes in NP_DISTRIBUTIONS.
"""
from maypy.distributions.specific.pareto import Pareto
from maypy.distributions.specific.turkey_lambda import TurkeyLambda
from maypy.distributions.specific.alpha import Alpha
from maypy.distributions.specific.gamma import Gamma
from maypy.distributions.specific.exponential_norm import ExponentialNorm
from maypy.distributions.specific.exponential import Exponential
from maypy.distributions.specific.logistic import Logistic
from maypy.distributions.specific.power_norm import PowerNorm
from maypy.distributions.specific.power_log_norm import PowerLogNorm
from maypy.distributions.specific.lognorm import LogNorm
from maypy.distributions.specific.dweibull import DWeibull
from maypy.distributions.specific.d_gamma import DGamma
from maypy.distributions.specific.cosine import Cosine
from maypy.distributions.specific.chi import Chi
from maypy.distributions.specific.chi2 import Chi2
from maypy.distributions.specific.uniform import Uniform
from maypy.distributions.specific.beta import Beta
from maypy.distributions.specific.beta_prime import BetaPrime
from maypy.distributions.specific.log_gamma import LogGamma
from maypy.distributions.specific.normal import Normal
from maypy.distributions.distribution import Distribution

# Every concrete distribution class, in the project's canonical order.
# NOTE(review): "TurkeyLambda" presumably means the Tukey lambda distribution;
# the spelling follows the module name, so it is kept as-is.
NP_DISTRIBUTIONS = [
    Pareto,
    TurkeyLambda,
    Alpha,
    Gamma,
    ExponentialNorm,
    Exponential,
    Logistic,
    PowerNorm,
    PowerLogNorm,
    LogNorm,
    DWeibull,
    DGamma,
    Cosine,
    Chi2,
    Chi,
    Uniform,
    BetaPrime,
    Beta,
    LogGamma,
    Normal,
]
48.533333
73
0.85783
176
1,456
7.045455
0.193182
0.152419
0.372581
0.483871
0.177419
0
0
0
0
0
0
0.002251
0.084478
1,456
29
74
50.206897
0.927982
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.807692
0
0.807692
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b03d26aa43e50b2e6c36ddac36d61b6519c978a3
45,144
py
Python
models/networks.py
dr-benway/RevGAN
fcaf4f837a58f20f787e442914d68194325c2ca6
[ "BSD-3-Clause" ]
1
2019-03-20T10:37:24.000Z
2019-03-20T10:37:24.000Z
models/networks.py
dr-benway/RevGAN
fcaf4f837a58f20f787e442914d68194325c2ca6
[ "BSD-3-Clause" ]
null
null
null
models/networks.py
dr-benway/RevGAN
fcaf4f837a58f20f787e442914d68194325c2ca6
[ "BSD-3-Clause" ]
null
null
null
import torch
import torch.nn as nn
from torch.nn import init
import torch.nn.functional as F
import functools
from torch.optim import lr_scheduler
from memcnn.models.revop import ReversibleBlock
from torch.nn import Parameter
import numpy as np
import re

###############################################################################
# Helper Functions
###############################################################################


class Layer(nn.Module):
    """Base class for modules that define both a forward and an inverse pass."""

    def __init__(self):
        super(Layer, self).__init__()

    def forward(self, x):
        raise NotImplementedError

    def inverse(self, y):
        raise NotImplementedError


class Squeeze(Layer):
    """Invertible space-to-depth transform.

    forward maps (B, C, H, W) -> (B, C * factor**2, H // factor, W // factor);
    inverse undoes it.
    """

    def __init__(self, factor=2):
        super(Squeeze, self).__init__()
        assert factor > 1 and isinstance(factor, int), 'no point of using this if factor <= 1'
        self.factor = factor

    def squeeze_bchw(self, x):
        bs, c, h, w = x.size()
        # BUGFIX: the assert message used to be `pdb.set_trace()`, but `pdb`
        # is never imported, so a failing assertion raised NameError instead
        # of reporting anything useful.
        assert h % self.factor == 0 and w % self.factor == 0, \
            'spatial dims must be divisible by factor'
        # taken from https://github.com/chaiyujin/glow-pytorch/blob/master/glow/modules.py
        x = x.view(bs, c, h // self.factor, self.factor, w // self.factor, self.factor)
        x = x.permute(0, 1, 3, 5, 2, 4).contiguous()
        x = x.view(bs, c * self.factor * self.factor, h // self.factor, w // self.factor)
        return x

    def unsqueeze_bchw(self, x):
        bs, c, h, w = x.size()
        assert c >= 4 and c % 4 == 0
        # taken from https://github.com/chaiyujin/glow-pytorch/blob/master/glow/modules.py
        x = x.view(bs, c // self.factor ** 2, self.factor, self.factor, h, w)
        x = x.permute(0, 1, 4, 2, 5, 3).contiguous()
        x = x.view(bs, c // self.factor ** 2, h * self.factor, w * self.factor)
        return x

    def forward(self, x):
        if len(x.size()) != 4:
            raise NotImplementedError  # Maybe ValueError would be more appropriate
        return self.squeeze_bchw(x)

    def inverse(self, x):
        if len(x.size()) != 4:
            raise NotImplementedError
        return self.unsqueeze_bchw(x)


class Unsqueeze(Layer):
    """Invertible depth-to-space transform: Squeeze with forward/inverse swapped."""

    def __init__(self, factor=2):
        super(Unsqueeze, self).__init__()
        assert factor > 1 and isinstance(factor, int), 'no point of using this if factor <= 1'
        self.factor = factor

    def squeeze_bchw(self, x):
        bs, c, h, w = x.size()
        # BUGFIX: same `pdb.set_trace()` assert-message bug as in Squeeze.
        assert h % self.factor == 0 and w % self.factor == 0, \
            'spatial dims must be divisible by factor'
        # taken from https://github.com/chaiyujin/glow-pytorch/blob/master/glow/modules.py
        x = x.view(bs, c, h // self.factor, self.factor, w // self.factor, self.factor)
        x = x.permute(0, 1, 3, 5, 2, 4).contiguous()
        x = x.view(bs, c * self.factor * self.factor, h // self.factor, w // self.factor)
        return x

    def unsqueeze_bchw(self, x):
        bs, c, h, w = x.size()
        assert c >= 4 and c % 4 == 0
        # taken from https://github.com/chaiyujin/glow-pytorch/blob/master/glow/modules.py
        x = x.view(bs, c // self.factor ** 2, self.factor, self.factor, h, w)
        x = x.permute(0, 1, 4, 2, 5, 3).contiguous()
        x = x.view(bs, c // self.factor ** 2, h * self.factor, w * self.factor)
        return x

    def forward(self, x):
        if len(x.size()) != 4:
            raise NotImplementedError  # Maybe ValueError would be more appropriate
        return self.unsqueeze_bchw(x)

    def inverse(self, x):
        if len(x.size()) != 4:
            raise NotImplementedError
        return self.squeeze_bchw(x)


def get_norm_layer(norm_type='instance'):
    """Return a normalization-layer constructor for 'batch'/'instance'/'none'."""
    if norm_type == 'batch':
        norm_layer = functools.partial(nn.BatchNorm2d, affine=True)
    elif norm_type == 'instance':
        norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=True)
    elif norm_type == 'none':
        norm_layer = None
    else:
        raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
    return norm_layer


def get_scheduler(optimizer, opt):
    """Build the LR scheduler selected by opt.lr_policy ('lambda'/'step'/'plateau')."""
    if opt.lr_policy == 'lambda':
        def lambda_rule(epoch):
            # Linear decay to 0 over opt.niter_decay epochs after opt.niter.
            lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.niter) / float(opt.niter_decay + 1)
            return lr_l
        scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
    elif opt.lr_policy == 'step':
        scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1)
    elif opt.lr_policy == 'plateau':
        scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
    else:
        # BUGFIX: this used to `return NotImplementedError(msg, opt.lr_policy)`;
        # the exception was never raised and the message never interpolated.
        raise NotImplementedError('learning rate policy [%s] is not implemented' % opt.lr_policy)
    return scheduler


def init_weights(net, init_type='normal', gain=0.02):
    """Initialize Conv/Linear/BatchNorm2d weights of net in place."""
    def init_func(m):
        classname = m.__class__.__name__
        if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
            if init_type == 'normal':
                init.normal_(m.weight.data, 0.0, gain)
            elif init_type == 'xavier':
                init.xavier_normal_(m.weight.data, gain=gain)
            elif init_type == 'kaiming':
                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                init.orthogonal_(m.weight.data, gain=gain)
            else:
                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
            if hasattr(m, 'bias') and m.bias is not None:
                init.constant_(m.bias.data, 0.0)
        elif classname.find('BatchNorm2d') != -1:
            init.normal_(m.weight.data, 1.0, gain)
            init.constant_(m.bias.data, 0.0)

    print('initialize network with %s' % init_type)
    net.apply(init_func)


def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]):
    """Move net to the requested GPU(s), wrap in DataParallel, init weights."""
    if len(gpu_ids) > 0:
        assert(torch.cuda.is_available())
        print(gpu_ids)
        device = torch.device('cuda:{}'.format(gpu_ids[0])) if gpu_ids else torch.device('cpu')
        net.to(device)
        net = torch.nn.DataParallel(net, gpu_ids)
    init_weights(net, init_type, gain=init_gain)
    return net


def define_G(input_nc, output_nc, ngf, which_model_netG, norm='batch', use_dropout=False,
             init_type='normal', init_gain=0.02, gpu_ids=[], output_tanh=True, n_downsampling=2):
    """Create and initialize a full (non-split) generator by name."""
    netG = None
    norm_layer = get_norm_layer(norm_type=norm)
    if which_model_netG.startswith('resnet_') and which_model_netG.endswith('blocks'):
        # 'resnet_<k>blocks' encodes the residual block count in the name.
        n_blocks = int(re.findall(r'\d+', which_model_netG)[0])
        netG = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer,
                               use_dropout=use_dropout, n_blocks=n_blocks,
                               output_tanh=output_tanh, n_downsampling=n_downsampling)
    elif which_model_netG == 'onet_128':
        netG = OnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
    elif which_model_netG == 'nonet_64':
        netG = OnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
    elif which_model_netG == 'unet_128':
        netG = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
    elif which_model_netG == 'unet_256':
        netG = UnetGenerator(input_nc, output_nc, 8, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
    else:
        raise NotImplementedError('Generator model name [%s] is not recognized' % which_model_netG)
    return init_net(netG, init_type, init_gain, gpu_ids)


def define_G_enc(input_nc, output_nc, ngf, which_model_netG, norm='batch', use_dropout=False,
                 init_type='normal', init_gain=0.02, gpu_ids=[], n_downsampling=2):
    """Create and initialize the encoder part of a split generator."""
    netG = None
    norm_layer = get_norm_layer(norm_type=norm)
    if which_model_netG.startswith('resnet_') and which_model_netG.endswith('blocks'):
        n_blocks = int(re.findall(r'\d+', which_model_netG)[0])
        netG = ResnetGenerator_enc(input_nc, output_nc, ngf, norm_layer=norm_layer,
                                   use_dropout=use_dropout, n_blocks=n_blocks,
                                   n_downsampling=n_downsampling)
    elif which_model_netG.startswith('noise_') and which_model_netG.endswith('blocks'):
        netG = Noise_enc(input_nc, output_nc, ngf)
    else:
        raise NotImplementedError('Generator model name [%s] is not recognized' % which_model_netG)
    return init_net(netG, init_type, init_gain, gpu_ids)


def define_G_core(input_nc, output_nc, ngf, which_model_netG, norm='batch', use_dropout=False,
                  init_type='normal', init_gain=0.02, gpu_ids=[], invertible=False, ode=False,
                  squeeze=False, n_downsampling=2, add_noise=False, coupling='additive'):
    """Create and initialize the (optionally invertible) core of a split generator."""
    netG = None
    norm_layer = get_norm_layer(norm_type=norm)
    if which_model_netG.startswith('resnet_') and which_model_netG.endswith('blocks'):
        n_blocks = int(re.findall(r'\d+', which_model_netG)[0])
        netG = ResnetGenerator_core(input_nc, output_nc, ngf, norm_layer=norm_layer,
                                    use_dropout=use_dropout, n_blocks=n_blocks,
                                    invertible=invertible, ode=ode, squeeze=squeeze,
                                    n_downsampling=n_downsampling, add_noise=add_noise,
                                    coupling=coupling)
    elif which_model_netG.startswith('noise_') and which_model_netG.endswith('blocks'):
        n_blocks = int(re.findall(r'\d+', which_model_netG)[0])
        netG = Noise_core(input_nc, output_nc, ngf, norm_layer=norm_layer,
                          use_dropout=use_dropout, n_blocks=n_blocks,
                          invertible=invertible, ode=ode, squeeze=squeeze,
                          n_downsampling=n_downsampling, add_noise=add_noise,
                          coupling=coupling)
    else:
        raise NotImplementedError('Generator model name [%s] is not recognized' % which_model_netG)
    return init_net(netG, init_type, init_gain, gpu_ids)


def define_G_dec(input_nc, output_nc, ngf, which_model_netG, norm='batch', use_dropout=False,
                 init_type='normal', init_gain=0.02, gpu_ids=[], output_tanh=True, n_downsampling=2):
    """Create and initialize the decoder part of a split generator."""
    netG = None
    norm_layer = get_norm_layer(norm_type=norm)
    if which_model_netG.startswith('resnet_') and which_model_netG.endswith('blocks'):
        n_blocks = int(re.findall(r'\d+', which_model_netG)[0])
        netG = ResnetGenerator_dec(input_nc, output_nc, ngf, norm_layer=norm_layer,
                                   use_dropout=use_dropout, n_blocks=n_blocks,
                                   output_tanh=output_tanh, n_downsampling=n_downsampling)
    elif which_model_netG.startswith('noise_') and which_model_netG.endswith('blocks'):
        netG = Noise_dec(input_nc, output_nc, ngf)
    else:
        raise NotImplementedError('Generator model name [%s] is not recognized' % which_model_netG)
    return init_net(netG, init_type, init_gain, gpu_ids)


def define_D(input_nc, ndf, which_model_netD, n_layers_D=3, norm='batch', use_sigmoid=False,
             init_type='normal', init_gain=0.02, gpu_ids=[]):
    """Create and initialize a discriminator by name."""
    netD = None
    norm_layer = get_norm_layer(norm_type=norm)
    if which_model_netD == 'basic':
        netD = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
    elif which_model_netD == 'n_layers':
        netD = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
    elif which_model_netD == 'pixel':
        netD = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
    elif which_model_netD == 'paraml':
        netD = ParamLDiscriminator(input_nc)
    else:
        raise NotImplementedError('Discriminator model name [%s] is not recognized' % which_model_netD)
    return init_net(netD, init_type, init_gain, gpu_ids)


##############################################################################
# Classes
##############################################################################


# Defines the GAN loss which uses either LSGAN or the regular GAN.
# When LSGAN is used, it is basically same as MSELoss,
# but it abstracts away the need to create the target label tensor
# that has the same size as the input
class GANLoss(nn.Module):
    def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0):
        super(GANLoss, self).__init__()
        # Buffers so the label constants follow the module across devices.
        self.register_buffer('real_label', torch.tensor(target_real_label))
        self.register_buffer('fake_label', torch.tensor(target_fake_label))
        if use_lsgan:
            self.loss = nn.MSELoss()
        else:
            self.loss = nn.BCELoss()

    def get_target_tensor(self, input, target_is_real):
        """Return the real/fake label broadcast to input's shape."""
        if target_is_real:
            target_tensor = self.real_label
        else:
            target_tensor = self.fake_label
        return target_tensor.expand_as(input)

    def __call__(self, input, target_is_real):
        target_tensor = self.get_target_tensor(input, target_is_real)
        return self.loss(input, target_tensor)


# Defines the generator that consists of Resnet blocks between a few
# downsampling/upsampling operations.
# Code and idea originally from Justin Johnson's architecture.
# https://github.com/jcjohnson/fast-neural-style/
class ResnetGenerator(nn.Module):
    """Plain (non-invertible) ResNet generator: conv encoder, residual core, deconv decoder."""

    def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False,
                 n_blocks=6, padding_type='reflect', output_tanh=True, n_downsampling=2):
        assert(n_blocks >= 0)
        super(ResnetGenerator, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        # InstanceNorm2d carries no affine params here, so convs need a bias.
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        model = [nn.ReflectionPad2d(3),
                 nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, bias=use_bias),
                 norm_layer(ngf),
                 nn.ReLU(True)]
        for i in range(n_downsampling):
            mult = 2**i
            model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1, bias=use_bias),
                      norm_layer(ngf * mult * 2),
                      nn.ReLU(True)]
        mult = 2**n_downsampling
        for i in range(n_blocks):
            model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer,
                                  use_dropout=use_dropout, use_bias=use_bias)]
        for i in range(n_downsampling):
            mult = 2**(n_downsampling - i)
            model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2,
                                         padding=1, output_padding=1, bias=use_bias),
                      norm_layer(int(ngf * mult / 2)),
                      nn.ReLU(True)]
        model += [nn.ReflectionPad2d(3)]
        model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)]
        if output_tanh:
            model += [nn.Tanh()]
        self.model = nn.Sequential(*model)

    def forward(self, input):
        return self.model(input)


class ResnetGenerator_enc_noise(nn.Module):
    """Reversible encoder that pads the input with uniform noise channels up to ngf."""

    def __init__(self, ngf, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6,
                 padding_type='reflect', coupling='additive'):
        assert(n_blocks >= 0)
        super(ResnetGenerator_enc_noise, self).__init__()
        self.ngf = ngf
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d
        self.model = ReversibleConvBlock(ngf, padding_type=padding_type, norm_layer=norm_layer,
                                         use_dropout=use_dropout, use_bias=use_bias,
                                         coupling=coupling, kernel_size=7)

    def forward(self, input):
        # Noise in [-0.5, 0.5) fills the extra channels so the block is square.
        noise = torch.rand(input.shape[0], self.ngf - input.shape[1], input.shape[2], input.shape[3]) - 0.5
        if input.is_cuda:
            noise = noise.cuda()
        cat = torch.cat([input, noise], 1)
        return self.model(cat)

    def inverse(self, input):
        # Keep the 3 image channels, drop the noise channels.
        return self.model.inverse(input)[:, :3, :, :]


class ResnetGenerator_dec_noise(nn.Module):
    """Reversible decoder counterpart of ResnetGenerator_enc_noise."""

    def __init__(self, ngf, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6,
                 padding_type='reflect', coupling='additive'):
        assert(n_blocks >= 0)
        super(ResnetGenerator_dec_noise, self).__init__()
        self.ngf = ngf
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d
        self.model = ReversibleConvBlock(ngf, padding_type=padding_type, norm_layer=norm_layer,
                                         use_dropout=use_dropout, use_bias=use_bias,
                                         coupling=coupling, kernel_size=7)

    def forward(self, input):
        return self.model(input)[:, :3, :, :]

    def inverse(self, input):
        noise = torch.rand(input.shape[0], self.ngf - input.shape[1], input.shape[2], input.shape[3]) - 0.5
        if input.is_cuda:
            noise = noise.cuda()
        cat = torch.cat([input, noise], 1)
        return self.model.inverse(cat)


class Noise_enc(nn.Module):
    """Parameter-free encoder: concatenates Gaussian noise channels up to ngf."""

    def __init__(self, nc, ngf):
        super(Noise_enc, self).__init__()
        self.nc = nc
        self.ngf = ngf

    def forward(self, input):
        N, _, H, W = input.shape
        # BUGFIX: noise was allocated with an unconditional .cuda(), crashing
        # on CPU inputs; allocate it on the input's device instead.
        noise = torch.randn(N, self.ngf - self.nc, H, W, device=input.device)
        return torch.cat((input, noise), 1)

    def inverse(self, input):
        return input[:, :self.nc, :, :]


class Noise_dec(nn.Module):
    """Parameter-free decoder: drops the noise channels on forward, re-adds on inverse."""

    def __init__(self, nc, ngf):
        super(Noise_dec, self).__init__()
        self.nc = nc
        self.ngf = ngf

    def forward(self, input):
        return input[:, :self.nc, :, :]

    def inverse(self, input):
        N, _, H, W = input.shape
        # BUGFIX: same unconditional .cuda() as Noise_enc; use input's device.
        noise = torch.randn(N, self.ngf - self.nc, H, W, device=input.device)
        return torch.cat((input, noise), 1)


class inv1x1(Layer, nn.Conv2d):
    """Invertible 1x1 convolution (Glow-style), initialized with a random rotation."""

    def __init__(self, num_channels):
        self.num_channels = num_channels
        nn.Conv2d.__init__(self, num_channels, num_channels, 1, bias=False)

    def reset_parameters(self):
        # Random orthogonal init keeps the transform invertible from the start.
        w_init = np.linalg.qr(np.random.randn(self.num_channels, self.num_channels))[0]
        w_init = torch.from_numpy(w_init.astype('float32'))
        w_init = w_init.unsqueeze(-1).unsqueeze(-1)
        self.weight.data.copy_(w_init)

    def forward(self, x):
        output = F.conv2d(x, self.weight, self.bias, self.stride, self.padding,
                          self.dilation, self.groups)
        return output

    def inverse(self, x):
        # Invert the (C, C) weight matrix and apply it as a 1x1 conv.
        weight_inv = torch.inverse(self.weight.squeeze()).unsqueeze(-1).unsqueeze(-1)
        output = F.conv2d(x, weight_inv, self.bias, self.stride, self.padding,
                          self.dilation, self.groups)
        return output


class ResnetGenerator_enc(nn.Module):
    """Non-invertible encoder: 7x7 stem followed by strided downsampling convs."""

    def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False,
                 n_blocks=6, padding_type='reflect', n_downsampling=2, coupling='additive'):
        assert(n_blocks >= 0)
        super(ResnetGenerator_enc, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        model = [nn.ReflectionPad2d(3),
                 nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, bias=use_bias),
                 norm_layer(ngf),
                 nn.ReLU(True)]
        for i in range(n_downsampling):
            mult = 2**i
            model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1, bias=use_bias),
                      norm_layer(ngf * mult * 2),
                      nn.ReLU(True)]
        self.model = nn.Sequential(*model)

    def forward(self, input):
        return self.model(input)


class Fat(nn.Module):
    # NOTE(review): this class is broken as written -- `input_nc`, `ngf`,
    # `use_bias`, `norm_layer` and `ODEfunc_add` are undefined in this scope,
    # so instantiating it raises NameError. It is kept verbatim for interface
    # compatibility; only the missing `return`s below are fixed.
    def __init__(self, dim):
        super(Fat, self).__init__()
        model = [nn.ReflectionPad2d(3),
                 nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, bias=use_bias),
                 norm_layer(ngf),
                 nn.ReLU(True)]
        self.model = ODEBlock(ODEfunc_add(self.model))

    def forward(self, input):
        # BUGFIX: the result used to be computed and discarded (no return).
        return self.model(input)

    def inverse(self, input):
        # BUGFIX: the result used to be computed and discarded (no return).
        return self.model.inverse(input)


class Noise_core(nn.Module):
    """Invertible core built from inv1x1 / Fat / Squeeze / ODE blocks."""

    def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False,
                 n_blocks=6, padding_type='reflect', invertible=False, ode=False, squeeze=False,
                 n_downsampling=2, add_noise=False, coupling='additive'):
        assert(n_blocks >= 0)
        super(Noise_core, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        model = []
        for i in range(n_blocks):
            if invertible and ode:
                model += [inv1x1(self.ngf)]
                model += [Fat(self.ngf)]
                model += [Squeeze()]
                # model += [inv1x1(self.ngf*4**1)]
                # model += [Squeeze()]
                # model += [ODEBlock(ODEfunc(ngf*4**2, padding_type, norm_layer, use_dropout, use_bias))]
                model += [ODEBlock(ODEfunc(ngf*4**1, padding_type, norm_layer, use_dropout, use_bias))]
                # model += [ODEBlock(ODEfunc2(self.ngf*4**2))]
                # model += [Unsqueeze()]
                # model += [inv1x1(self.ngf*4**1)]
                model += [Unsqueeze()]
                model += [Fat(self.ngf)]
                model += [inv1x1(self.ngf)]
            else:
                raise NotImplementedError('wow')
        self.model = nn.Sequential(*model)

    def forward(self, input, inverse=False):
        # Run blocks in order, or each block's inverse() in reverse order.
        out = input
        if inverse:
            for block in reversed(self.model):
                out = block.inverse(out)
        else:
            for block in self.model:
                out = block(out)
        return out


class ResnetGenerator_core(nn.Module):
    """Core of the split generator; optionally invertible, ODE-based, and/or squeezed."""

    def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False,
                 n_blocks=6, padding_type='reflect', invertible=False, ode=False, squeeze=False,
                 n_downsampling=2, add_noise=False, coupling='additive'):
        assert(n_blocks >= 0)
        super(ResnetGenerator_core, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        model = []
        if add_noise:
            model += [ResnetGenerator_enc_noise(ngf, norm_layer=nn.InstanceNorm2d, use_dropout=False,
                                                n_blocks=6, padding_type='reflect')]
        if squeeze == 'squeezeblock':
            for i in range(n_downsampling):
                mult = 4**(i+1)
                # BUGFIX: ReversibleConvBlock requires a `coupling` argument;
                # it was omitted here, raising TypeError on construction.
                model += [Squeeze(),
                          ReversibleConvBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer,
                                              use_dropout=use_dropout, use_bias=use_bias,
                                              coupling=coupling)]
            mult = 4**n_downsampling
        elif squeeze == 'squeeze':
            for i in range(n_downsampling):
                model += [Squeeze()]
            mult = 4**n_downsampling
        else:
            mult = 2**n_downsampling

        for i in range(n_blocks):
            if invertible:
                if ode:
                    model += [ODEBlock(ODEfunc(ngf * mult, padding_type, norm_layer, use_dropout, use_bias))]
                else:
                    model += [ReversibleResnetBlock(ngf * mult, padding_type, norm_layer, use_dropout,
                                                    use_bias, coupling)]
            else:
                # BUGFIX: the channel count was `mult` (a small int) instead of
                # `ngf * mult`, which crashed on forward with a channel mismatch.
                model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer,
                                      use_dropout=use_dropout, use_bias=use_bias)]

        if squeeze == 'squeezeblock':
            for i in range(n_downsampling):
                mult = 4**(n_downsampling - i)
                model += [ReversibleResnetBlock(ngf * mult, padding_type, norm_layer, use_dropout,
                                                use_bias, coupling),
                          Unsqueeze()]
        elif squeeze == 'squeeze':
            for i in range(n_downsampling):
                model += [Squeeze()]
        if add_noise:
            model += [ResnetGenerator_dec_noise(ngf, norm_layer=nn.InstanceNorm2d, use_dropout=False,
                                                n_blocks=6, padding_type='reflect')]
        self.model = nn.Sequential(*model)

    def forward(self, input, inverse=False):
        out = input
        if inverse:
            for block in reversed(self.model):
                out = block.inverse(out)
        else:
            for block in self.model:
                out = block(out)
        return out


class ResnetGenerator_dec(nn.Module):
    """Non-invertible decoder: transposed-conv upsampling then a 7x7 output conv."""

    def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False,
                 n_blocks=6, padding_type='reflect', output_tanh=True, n_downsampling=2,
                 coupling='additive'):
        assert(n_blocks >= 0)
        super(ResnetGenerator_dec, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        if type(norm_layer) == functools.partial:
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d

        model = []
        for i in range(n_downsampling):
            mult = 2**(n_downsampling - i)
            model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2,
                                         padding=1, output_padding=1, bias=use_bias),
                      norm_layer(int(ngf * mult / 2)),
                      nn.ReLU(True)]
        model += [nn.ReflectionPad2d(3)]
        model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)]
        if output_tanh:
            model += [nn.Tanh()]
        self.model = nn.Sequential(*model)

    def forward(self, input):
        return self.model(input)


# Define a resnet block
class ResnetBlock(nn.Module):
    """Standard residual block: x + conv_block(x)."""

    def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias):
        super(ResnetBlock, self).__init__()
        self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias)

    def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias):
        conv_block = []
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias),
                       norm_layer(dim),
                       nn.ReLU(True)]
        if use_dropout:
            conv_block += [nn.Dropout(0.5)]
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias),
                       norm_layer(dim)]
        return nn.Sequential(*conv_block)

    def forward(self, x):
        out = x + self.conv_block(x)
        return out


class ODEfunc(nn.Module):
    """ODE right-hand side built from the ResnetBlock conv stack."""

    def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias):
        super(ODEfunc, self).__init__()
        self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias)
        self.nfe = 0  # number of function evaluations, updated by ODEBlock siblings

    def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias):
        conv_block = []
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias),
                       norm_layer(dim),
                       nn.ReLU(True)]
        if use_dropout:
            conv_block += [nn.Dropout(0.5)]
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias),
                       norm_layer(dim)]
        return nn.Sequential(*conv_block)

    def forward(self, t, x):
        # NOTE(review): adding the scalar time `t` (not `x`) to the conv output
        # looks suspicious -- ResnetBlock uses `x + conv_block(x)`. Kept as-is;
        # confirm intent before changing.
        return t + self.conv_block(x)


class ConcatConv2d(nn.Module):
    """Conv that concatenates the scalar time t as an extra input channel."""

    def __init__(self, dim_in, dim_out, ksize=3, stride=1, padding=0, dilation=1,
                 groups=1, bias=True, transpose=False):
        super(ConcatConv2d, self).__init__()
        module = nn.ConvTranspose2d if transpose else nn.Conv2d
        self._layer = module(
            dim_in + 1, dim_out, kernel_size=ksize, stride=stride, padding=padding,
            dilation=dilation, groups=groups, bias=bias
        )

    def forward(self, t, x):
        tt = torch.ones_like(x[:, :1, :, :]) * t
        ttx = torch.cat([tt, x], 1)
        return self._layer(ttx)


def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)


def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)


def norm(dim):
    """GroupNorm with at most 32 groups."""
    return nn.GroupNorm(min(32, dim), dim)


class ODEfunc2(nn.Module):
    """Wraps a module as an ODE RHS, feeding t as an extra channel."""

    def __init__(self, func):
        super(ODEfunc2, self).__init__()
        self.func = func
        self.nfe = 0

    def forward(self, t, x):
        self.nfe += 1
        tt = torch.ones_like(x[:, :1, :, :]) * t
        ttx = torch.cat([tt, x], 1)
        return self.func(ttx)


class ODEfunc3(nn.Module):
    """Like ODEfunc2 but also appends t-scaled Gaussian noise channels."""

    def __init__(self, func):
        super(ODEfunc3, self).__init__()
        self.func = func
        self.nfe = 0

    def forward(self, t, x):
        self.nfe += 1
        tt = torch.ones_like(x[:, :1, :, :]) * t
        N, C, H, W = x.shape
        # BUGFIX: noise was allocated with an unconditional .cuda(), crashing
        # on CPU inputs; allocate it on x's device instead.
        noise = torch.randn(N, 3, H, W, device=x.device) * t
        ttx = torch.cat([tt, x, noise], 1)
        return self.func(ttx)


# tol = 1e-1
tol = 1e-8


class ODEBlock(nn.Module):
    """Integrates an ODE function forward over t in [0, 1]; inverse integrates [1, 0].

    NOTE(review): `odeint` is not imported anywhere in this file; it needs
    `from torchdiffeq import odeint` or forward/inverse raise NameError.
    """

    def __init__(self, odefunc):
        super(ODEBlock, self).__init__()
        self.odefunc = odefunc
        self.fwd_integration_time = torch.tensor([0, 1]).float()
        self.bwd_integration_time = torch.tensor([1, 0]).float()

    def forward(self, x):
        self.fwd_integration_time = self.fwd_integration_time.type_as(x)
        out = odeint(self.odefunc, x, self.fwd_integration_time, rtol=tol, atol=tol)
        return out[1]

    def inverse(self, x):
        self.bwd_integration_time = self.bwd_integration_time.type_as(x)
        out = odeint(self.odefunc, x, self.bwd_integration_time, rtol=tol, atol=tol)
        return out[1]

    @property
    def nfe(self):
        return self.odefunc.nfe

    @nfe.setter
    def nfe(self, value):
        self.odefunc.nfe = value


class ReversibleConvBlock(nn.Module):
    """memcnn ReversibleBlock whose F and G are single conv-norm-ReLU stacks."""

    def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias, coupling, kernel_size=3):
        super(ReversibleConvBlock, self).__init__()
        # Renamed locals: `F`/`G` used to shadow torch.nn.functional (imported as F).
        f_net = self.build_conv_block(dim // 2, padding_type, norm_layer, use_dropout, use_bias, kernel_size)
        g_net = self.build_conv_block(dim // 2, padding_type, norm_layer, use_dropout, use_bias, kernel_size)
        self.rev_block = ReversibleBlock(f_net, g_net, coupling)

    def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias, kernel_size):
        conv_block = []
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(kernel_size//2)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(kernel_size//2)]
        elif padding_type == 'zero':
            p = kernel_size//2
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=kernel_size, padding=p, bias=use_bias),
                       norm_layer(dim),
                       nn.ReLU(True)]
        if use_dropout:
            conv_block += [nn.Dropout(0.5)]
        return nn.Sequential(*conv_block)

    def forward(self, x):
        return self.rev_block(x)

    def inverse(self, x):
        return self.rev_block.inverse(x)


class ReversibleResnetBlock(nn.Module):
    """memcnn ReversibleBlock whose F and G are ResnetBlock-style conv stacks."""

    def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias, coupling):
        super(ReversibleResnetBlock, self).__init__()
        # Renamed locals: `F`/`G` used to shadow torch.nn.functional (imported as F).
        f_net = self.build_conv_block(dim // 2, padding_type, norm_layer, use_dropout, use_bias)
        g_net = self.build_conv_block(dim // 2, padding_type, norm_layer, use_dropout, use_bias)
        self.rev_block = ReversibleBlock(f_net, g_net, coupling)

    def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias):
        conv_block = []
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias),
                       norm_layer(dim),
                       nn.ReLU(True)]
        if use_dropout:
            conv_block += [nn.Dropout(0.5)]
        p = 0
        if padding_type == 'reflect':
            conv_block += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv_block += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias),
                       norm_layer(dim)]
        return nn.Sequential(*conv_block)

    def forward(self, x):
        return self.rev_block(x)

    def inverse(self, x):
        return self.rev_block.inverse(x)


class ZeroInit(nn.Conv2d):
    """Conv2d whose weight and bias are initialized to zero.

    NOTE(review): `logscale` is accepted but unused.
    """

    def __init__(self, channels_in, channels_out, filter_size, stride=1, padding=0, logscale=3.):
        super().__init__(channels_in, channels_out, filter_size, stride=stride, padding=padding)

    def reset_parameters(self):
        self.weight.data.zero_()
        self.bias.data.zero_()


# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet.
# For example, if |num_downs| == 7, an image of size 128x128 will become
# of size 1x1 at the bottleneck.
class UnetGenerator(nn.Module):
    """U-Net generator assembled recursively from UnetSkipConnectionBlock.

    Construction starts at the innermost (bottleneck) level and wraps
    outwards until the outermost block maps input_nc -> output_nc.
    """

    def __init__(self, input_nc, output_nc, num_downs, ngf=64,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(UnetGenerator, self).__init__()
        # Bottleneck level first.
        block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None,
                                        submodule=None, norm_layer=norm_layer,
                                        innermost=True)
        # Extra ngf*8 levels (with optional dropout) for deeper nets.
        for _ in range(num_downs - 5):
            block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None,
                                            submodule=block,
                                            norm_layer=norm_layer,
                                            use_dropout=use_dropout)
        # Progressively halve the channel multiplier towards the outside.
        for outer_mult, inner_mult in ((4, 8), (2, 4), (1, 2)):
            block = UnetSkipConnectionBlock(ngf * outer_mult, ngf * inner_mult,
                                            input_nc=None, submodule=block,
                                            norm_layer=norm_layer)
        self.model = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc,
                                             submodule=block, outermost=True,
                                             norm_layer=norm_layer)

    def forward(self, input):
        return self.model(input)


class OnetGenerator(nn.Module):
    """O-Net generator: a U-Net-shaped function driven as a neural ODE.

    Mirrors UnetGenerator but every level carries one extra channel (+1),
    and the assembled block is wrapped in ODEBlock(ODEfunc2(...)) — both
    defined elsewhere in this file — which also provides inverse().
    """

    def __init__(self, input_nc, output_nc, num_downs, ngf=64,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(OnetGenerator, self).__init__()
        block = OnetSkipConnectionBlock(ngf * 8 + 1, ngf * 8 + 1, input_nc=None,
                                        submodule=None, norm_layer=norm_layer,
                                        innermost=True)
        for _ in range(num_downs - 5):
            block = OnetSkipConnectionBlock(ngf * 8 + 1, ngf * 8 + 1,
                                            input_nc=None, submodule=block,
                                            norm_layer=norm_layer,
                                            use_dropout=use_dropout)
        for outer_mult, inner_mult in ((4, 8), (2, 4), (1, 2)):
            block = OnetSkipConnectionBlock(ngf * outer_mult + 1,
                                            ngf * inner_mult + 1,
                                            input_nc=None, submodule=block,
                                            norm_layer=norm_layer)
        block = OnetSkipConnectionBlock(input_nc, ngf + 1,
                                        input_nc=output_nc + 1,
                                        submodule=block, outermost=True,
                                        norm_layer=norm_layer)
        self.model = ODEBlock(ODEfunc2(block))

    def forward(self, input):
        return self.model(input)

    def inverse(self, input):
        return self.model.inverse(input)


class OnetSkipConnectionBlock(nn.Module):
    """One O-Net level: downsample, recurse into `submodule`, upsample.

    Unlike the U-Net variant, forward() does NOT concatenate the input
    onto the output — channel counts match between levels (hence the +1
    bookkeeping in OnetGenerator rather than inner_nc * 2 up-convs).
    """

    def __init__(self, outer_nc, inner_nc, input_nc=None, submodule=None,
                 outermost=False, innermost=False,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(OnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
        # InstanceNorm has no learned bias, so convs keep their own.
        base_norm = norm_layer.func if type(norm_layer) == functools.partial else norm_layer
        use_bias = base_norm == nn.InstanceNorm2d
        if input_nc is None:
            input_nc = outer_nc

        down_conv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                              stride=2, padding=1, bias=use_bias)
        down_act = nn.LeakyReLU(0.2, True)
        down_norm = norm_layer(inner_nc)
        up_act = nn.ReLU(True)
        up_norm = norm_layer(outer_nc)

        if outermost:
            up_conv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4,
                                         stride=2, padding=1)
            layers = [down_conv, submodule, up_act, up_conv, nn.Tanh()]
        elif innermost:
            up_conv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4,
                                         stride=2, padding=1, bias=use_bias)
            layers = [down_act, down_conv, up_act, up_conv, up_norm]
        else:
            up_conv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4,
                                         stride=2, padding=1, bias=use_bias)
            layers = [down_act, down_conv, down_norm, submodule,
                      up_act, up_conv, up_norm]
            if use_dropout:
                layers.append(nn.Dropout(0.5))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        return self.model(x)


# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
# |-- downsampling -- |submodule| -- upsampling --|
class UnetSkipConnectionBlock(nn.Module):
    """One U-Net level: downsample, recurse into `submodule`, upsample.

    Every non-outermost level concatenates its own input onto its output
    (the skip connection), so the parent level sees 2x the channels — which
    is why the middle/outermost up-convs take inner_nc * 2 inputs.
    """

    def __init__(self, outer_nc, inner_nc, input_nc=None, submodule=None,
                 outermost=False, innermost=False,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(UnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
        # InstanceNorm has no learned bias, so convs keep their own.
        base_norm = norm_layer.func if type(norm_layer) == functools.partial else norm_layer
        use_bias = base_norm == nn.InstanceNorm2d
        if input_nc is None:
            input_nc = outer_nc

        down_conv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                              stride=2, padding=1, bias=use_bias)
        down_act = nn.LeakyReLU(0.2, True)
        down_norm = norm_layer(inner_nc)
        up_act = nn.ReLU(True)
        up_norm = norm_layer(outer_nc)

        if outermost:
            # Submodule output includes the skip concat, hence inner_nc * 2.
            up_conv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                         kernel_size=4, stride=2, padding=1)
            layers = [down_conv, submodule, up_act, up_conv, nn.Tanh()]
        elif innermost:
            up_conv = nn.ConvTranspose2d(inner_nc, outer_nc,
                                         kernel_size=4, stride=2, padding=1,
                                         bias=use_bias)
            layers = [down_act, down_conv, up_act, up_conv, up_norm]
        else:
            up_conv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                         kernel_size=4, stride=2, padding=1,
                                         bias=use_bias)
            layers = [down_act, down_conv, down_norm, submodule,
                      up_act, up_conv, up_norm]
            if use_dropout:
                layers.append(nn.Dropout(0.5))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        if self.outermost:
            return self.model(x)
        # Skip connection: stack the level's input in front of its output.
        return torch.cat([x, self.model(x)], 1)


# Defines the PatchGAN discriminator with the specified arguments.
class NLayerDiscriminator(nn.Module):
    """PatchGAN discriminator: n_layers strided conv blocks, then a 1-channel
    patch map (optionally squashed through a sigmoid).
    """

    def __init__(self, input_nc, ndf=64, n_layers=3,
                 norm_layer=nn.BatchNorm2d, use_sigmoid=False):
        super(NLayerDiscriminator, self).__init__()
        # InstanceNorm has no learned bias, so convs keep their own.
        base_norm = norm_layer.func if type(norm_layer) == functools.partial else norm_layer
        use_bias = base_norm == nn.InstanceNorm2d

        kw, padw = 4, 1
        layers = [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw),
                  nn.LeakyReLU(0.2, True)]
        # Strided blocks double the channel multiplier, capped at 8.
        prev_mult = 1
        for n in range(1, n_layers):
            cur_mult = min(2 ** n, 8)
            layers += [nn.Conv2d(ndf * prev_mult, ndf * cur_mult,
                                 kernel_size=kw, stride=2, padding=padw,
                                 bias=use_bias),
                       norm_layer(ndf * cur_mult),
                       nn.LeakyReLU(0.2, True)]
            prev_mult = cur_mult
        # One more block at stride 1, then project to a single channel.
        cur_mult = min(2 ** n_layers, 8)
        layers += [nn.Conv2d(ndf * prev_mult, ndf * cur_mult,
                             kernel_size=kw, stride=1, padding=padw,
                             bias=use_bias),
                   norm_layer(ndf * cur_mult),
                   nn.LeakyReLU(0.2, True),
                   nn.Conv2d(ndf * cur_mult, 1, kernel_size=kw,
                             stride=1, padding=padw)]
        if use_sigmoid:
            layers.append(nn.Sigmoid())
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)


class PixelDiscriminator(nn.Module):
    """1x1-conv PatchGAN: classifies every pixel independently."""

    def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d,
                 use_sigmoid=False):
        super(PixelDiscriminator, self).__init__()
        # InstanceNorm has no learned bias, so convs keep their own.
        base_norm = norm_layer.func if type(norm_layer) == functools.partial else norm_layer
        use_bias = base_norm == nn.InstanceNorm2d

        layers = [
            nn.Conv2d(input_nc, ndf, kernel_size=1, stride=1, padding=0),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0,
                      bias=use_bias),
            norm_layer(ndf * 2),
            nn.LeakyReLU(0.2, True),
            nn.Conv2d(ndf * 2, 1, kernel_size=1, stride=1, padding=0,
                      bias=use_bias),
        ]
        if use_sigmoid:
            layers.append(nn.Sigmoid())
        self.net = nn.Sequential(*layers)

    def forward(self, input):
        return self.net(input)
40.021277
248
0.603159
5,770
45,144
4.483536
0.069151
0.056011
0.020101
0.024121
0.781214
0.749092
0.728063
0.70804
0.696792
0.673715
0
0.017551
0.27554
45,144
1,127
249
40.056788
0.77346
0.035974
0
0.655889
0
0
0.030954
0
0
0
0
0
0.016166
1
0.114319
false
0
0.011547
0.018476
0.232102
0.002309
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b0674a539035491b51591f6c0e0dbdeef3976f4e
104
py
Python
engapp/apps.py
leonolan2020/phoenix
b5956a7003e548f01255cbd5d0d76cfd0ac77a81
[ "MIT" ]
1
2020-09-19T21:56:40.000Z
2020-09-19T21:56:40.000Z
engapp/apps.py
leonolan2020/phoenix
b5956a7003e548f01255cbd5d0d76cfd0ac77a81
[ "MIT" ]
null
null
null
engapp/apps.py
leonolan2020/phoenix
b5956a7003e548f01255cbd5d0d76cfd0ac77a81
[ "MIT" ]
5
2020-09-18T18:53:03.000Z
2020-10-21T14:42:00.000Z
from django.apps import AppConfig

# App label constant for use by sibling modules.
APP_NAME = 'engapp'


class EngappConfig(AppConfig):
    """Django AppConfig for the engapp application."""

    name = 'engapp'
17.333333
33
0.759615
13
104
6
0.769231
0.25641
0
0
0
0
0
0
0
0
0
0
0.144231
104
5
34
20.8
0.876404
0
0
0
0
0
0.115385
0
0
0
0
0
0
1
0
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
c65a988ddbca47565d6bc8c6e4429a2acfe04efd
109
py
Python
src/FFEAT/test/decay/__init__.py
PatrikValkovic/MasterThesis
6e9f3b186541db6c8395ebc96ace7289d01c805b
[ "MIT" ]
null
null
null
src/FFEAT/test/decay/__init__.py
PatrikValkovic/MasterThesis
6e9f3b186541db6c8395ebc96ace7289d01c805b
[ "MIT" ]
null
null
null
src/FFEAT/test/decay/__init__.py
PatrikValkovic/MasterThesis
6e9f3b186541db6c8395ebc96ace7289d01c805b
[ "MIT" ]
null
null
null
############################### # # Created by Patrik Valkovic # 3/15/2021 # ###############################
15.571429
31
0.275229
7
109
4.285714
1
0
0
0
0
0
0
0
0
0
0
0.071429
0.100917
109
6
32
18.166667
0.234694
0.330275
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
c681dbcef8f41f1ee0de79ed94e3d1a5ee1d49be
51
py
Python
example_pkg_sckmkny/__main__.py
larkintuckerllc/example-prj-sckmkny
ffb435eb17afd8469ef8237628e6cf8c4c4091bc
[ "MIT" ]
null
null
null
example_pkg_sckmkny/__main__.py
larkintuckerllc/example-prj-sckmkny
ffb435eb17afd8469ef8237628e6cf8c4c4091bc
[ "MIT" ]
null
null
null
example_pkg_sckmkny/__main__.py
larkintuckerllc/example-prj-sckmkny
ffb435eb17afd8469ef8237628e6cf8c4c4091bc
[ "MIT" ]
null
null
null
# Package entry point: `python -m example_pkg_sckmkny` runs the demo.
from example_pkg_sckmkny import main

main.hello()
12.75
36
0.823529
8
51
5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.117647
51
3
37
17
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
c6b3c2b281cabe6212cbce08f96751e6a92c88a7
444
py
Python
python/graphscope/nx/generators/tests/test_nonisomorphic_trees.py
LI-Mingyu/GraphScope-MY
942060983d3f7f8d3a3377467386e27aba285b33
[ "Apache-2.0" ]
1
2021-12-17T03:58:08.000Z
2021-12-17T03:58:08.000Z
python/graphscope/nx/generators/tests/test_nonisomorphic_trees.py
LI-Mingyu/GraphScope-MY
942060983d3f7f8d3a3377467386e27aba285b33
[ "Apache-2.0" ]
null
null
null
python/graphscope/nx/generators/tests/test_nonisomorphic_trees.py
LI-Mingyu/GraphScope-MY
942060983d3f7f8d3a3377467386e27aba285b33
[ "Apache-2.0" ]
null
null
null
""" ==================== Generators - Non Isomorphic Trees ==================== Unit tests for WROM algorithm generator in generators/nonisomorphic_trees.py """ import networkx.generators.tests.test_nonisomorphic_trees import pytest from graphscope.nx.utils.compat import import_as_graphscope_nx import_as_graphscope_nx( networkx.generators.tests.test_nonisomorphic_trees, decorators=pytest.mark.usefixtures("graphscope_session"), )
26.117647
76
0.754505
50
444
6.46
0.54
0.167183
0.142415
0.167183
0.278638
0.278638
0
0
0
0
0
0
0.09009
444
16
77
27.75
0.799505
0.344595
0
0
0
0
0.063604
0
0
0
0
0
0
1
0
true
0
0.571429
0
0.571429
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4