hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
949cbb6c19f937ac98b117dbcd575e91ca2101e5
1,750
py
Python
extensions/.stubs/clrclasses/System/Management/Instrumentation/__init__.py
vicwjb/Pycad
7391cd694b7a91ad9f9964ec95833c1081bc1f84
[ "MIT" ]
1
2020-03-25T03:27:24.000Z
2020-03-25T03:27:24.000Z
extensions/.stubs/clrclasses/System/Management/Instrumentation/__init__.py
vicwjb/Pycad
7391cd694b7a91ad9f9964ec95833c1081bc1f84
[ "MIT" ]
null
null
null
extensions/.stubs/clrclasses/System/Management/Instrumentation/__init__.py
vicwjb/Pycad
7391cd694b7a91ad9f9964ec95833c1081bc1f84
[ "MIT" ]
null
null
null
from __clrclasses__.System.Management.Instrumentation import InstanceNotFoundException from __clrclasses__.System.Management.Instrumentation import InstrumentationBaseException from __clrclasses__.System.Management.Instrumentation import InstrumentationException from __clrclasses__.System.Management.Instrumentation import ManagementBindAttribute from __clrclasses__.System.Management.Instrumentation import ManagementCommitAttribute from __clrclasses__.System.Management.Instrumentation import ManagementConfigurationAttribute from __clrclasses__.System.Management.Instrumentation import ManagementConfigurationType from __clrclasses__.System.Management.Instrumentation import ManagementCreateAttribute from __clrclasses__.System.Management.Instrumentation import ManagementEntityAttribute from __clrclasses__.System.Management.Instrumentation import ManagementEnumeratorAttribute from __clrclasses__.System.Management.Instrumentation import ManagementHostingModel from __clrclasses__.System.Management.Instrumentation import ManagementKeyAttribute from __clrclasses__.System.Management.Instrumentation import ManagementMemberAttribute from __clrclasses__.System.Management.Instrumentation import ManagementNameAttribute from __clrclasses__.System.Management.Instrumentation import ManagementNewInstanceAttribute from __clrclasses__.System.Management.Instrumentation import ManagementProbeAttribute from __clrclasses__.System.Management.Instrumentation import ManagementReferenceAttribute from __clrclasses__.System.Management.Instrumentation import ManagementRemoveAttribute from __clrclasses__.System.Management.Instrumentation import ManagementTaskAttribute from __clrclasses__.System.Management.Instrumentation import WmiConfigurationAttribute
83.333333
93
0.92
140
1,750
10.928571
0.185714
0.183007
0.261438
0.392157
0.666667
0.666667
0
0
0
0
0
0
0.045714
1,750
20
94
87.5
0.916168
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
949dc9c359dab3e42edef2d3034093b87622e6e4
1,679
py
Python
test/test_fuzzylogic.py
vishalbelsare/PyShortTextCategorization
4fa46a148a3eeb923885a7d70c789e988554f758
[ "MIT" ]
481
2016-10-07T16:48:40.000Z
2022-03-16T12:44:12.000Z
test/test_fuzzylogic.py
vishalbelsare/PyShortTextCategorization
4fa46a148a3eeb923885a7d70c789e988554f758
[ "MIT" ]
56
2017-02-02T17:50:14.000Z
2021-12-15T05:14:28.000Z
test/test_fuzzylogic.py
vishalbelsare/PyShortTextCategorization
4fa46a148a3eeb923885a7d70c789e988554f758
[ "MIT" ]
70
2017-01-28T15:20:46.000Z
2021-09-30T15:08:41.000Z
import unittest import shorttext class TestFuzzyLogic(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_similarity(self): self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('debug', 'deubg'), 1) self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('intrdependence', 'interdpeendencae'), 3) self.assertEqual(shorttext.metrics.dynprog.longest_common_prefix('debug', 'debuag'), 4) def test_transposition(self): self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('independent', 'indeepndent'), 1) self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('providence', 'porvidecne'), 2) def test_insertion(self): self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('algorithm', 'algorithms'), 1) self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('algorithm', 'algoarithmm'), 2) def test_deletion(self): self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('algorithm', 'algoithm'), 1) self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('algorithm', 'algorith'), 1) self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('algorithm', 'algrihm'), 2) def test_correct(self): self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('python', 'python'), 0) self.assertEqual(shorttext.metrics.dynprog.damerau_levenshtein('sosad', 'sosad'), 0) def test_jaccard(self): self.assertAlmostEqual(shorttext.metrics.dynprog.similarity('diver', 'driver'), 5./6.) if __name__ == '__main__': unittest.main()
43.051282
112
0.729005
176
1,679
6.801136
0.318182
0.173768
0.249791
0.310777
0.604845
0.573099
0.573099
0.526316
0.280702
0
0
0.009689
0.139369
1,679
39
113
43.051282
0.818685
0
0
0.071429
0
0
0.132817
0
0
0
0
0
0.464286
1
0.285714
false
0.071429
0.071429
0
0.392857
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
1
0
0
0
0
0
5
94be36745556ba037d5746f2c4040fc32dba26d0
44
py
Python
pvanalytics/filtering.py
kanderso-nrel/pvanalytics
27ea3fdddaf0e885cce8b56438256b7e51e9bdea
[ "MIT", "BSD-3-Clause" ]
49
2020-02-19T19:18:27.000Z
2022-03-26T00:12:48.000Z
pvanalytics/filtering.py
kanderso-nrel/pvanalytics
27ea3fdddaf0e885cce8b56438256b7e51e9bdea
[ "MIT", "BSD-3-Clause" ]
96
2020-02-20T15:02:11.000Z
2022-03-22T22:51:15.000Z
pvanalytics/filtering.py
kanderso-nrel/pvanalytics
27ea3fdddaf0e885cce8b56438256b7e51e9bdea
[ "MIT", "BSD-3-Clause" ]
20
2020-02-18T21:40:13.000Z
2022-02-22T15:50:23.000Z
"""Functions to aid with data filtering."""
22
43
0.704545
6
44
5.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
44
1
44
44
0.815789
0.840909
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
94e794ff88b59f9bc0ddd3095928805762ffa8bd
221
py
Python
httoop/six.py
spaceone/httoop
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
[ "MIT" ]
13
2015-01-07T19:39:02.000Z
2021-07-12T21:09:28.000Z
httoop/six.py
spaceone/httoop
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
[ "MIT" ]
9
2015-06-14T11:37:26.000Z
2020-12-11T09:12:30.000Z
httoop/six.py
spaceone/httoop
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
[ "MIT" ]
10
2015-05-28T05:51:46.000Z
2021-12-29T20:36:15.000Z
from __future__ import absolute_import from six import PY2, PY3, int2byte, iterbytes, reraise, string_types, with_metaclass __all__ = ('with_metaclass', 'reraise', 'string_types', 'iterbytes', 'int2byte', 'PY2', 'PY3')
36.833333
94
0.755656
27
221
5.703704
0.555556
0.077922
0.233766
0
0
0
0
0
0
0
0
0.030612
0.113122
221
5
95
44.2
0.755102
0
0
0
0
0
0.253394
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
bf6e551d399a960b2e84c0eca81c74c8b6976882
134
py
Python
src/apps/list/admin.py
avibn/todovib
4052da86e27825c60be618e986c2c16a23002e3c
[ "MIT" ]
8
2021-06-27T10:39:36.000Z
2022-01-22T19:47:35.000Z
src/apps/list/admin.py
avibn/todovib
4052da86e27825c60be618e986c2c16a23002e3c
[ "MIT" ]
null
null
null
src/apps/list/admin.py
avibn/todovib
4052da86e27825c60be618e986c2c16a23002e3c
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import TodoList, ListItem admin.site.register(TodoList) admin.site.register(ListItem)
19.142857
38
0.820896
18
134
6.111111
0.555556
0.163636
0.309091
0
0
0
0
0
0
0
0
0
0.097015
134
6
39
22.333333
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bf7c2c3d9bf7d6d959e16f444603870c078b7661
30
py
Python
arxivabscraper/__init__.py
MohamedElashri/Arxiv-Aabstract-scraper
1f1087aab7488138268fd84afd0f5d60d74600a5
[ "MIT" ]
1
2021-01-03T09:30:28.000Z
2021-01-03T09:30:28.000Z
arxivabscraper/__init__.py
MohamedElashri/Arxiv-Aabstract-scraper
1f1087aab7488138268fd84afd0f5d60d74600a5
[ "MIT" ]
4
2021-01-15T22:10:28.000Z
2021-01-18T00:58:37.000Z
arxivabscraper/__init__.py
MohamedElashri/Arxiv-Aabstract-scraper
1f1087aab7488138268fd84afd0f5d60d74600a5
[ "MIT" ]
null
null
null
from .arxivabscraper import *
15
29
0.8
3
30
8
1
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
1
30
30
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bfb7216a41cfcb64493428d61981eace59c2334e
142
py
Python
cyclegan/optim.py
narumiruna/cyclegan-pytorch
11e28b7d9681e9cd40ecf9ee6d0fc93076d69365
[ "MIT" ]
1
2020-03-19T07:38:42.000Z
2020-03-19T07:38:42.000Z
cyclegan/optim.py
narumiruna/cyclegan-pytorch
11e28b7d9681e9cd40ecf9ee6d0fc93076d69365
[ "MIT" ]
null
null
null
cyclegan/optim.py
narumiruna/cyclegan-pytorch
11e28b7d9681e9cd40ecf9ee6d0fc93076d69365
[ "MIT" ]
null
null
null
from torch import optim from .utils import get_factory OptimFactory = get_factory(optim) SchedulerFactory = get_factory(optim.lr_scheduler)
20.285714
50
0.830986
19
142
6
0.578947
0.263158
0.263158
0
0
0
0
0
0
0
0
0
0.112676
142
6
51
23.666667
0.904762
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
44b21bde2e78575ffc1965805681d08d01792eed
128
py
Python
nnutils/loss/_base.py
STomoya/animeface
37b3cd26097d7874559d4c152e41e5712b7a1a42
[ "MIT" ]
61
2020-06-06T08:25:09.000Z
2022-03-28T13:30:10.000Z
nnutils/loss/_base.py
OrigamiXx/animeface
8724006df99ba7ef369e837d8294350ea733611b
[ "MIT" ]
13
2020-07-02T02:41:14.000Z
2021-05-09T14:24:58.000Z
nnutils/loss/_base.py
OrigamiXx/animeface
8724006df99ba7ef369e837d8294350ea733611b
[ "MIT" ]
8
2020-10-03T18:51:16.000Z
2022-02-05T18:18:01.000Z
class Loss: def __init__(self, return_all: bool=False ) -> None: self.return_all = return_all pass
16
36
0.585938
16
128
4.25
0.6875
0.397059
0.382353
0
0
0
0
0
0
0
0
0
0.328125
128
7
37
18.285714
0.790698
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0.166667
0
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
44d31efc406b4e46d09485a809165fad12bb5099
67
py
Python
src/pink/__init__.py
Pix-00/olea-v2_flask_1_
7ddfa83a7a2a7dfbe55b78da002c1193f38781c0
[ "Apache-2.0" ]
null
null
null
src/pink/__init__.py
Pix-00/olea-v2_flask_1_
7ddfa83a7a2a7dfbe55b78da002c1193f38781c0
[ "Apache-2.0" ]
null
null
null
src/pink/__init__.py
Pix-00/olea-v2_flask_1_
7ddfa83a7a2a7dfbe55b78da002c1193f38781c0
[ "Apache-2.0" ]
null
null
null
from flask import Blueprint pink_bp = Blueprint('pink', __name__)
16.75
37
0.776119
9
67
5.222222
0.777778
0.553191
0
0
0
0
0
0
0
0
0
0
0.134328
67
3
38
22.333333
0.810345
0
0
0
0
0
0.059701
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
1
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
5
44da4efc1ac780ca53a616e1150d729ad321aeaa
25
py
Python
hello-world.py
SergEndorfin/profile-rest-api
5f0fff9a1794022d06e16a9390d160c9ea41954a
[ "MIT" ]
null
null
null
hello-world.py
SergEndorfin/profile-rest-api
5f0fff9a1794022d06e16a9390d160c9ea41954a
[ "MIT" ]
null
null
null
hello-world.py
SergEndorfin/profile-rest-api
5f0fff9a1794022d06e16a9390d160c9ea41954a
[ "MIT" ]
null
null
null
print("hello WWWWWWWWW")
12.5
24
0.76
3
25
6.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.08
25
1
25
25
0.826087
0
0
0
0
0
0.6
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
78087a899b27a8b6853431499f2d608267dbfb98
49
py
Python
examples/single_file/template/{{file_name}}.py
faycheng/tpl
2e2dceb9f5259101084d77c7279d0655097954a5
[ "MIT" ]
null
null
null
examples/single_file/template/{{file_name}}.py
faycheng/tpl
2e2dceb9f5259101084d77c7279d0655097954a5
[ "MIT" ]
null
null
null
examples/single_file/template/{{file_name}}.py
faycheng/tpl
2e2dceb9f5259101084d77c7279d0655097954a5
[ "MIT" ]
null
null
null
print("{{message}}") print('{{prompt_message}}')
16.333333
27
0.632653
5
49
6
0.6
0
0
0
0
0
0
0
0
0
0
0
0.040816
49
2
28
24.5
0.638298
0
0
0
0
0
0.591837
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
7871f4d954c294553d1f0cce0c1dab4c162bc966
36
py
Python
page_xml_draw/__init__.py
GBN-DBP/page-xml-draw
ecf3123b385c58286649ba5b5bddc2a9d834daf8
[ "Apache-2.0" ]
6
2021-02-01T12:45:10.000Z
2021-02-03T14:14:28.000Z
page_xml_draw/__init__.py
GBN-DBP/page-xml-draw
ecf3123b385c58286649ba5b5bddc2a9d834daf8
[ "Apache-2.0" ]
1
2021-02-17T02:02:08.000Z
2021-02-17T02:02:08.000Z
page_xml_draw/__init__.py
GBN-DBP/page-xml-draw
ecf3123b385c58286649ba5b5bddc2a9d834daf8
[ "Apache-2.0" ]
null
null
null
from page_xml_draw.main import main
18
35
0.861111
7
36
4.142857
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.111111
36
1
36
36
0.90625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
78b7763328c4f256ddc9dd3c246858ce7a214481
60
py
Python
enthought/pyface/util/fix_introspect_bug.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
3
2016-12-09T06:05:18.000Z
2018-03-01T13:00:29.000Z
enthought/pyface/util/fix_introspect_bug.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
1
2020-12-02T00:51:32.000Z
2020-12-02T08:48:55.000Z
enthought/pyface/util/fix_introspect_bug.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
null
null
null
# proxy module from pyface.util.fix_introspect_bug import *
20
44
0.816667
9
60
5.222222
1
0
0
0
0
0
0
0
0
0
0
0
0.116667
60
2
45
30
0.886792
0.2
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
78cd73a76e210f870c0bd051ee3fa772c0396d86
107
py
Python
setuper desktop app/gui/functionpages/__init__.py
dragondjf/CloudSetuper
31aefe629f7f2d59d287981eda3e4e618ace9e9f
[ "MIT" ]
22
2015-01-08T12:54:20.000Z
2021-05-16T04:15:45.000Z
setuper desktop app/gui/functionpages/__init__.py
dragondjf/CloudSetuper
31aefe629f7f2d59d287981eda3e4e618ace9e9f
[ "MIT" ]
null
null
null
setuper desktop app/gui/functionpages/__init__.py
dragondjf/CloudSetuper
31aefe629f7f2d59d287981eda3e4e618ace9e9f
[ "MIT" ]
11
2015-01-25T01:26:45.000Z
2021-08-18T01:40:40.000Z
#!/usr/bin/python # -*- coding: utf-8 -*- from .homepage import HomePage from .aboutpage import AboutPage
17.833333
32
0.71028
14
107
5.428571
0.714286
0
0
0
0
0
0
0
0
0
0
0.01087
0.140187
107
5
33
21.4
0.815217
0.35514
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1566e96a27d46b234aeec48e773371402c23c7e8
4,525
py
Python
releasetools.py
FelipeSillva034/android_device_xiaomi_platina
638fa4edb2c81433c109d59ae48435877be29aeb
[ "Apache-2.0" ]
6
2021-08-15T13:04:56.000Z
2022-01-29T02:54:28.000Z
releasetools.py
FelipeSillva034/android_device_xiaomi_platina
638fa4edb2c81433c109d59ae48435877be29aeb
[ "Apache-2.0" ]
null
null
null
releasetools.py
FelipeSillva034/android_device_xiaomi_platina
638fa4edb2c81433c109d59ae48435877be29aeb
[ "Apache-2.0" ]
23
2019-05-31T17:37:56.000Z
2022-01-10T14:28:22.000Z
# Copyright (C) 2009 The Android Open Source Project # Copyright (c) 2011, The Linux Foundation. All rights reserved. # Copyright (C) 2017 The LineageOS Project # Copyright (C) 2021 The PixelExperience Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import hashlib import common import re def FullOTA_Assertions(info): OTA_UpdateFirmware(info) return def IncrementalOTA_Assertions(info): OTA_UpdateFirmware(info) return def OTA_UpdateFirmware(info): info.script.AppendExtra('ui_print("Flashing MIUI V12.0.3.QDTMIXM firmware...");') info.script.AppendExtra('package_extract_file("install/firmware-update/cmnlib64.mbn", "/dev/block/bootdevice/by-name/cmnlib64");') info.script.AppendExtra('package_extract_file("install/firmware-update/NON-HLOS.bin", "/dev/block/bootdevice/by-name/modem");') info.script.AppendExtra('package_extract_file("install/firmware-update/cmnlib.mbn", "/dev/block/bootdevice/by-name/cmnlib");') info.script.AppendExtra('package_extract_file("install/firmware-update/hyp.mbn", "/dev/block/bootdevice/by-name/hyp");') info.script.AppendExtra('package_extract_file("install/firmware-update/BTFM.bin", "/dev/block/bootdevice/by-name/bluetooth");') info.script.AppendExtra('package_extract_file("install/firmware-update/tz.mbn", "/dev/block/bootdevice/by-name/tz");') info.script.AppendExtra('package_extract_file("install/firmware-update/keymaster64.mbn", "/dev/block/bootdevice/by-name/keymaster");') 
info.script.AppendExtra('package_extract_file("install/firmware-update/logfs_ufs_8mb.bin", "/dev/block/bootdevice/by-name/logfs");') info.script.AppendExtra('package_extract_file("install/firmware-update/pmic.elf", "/dev/block/bootdevice/by-name/pmic");') info.script.AppendExtra('package_extract_file("install/firmware-update/abl.elf", "/dev/block/bootdevice/by-name/abl");') info.script.AppendExtra('package_extract_file("install/firmware-update/dspso.bin", "/dev/block/bootdevice/by-name/dsp");') info.script.AppendExtra('package_extract_file("install/firmware-update/devcfg.mbn", "/dev/block/bootdevice/by-name/devcfg");') info.script.AppendExtra('package_extract_file("install/firmware-update/storsec.mbn", "/dev/block/bootdevice/by-name/storsec");') info.script.AppendExtra('package_extract_file("install/firmware-update/xbl.elf", "/dev/block/bootdevice/by-name/xbl");') info.script.AppendExtra('package_extract_file("install/firmware-update/rpm.mbn", "/dev/block/bootdevice/by-name/rpm");') info.script.AppendExtra('package_extract_file("install/firmware-update/cmnlib64.mbn", "/dev/block/bootdevice/by-name/cmnlib64bak");') info.script.AppendExtra('package_extract_file("install/firmware-update/cmnlib.mbn", "/dev/block/bootdevice/by-name/cmnlibbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/hyp.mbn", "/dev/block/bootdevice/by-name/hypbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/tz.mbn", "/dev/block/bootdevice/by-name/tzbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/keymaster64.mbn", "/dev/block/bootdevice/by-name/keymasterbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/pmic.elf", "/dev/block/bootdevice/by-name/pmicbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/abl.elf", "/dev/block/bootdevice/by-name/ablbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/devcfg.mbn", 
"/dev/block/bootdevice/by-name/devcfgbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/storsec.mbn", "/dev/block/bootdevice/by-name/storsecbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/xbl.elf", "/dev/block/bootdevice/by-name/xblbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/rpm.mbn", "/dev/block/bootdevice/by-name/rpmbak");') info.script.AppendExtra('package_extract_file("install/firmware-update/logo.img", "/dev/block/bootdevice/by-name/logo");')
76.694915
139
0.779006
615
4,525
5.630894
0.234146
0.080855
0.169795
0.218308
0.735201
0.72827
0.697083
0.671672
0.671672
0.585042
0
0.008696
0.059669
4,525
58
140
78.017241
0.80517
0.16
0
0.105263
0
0.710526
0.717759
0.696353
0
0
0
0
0.052632
1
0.078947
false
0
0.078947
0
0.210526
0.026316
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
15815a1c3c4c498aad5b2b04b4f8a95ab6872a1f
156
py
Python
simstring/measure/__init__.py
icfly2/simstring-1
e4a57603967c5d138ce021cedc09d509f75e1933
[ "MIT" ]
null
null
null
simstring/measure/__init__.py
icfly2/simstring-1
e4a57603967c5d138ce021cedc09d509f75e1933
[ "MIT" ]
null
null
null
simstring/measure/__init__.py
icfly2/simstring-1
e4a57603967c5d138ce021cedc09d509f75e1933
[ "MIT" ]
null
null
null
from .cosine import CosineMeasure from .jaccard import JaccardMeasure from .dice import DiceMeasure from .overlap import OverlapMeasure, LeftOverlapMeasure
31.2
55
0.858974
17
156
7.882353
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.108974
156
4
56
39
0.964029
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
158e85e21fadf959a0a12d259f79e26eb6fc64ff
169
py
Python
common/item.py
animal-breeding-zoo/typhoon-weather
762e2971889668236afef9b966c67ae1a4bbf532
[ "BSD-3-Clause" ]
null
null
null
common/item.py
animal-breeding-zoo/typhoon-weather
762e2971889668236afef9b966c67ae1a4bbf532
[ "BSD-3-Clause" ]
null
null
null
common/item.py
animal-breeding-zoo/typhoon-weather
762e2971889668236afef9b966c67ae1a4bbf532
[ "BSD-3-Clause" ]
null
null
null
import ujson class Item(object): """ """ def __init__(self, **args): self.args = args def to_json(self): return ujson.dumps(self.args)
15.363636
37
0.56213
21
169
4.285714
0.619048
0.266667
0
0
0
0
0
0
0
0
0
0
0.295858
169
10
38
16.9
0.756303
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.166667
0.833333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
ec699bf6c970299b6d49a9cb1a930836dd004181
271
py
Python
libres/db/models/__init__.py
seantis/libres
995ca5d802a815fbbc8dd65fddcd169f10cca584
[ "MIT" ]
10
2017-10-30T20:41:52.000Z
2022-03-15T02:20:58.000Z
libres/db/models/__init__.py
seantis/libres
995ca5d802a815fbbc8dd65fddcd169f10cca584
[ "MIT" ]
14
2015-01-14T18:15:03.000Z
2021-08-31T06:37:09.000Z
libres/db/models/__init__.py
seantis/libres
995ca5d802a815fbbc8dd65fddcd169f10cca584
[ "MIT" ]
3
2015-01-08T00:31:44.000Z
2020-06-21T13:58:34.000Z
from libres.db.models.base import ORMBase from libres.db.models.allocation import Allocation from libres.db.models.reserved_slot import ReservedSlot from libres.db.models.reservation import Reservation __all__ = ['ORMBase', 'Allocation', 'ReservedSlot', 'Reservation']
33.875
66
0.815498
34
271
6.352941
0.382353
0.185185
0.222222
0.333333
0
0
0
0
0
0
0
0
0.088561
271
7
67
38.714286
0.874494
0
0
0
0
0
0.147601
0
0
0
0
0
0
1
0
false
0
0.8
0
0.8
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
ec6e46f7ca26670d019b874474879a3422607434
102
py
Python
tests/app2/awesome_people.py
kiwnix/persisting-theory
cca9c4470cf2b43f2d03b884c526dcb7d6073ee1
[ "BSD-3-Clause" ]
null
null
null
tests/app2/awesome_people.py
kiwnix/persisting-theory
cca9c4470cf2b43f2d03b884c526dcb7d6073ee1
[ "BSD-3-Clause" ]
null
null
null
tests/app2/awesome_people.py
kiwnix/persisting-theory
cca9c4470cf2b43f2d03b884c526dcb7d6073ee1
[ "BSD-3-Clause" ]
null
null
null
from ..test_registries import awesome_people @awesome_people.register class FrederikPeeters: pass
20.4
44
0.833333
12
102
6.833333
0.833333
0.317073
0
0
0
0
0
0
0
0
0
0
0.117647
102
5
45
20.4
0.911111
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.25
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
ec96d84aeced6f3d65c74faed8ac851fa2339546
78
py
Python
dryadic/features/data/__init__.py
ohsu-comp-bio/dryads
015f6d3186a5146809334e2490c072e675b22891
[ "MIT" ]
null
null
null
dryadic/features/data/__init__.py
ohsu-comp-bio/dryads
015f6d3186a5146809334e2490c072e675b22891
[ "MIT" ]
null
null
null
dryadic/features/data/__init__.py
ohsu-comp-bio/dryads
015f6d3186a5146809334e2490c072e675b22891
[ "MIT" ]
null
null
null
from .domains import get_protein_domains __all__ = ['get_protein_domains']
13
40
0.794872
10
78
5.4
0.6
0.37037
0.62963
0
0
0
0
0
0
0
0
0
0.128205
78
5
41
15.6
0.794118
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
eca7f4266eec693020fbfae7a43b506ca83ea044
69
py
Python
app/model/blog/__init__.py
LXG-Shadow/flaskweb
cc170386053e8eb83cc1dc27e38acdf4290b7427
[ "MIT" ]
null
null
null
app/model/blog/__init__.py
LXG-Shadow/flaskweb
cc170386053e8eb83cc1dc27e38acdf4290b7427
[ "MIT" ]
null
null
null
app/model/blog/__init__.py
LXG-Shadow/flaskweb
cc170386053e8eb83cc1dc27e38acdf4290b7427
[ "MIT" ]
null
null
null
from .article import article,articles from .blogInfo import blogInfo
23
37
0.84058
9
69
6.444444
0.555556
0
0
0
0
0
0
0
0
0
0
0
0.115942
69
3
38
23
0.95082
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
eccad9770010cbad99ffb0378946504b4e3c9a7b
75
py
Python
captain_hook/services/patreon/__init__.py
brantje/captain_hook
dde076a96afffa2235b7d8d01d47c4e61099c6b6
[ "Apache-2.0" ]
1
2017-01-07T16:22:05.000Z
2017-01-07T16:22:05.000Z
captain_hook/services/patreon/__init__.py
brantje/captain_hook
dde076a96afffa2235b7d8d01d47c4e61099c6b6
[ "Apache-2.0" ]
3
2017-02-27T00:34:19.000Z
2017-02-27T14:25:44.000Z
captain_hook/services/patreon/__init__.py
brantje/telegram-github-bot
dde076a96afffa2235b7d8d01d47c4e61099c6b6
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import from .patreon import PatreonService
25
38
0.88
9
75
6.777778
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.106667
75
2
39
37.5
0.910448
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ecd00ecf18a9c6e117af1524e00829a4baae4c8b
40
py
Python
run_dummy_script.py
schwettmann/visualvocab
d6ba80dc648576baf13efcb7c778e7e4384dcacb
[ "MIT" ]
8
2021-10-15T19:45:41.000Z
2022-02-20T20:52:11.000Z
run_dummy_script.py
schwettmann/visualvocab
d6ba80dc648576baf13efcb7c778e7e4384dcacb
[ "MIT" ]
null
null
null
run_dummy_script.py
schwettmann/visualvocab
d6ba80dc648576baf13efcb7c778e7e4384dcacb
[ "MIT" ]
1
2021-12-31T01:46:54.000Z
2021-12-31T01:46:54.000Z
"""A dummy script that does nothing."""
20
39
0.675
6
40
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.15
40
1
40
40
0.794118
0.825
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
ecdfc59f596383d482237847e4e727610b23497b
105
py
Python
alphafold/Model/__init__.py
YaoYinYing/OpenFold2
57fd3cfba0bc70a2ca4c6943ba00e1c4892c1945
[ "MIT" ]
null
null
null
alphafold/Model/__init__.py
YaoYinYing/OpenFold2
57fd3cfba0bc70a2ca4c6943ba00e1c4892c1945
[ "MIT" ]
null
null
null
alphafold/Model/__init__.py
YaoYinYing/OpenFold2
57fd3cfba0bc70a2ca4c6943ba00e1c4892c1945
[ "MIT" ]
null
null
null
from .alphafold import AlphaFold from .config import model_config from .features import AlphaFoldFeatures
35
39
0.866667
13
105
6.923077
0.538462
0
0
0
0
0
0
0
0
0
0
0
0.104762
105
3
39
35
0.957447
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
01e14da6c8873aaf447bcfb7e251dc98786f2707
125
py
Python
mail_app/admin.py
samir321-pixel/Django_Celery_With_React
9a6063e2f7397e001f896c2e64f3e4e165b38289
[ "Apache-2.0" ]
4
2021-03-08T17:08:37.000Z
2021-12-31T02:40:53.000Z
mail_app/admin.py
samir321-pixel/Django_Celery_With_React
9a6063e2f7397e001f896c2e64f3e4e165b38289
[ "Apache-2.0" ]
null
null
null
mail_app/admin.py
samir321-pixel/Django_Celery_With_React
9a6063e2f7397e001f896c2e64f3e4e165b38289
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from .models import User_mail # Register your models here. admin.site.register(User_mail)
17.857143
32
0.808
19
125
5.210526
0.631579
0.161616
0
0
0
0
0
0
0
0
0
0
0.128
125
6
33
20.833333
0.908257
0.208
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bf266c60f109b86b91f55ae1d37c768f05d8077e
23
py
Python
callsignlookuptools/callook/__init__.py
miaowware/callsignlookuptools
d4fa15c82c37e48b9a3945b337170b05691a3824
[ "BSD-3-Clause" ]
null
null
null
callsignlookuptools/callook/__init__.py
miaowware/callsignlookuptools
d4fa15c82c37e48b9a3945b337170b05691a3824
[ "BSD-3-Clause" ]
4
2021-04-22T02:26:42.000Z
2021-12-28T20:04:40.000Z
callsignlookuptools/callook/__init__.py
miaowware/callsignlookuptools
d4fa15c82c37e48b9a3945b337170b05691a3824
[ "BSD-3-Clause" ]
null
null
null
# here to appease mypy
11.5
22
0.73913
4
23
4.25
1
0
0
0
0
0
0
0
0
0
0
0
0.217391
23
1
23
23
0.944444
0.869565
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
1753d76b755b8a7db74c7363063a8da0ec66231b
63
py
Python
packages/vaex-server/vaex/server/_version.py
mfouesneau/vaex
d0f38b4beee381744df7e02a37c329581e8bd70e
[ "MIT" ]
2
2020-12-01T09:41:54.000Z
2020-12-13T14:10:19.000Z
packages/vaex-server/vaex/server/_version.py
mfouesneau/vaex
d0f38b4beee381744df7e02a37c329581e8bd70e
[ "MIT" ]
null
null
null
packages/vaex-server/vaex/server/_version.py
mfouesneau/vaex
d0f38b4beee381744df7e02a37c329581e8bd70e
[ "MIT" ]
null
null
null
__version_tuple__ = (0, 3, 0, 'dev') __version__ = '0.3.0-dev'
21
36
0.634921
11
63
2.818182
0.454545
0.129032
0.193548
0.387097
0
0
0
0
0
0
0
0.111111
0.142857
63
2
37
31.5
0.462963
0
0
0
0
0
0.190476
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1757338067278ffbe992758a535cdecbe95efb35
112
py
Python
cwf2neo/__init__.py
sintax1/cwf2neo
25a8186798a6611f91e4b39052c3baa2023fb5b1
[ "Apache-2.0" ]
1
2021-06-02T11:44:00.000Z
2021-06-02T11:44:00.000Z
cwf2neo/__init__.py
sintax1/cwf2neo
25a8186798a6611f91e4b39052c3baa2023fb5b1
[ "Apache-2.0" ]
null
null
null
cwf2neo/__init__.py
sintax1/cwf2neo
25a8186798a6611f91e4b39052c3baa2023fb5b1
[ "Apache-2.0" ]
1
2021-11-27T00:33:28.000Z
2021-11-27T00:33:28.000Z
import logging from .cwf2neo import CWF # NOQA logging.getLogger(__name__).addHandler(logging.NullHandler())
18.666667
61
0.794643
13
112
6.538462
0.769231
0
0
0
0
0
0
0
0
0
0
0.01
0.107143
112
5
62
22.4
0.84
0.035714
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
176b5c2c5b490049dabb9104d5aed9c4578ffe66
549
py
Python
tests/test_text.py
johnsamuelwrites/ordia
bad7f725df91efb696290ae957eff18b54979d32
[ "Apache-2.0" ]
18
2018-09-27T13:15:20.000Z
2022-01-30T15:27:55.000Z
tests/test_text.py
johnsamuelwrites/ordia
bad7f725df91efb696290ae957eff18b54979d32
[ "Apache-2.0" ]
119
2018-05-28T10:53:58.000Z
2022-03-24T21:17:52.000Z
tests/test_text.py
johnsamuelwrites/ordia
bad7f725df91efb696290ae957eff18b54979d32
[ "Apache-2.0" ]
12
2018-06-13T16:55:59.000Z
2021-05-29T07:42:19.000Z
"""Test ordia text module.""" from ordia.text import text_to_sentences, text_to_words def test_text_to_sentences(): assert text_to_sentences('Hallo') == ['Hallo'] assert text_to_sentences('Hallo. World') == ['Hallo.', 'World'] assert text_to_sentences('Hallo.\nWorld') == ['Hallo.', 'World'] assert text_to_sentences('Hallo.\tWorld') == ['Hallo.', 'World'] assert text_to_sentences('and, e.g., hallo') == ['and, e.g., hallo'] def test_text_to_words(): assert text_to_words('e-mail to send') == ['e-mail', 'to', 'send']
32.294118
72
0.659381
78
549
4.358974
0.25641
0.176471
0.308824
0.308824
0.455882
0.302941
0.211765
0
0
0
0
0
0.143898
549
16
73
34.3125
0.723404
0.041894
0
0
0
0
0.267308
0
0
0
0
0
0.666667
1
0.222222
true
0
0.111111
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
5
17a54a9ca04aeb137f37ae26f817f835e4de00d3
40
py
Python
api/blueprints/users/views/__init__.py
mohamed040406/API
40ceb2b35271938d90e4309a6cdcf63ba0c17f0b
[ "MIT" ]
1
2021-05-01T02:25:27.000Z
2021-05-01T02:25:27.000Z
api/blueprints/users/views/__init__.py
mohamed040406/API
40ceb2b35271938d90e4309a6cdcf63ba0c17f0b
[ "MIT" ]
null
null
null
api/blueprints/users/views/__init__.py
mohamed040406/API
40ceb2b35271938d90e4309a6cdcf63ba0c17f0b
[ "MIT" ]
null
null
null
from . import users, roles # noqa F401
20
39
0.7
6
40
4.666667
1
0
0
0
0
0
0
0
0
0
0
0.096774
0.225
40
1
40
40
0.806452
0.225
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bd8eea1de06bfa74c4d65d32068d1fe604625a0c
109
py
Python
floq/__init__.py
jakelishman/floq
d0c83b239d0c9631182cea5d69c901793d7c67c6
[ "MIT" ]
2
2021-02-01T10:43:05.000Z
2021-08-10T14:51:45.000Z
floq/__init__.py
jakelishman/floq
d0c83b239d0c9631182cea5d69c901793d7c67c6
[ "MIT" ]
9
2018-09-10T10:46:04.000Z
2018-10-10T16:48:26.000Z
floq/__init__.py
jakelishman/floq
d0c83b239d0c9631182cea5d69c901793d7c67c6
[ "MIT" ]
null
null
null
from . import optimization, system, parallel, types from .system import System System.__module__ = __name__
21.8
51
0.798165
13
109
6.076923
0.615385
0
0
0
0
0
0
0
0
0
0
0
0.137615
109
4
52
27.25
0.840426
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bd9602e80e0ae846fd286c73768c4cec2250456c
61
py
Python
PVT/__init__.py
Justin900429/vision-transformer
e149092efbb83c166449944137db0ee5200f9325
[ "MIT" ]
1
2021-09-01T03:29:03.000Z
2021-09-01T03:29:03.000Z
PVT/__init__.py
Justin900429/vision-transformer
e149092efbb83c166449944137db0ee5200f9325
[ "MIT" ]
null
null
null
PVT/__init__.py
Justin900429/vision-transformer
e149092efbb83c166449944137db0ee5200f9325
[ "MIT" ]
null
null
null
from .pvt_block import TransformerBlock from .pvt import PVT
20.333333
39
0.836066
9
61
5.555556
0.555556
0.28
0
0
0
0
0
0
0
0
0
0
0.131148
61
2
40
30.5
0.943396
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bdbbb92e9bc08046daa14658a24644ac3b76114b
221
py
Python
tradercompany/binary_operators.py
yyamaguchi/tradercompany
42036f2fd8360f448e3a45fcf7a01331f7732fb8
[ "Apache-2.0" ]
null
null
null
tradercompany/binary_operators.py
yyamaguchi/tradercompany
42036f2fd8360f448e3a45fcf7a01331f7732fb8
[ "Apache-2.0" ]
1
2021-11-19T14:51:46.000Z
2021-11-19T14:51:46.000Z
tradercompany/binary_operators.py
yoshida-chem/tradercompany
42036f2fd8360f448e3a45fcf7a01331f7732fb8
[ "Apache-2.0" ]
null
null
null
def add(x, y): return x + y def diff(x, y): return x - y def multiple(x, y): return x * y def get_x(x,y): return x def get_y(x,y): return y def x_is_greater_than_y(x,y): return (x > y) * 1.0
11.631579
29
0.552036
48
221
2.416667
0.270833
0.172414
0.413793
0.387931
0.422414
0.336207
0
0
0
0
0
0.012987
0.303167
221
18
30
12.277778
0.74026
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
da0318458cd37551cfc693fe824bacff0eeb9c09
23
py
Python
src/goodEvening.py
AkshayIyer12/yourMessage
eab10ac95fb7fbfdd6f331255106f9c161ab2240
[ "MIT" ]
null
null
null
src/goodEvening.py
AkshayIyer12/yourMessage
eab10ac95fb7fbfdd6f331255106f9c161ab2240
[ "MIT" ]
null
null
null
src/goodEvening.py
AkshayIyer12/yourMessage
eab10ac95fb7fbfdd6f331255106f9c161ab2240
[ "MIT" ]
3
2017-10-16T03:22:17.000Z
2019-05-11T16:48:51.000Z
print("Good evening!")
11.5
22
0.695652
3
23
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.086957
23
1
23
23
0.761905
0
0
0
0
0
0.565217
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
da0d944e25ff30a96a17c9087ba04fa0333ca2f7
173
py
Python
status/admin.py
RyanBalfanz/Django-Bingo
0ba9f893c78fae1a5fb5eb45e43b36783d930e25
[ "BSD-3-Clause" ]
9
2015-11-04T16:22:05.000Z
2021-01-27T11:02:19.000Z
status/admin.py
osundiranayoade/Django-Bingo_builder
9ca7098c28fa0a4d77c3ea62877d44230940618d
[ "BSD-3-Clause" ]
1
2021-06-10T23:18:27.000Z
2021-06-10T23:18:27.000Z
status/admin.py
RyanBalfanz/Django-Bingo
0ba9f893c78fae1a5fb5eb45e43b36783d930e25
[ "BSD-3-Clause" ]
4
2015-07-13T19:22:09.000Z
2020-11-24T11:25:27.000Z
from django.contrib import admin from models import * import datetime admin.site.register(HostStats) admin.site.register(FilesystemStats) admin.site.register(NetworkStats)
21.625
36
0.83815
22
173
6.590909
0.545455
0.186207
0.351724
0
0
0
0
0
0
0
0
0
0.080925
173
7
37
24.714286
0.91195
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
da3be80e51a969ec274df7bfcb1adbe83cb323f0
121
py
Python
garminproj/connect/__init__.py
njkalish/garmin-proj
ecc791339e5edb9d5ceb19ec3e9c4dfc1eea641e
[ "MIT" ]
1
2020-05-06T09:09:29.000Z
2020-05-06T09:09:29.000Z
garminproj/connect/__init__.py
njkalish/garmin-proj
ecc791339e5edb9d5ceb19ec3e9c4dfc1eea641e
[ "MIT" ]
3
2020-04-20T09:49:21.000Z
2020-05-10T06:09:15.000Z
garminproj/connect/__init__.py
njkalish/garmin-proj
ecc791339e5edb9d5ceb19ec3e9c4dfc1eea641e
[ "MIT" ]
2
2020-04-20T04:22:53.000Z
2020-04-27T15:19:22.000Z
from .login import get_garmin_client, garmin_client from .activities import get_activity_tcx_data, get_latest_activities
40.333333
68
0.884298
18
121
5.5
0.611111
0.181818
0
0
0
0
0
0
0
0
0
0
0.082645
121
2
69
60.5
0.891892
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
da4e7c800b10d740a000e7869469956f79647996
10,575
py
Python
pyhopper/tests/test_canceler.py
pyhopper/pyhopper
3a5a449ba36c03ba365d33f900c3ecbb2d107e6b
[ "Apache-2.0" ]
6
2021-06-21T11:25:45.000Z
2022-03-12T15:16:06.000Z
pyhopper/tests/test_canceler.py
PyHopper/PyHopper
3a5a449ba36c03ba365d33f900c3ecbb2d107e6b
[ "Apache-2.0" ]
null
null
null
pyhopper/tests/test_canceler.py
PyHopper/PyHopper
3a5a449ba36c03ba365d33f900c3ecbb2d107e6b
[ "Apache-2.0" ]
1
2021-07-07T20:56:02.000Z
2021-07-07T20:56:02.000Z
# Copyright 2021 Mathias Lechner and the PyHopper team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import sys import time import pytest import pyhopper import pyhopper.cancelers def of_inc(param): return param["a"] + param["b"] def test_simple_inc_dec(): search = pyhopper.Search( { "a": pyhopper.float(lb=0, ub=10, init=2), "b": pyhopper.float(lb=0, ub=10, init=2), } ) r1 = search.run(of_inc, direction="max", max_steps=10) assert of_inc(r1) >= 4 search = pyhopper.Search( { "a": pyhopper.float(lb=0, ub=10, init=2), "b": pyhopper.float(lb=0, ub=10, init=2), } ) r1 = search.run(of_inc, direction="min", max_steps=10) assert of_inc(r1) <= 4 def of(param, x=None): for i in range(10): yield np.random.default_rng().normal() - np.square(param["lr"] - 4) * 10 def test_simple1(): search = pyhopper.Search( { "lr": pyhopper.float( lb=0, ub=10, ) } ) r1 = search.run(of, direction="max", max_steps=10) r1 = search.run( of, direction="max", max_steps=100, canceler=pyhopper.cancelers.TopKCanceler(5) ) r1 = search.run( of, direction="max", max_steps=200, n_jobs=5, canceler=pyhopper.cancelers.TopKCanceler(5), ) def test_simple2(): search = pyhopper.Search( { "lr": pyhopper.float( lb=0, ub=10, ) } ) with pytest.raises(ValueError): search = pyhopper.Search({"lr": pyhopper.float(-1)}) r1 = search.run(of, direction="max", max_steps=10) r1 = search.run( of, direction="max", max_steps=100, canceler=pyhopper.cancelers.QuantileCanceler(50), ) r1 = search.run( of, direction="max", 
max_steps=100, n_jobs=5, canceler=pyhopper.cancelers.QuantileCanceler(90), ) r1 = search.run( of, direction="max", max_steps=100, canceler=pyhopper.cancelers.QuantileCanceler(0.5), ) of_counter = 0 def of_cancel_first(param, x=None): global of_counter of_counter += 1 if of_counter < 4: raise pyhopper.CancelEvaluation() return np.random.default_rng().normal() def test_exception1(): search = pyhopper.Search( { "lr": pyhopper.float( lb=0, ub=10, ) } ) r1 = search.run(of_cancel_first, direction="max", max_steps=10) assert "lr" in r1.keys() def of_nan(param, x=None): global of_counter of_counter += 1 if of_counter in [0, 1, 4, 10, 50]: return np.NaN return np.random.default_rng().normal() def test_nan(): global of_counter of_counter = 0 search = pyhopper.Search( { "lr": pyhopper.float( lb=0, ub=10, ) } ) with pytest.raises(ValueError): of_counter = 0 r1 = search.run(of_nan, direction="max", max_steps=10) of_counter = 0 r1 = search.run(of_nan, direction="max", ignore_nans=True, max_steps=10) with pytest.raises(ValueError): of_counter = 0 r1 = search.run( of_nan, direction="max", ignore_nans=True, max_steps=100, canceler=pyhopper.cancelers.QuantileCanceler(0.5), ) with pytest.raises(ValueError): of_counter = 0 r1 = search.run(of_nan, direction="max", max_steps=200, n_jobs=5) of_counter = 0 r1 = search.run(of_nan, direction="max", ignore_nans=True, n_jobs=5, max_steps=300) of_counter = 0 assert "lr" in r1.keys() def of_nan2(param, x=None): r = np.random.default_rng().normal() if r > 0.5: return np.NaN return r def test_nan_simple(): global of_counter of_counter = 0 search = pyhopper.Search( { "lr": pyhopper.float( lb=0, ub=10, ) } ) of_counter = 0 r1 = search.run(of_nan, direction="max", ignore_nans=True, max_steps=10) of_counter = 0 r1 = search.run(of_nan2, direction="max", ignore_nans=True, n_jobs=5, max_steps=200) def test_topk(): canceller = pyhopper.cancelers.TopKCanceler(3) canceller.direction = "max" assert canceller.should_cancel([0, 0, 0]) == False assert 
canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0, 0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0, 0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False canceller.append([12, 12, 12, 12, 10]) canceller.append([20, 20, 20, 20, 20]) canceller.append([0, 0, 0, 0, 0]) canceller.append([12, 12, 10, 12, 10]) canceller.append([12, 12, 12, 12, 10]) canceller.append([12, 12, 12, 12, 10]) assert canceller.should_cancel([11]) == True assert canceller.should_cancel([11, 11]) == True assert canceller.should_cancel([11, 11, 11]) == True assert canceller.should_cancel([10, 11, 11, 11]) == True assert canceller.should_cancel([11, 11, 11, 11, 0]) == False assert canceller.should_cancel([12]) == False assert canceller.should_cancel([12, 12]) == False assert canceller.should_cancel([12, 12, 12]) == False assert canceller.should_cancel([12, 12, 12, 12]) == False assert canceller.should_cancel([12, 12, 12, 12, 12]) == False canceller.append([13, 15, 20, 13, 30]) canceller.append([15, 13, 20, 15, 30]) canceller.append([20, 15, 20, 20, 30]) canceller.append([13, 20, 13, 20, 30]) assert canceller.should_cancel([12]) == True assert canceller.should_cancel([12, 12]) == True assert canceller.should_cancel([12, 12, 12]) == True assert canceller.should_cancel([12, 12, 12, 12]) == True assert canceller.should_cancel([12, 12, 12, 12, 12]) == False assert canceller.should_cancel([50]) == False assert canceller.should_cancel([50, 20]) == False assert canceller.should_cancel([50, 20, 20]) == False assert canceller.should_cancel([50, 20, 20, 20]) == False assert canceller.should_cancel([20, 20, 20, 20, 20]) == False def test_quantile2(): canceller = pyhopper.cancelers.QuantileCanceler(50) canceller.direction = "min" assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert 
canceller.should_cancel([0, 0, 0, 0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0, 0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False canceller.append([5, 5, 5, 5, 5]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([0, 0, 0, 0, 0]) canceller.append([0, 0, 0, 0, 0]) canceller.append([0, 0, 0, 0, 0]) canceller.append([7, 7, 7, 7, 7]) assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0, 0]) == False assert canceller.should_cancel([10, 10, 10, 10, 10]) == False assert canceller.should_cancel([10, 10, 10]) == True assert canceller.should_cancel([10]) == True assert canceller.should_cancel([10, 10]) == True def test_quantile(): canceller = pyhopper.cancelers.QuantileCanceler(50) canceller.direction = "max" assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0, 0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0, 0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False assert canceller.should_cancel([0, 0, 0]) == False canceller.append([5, 5, 5, 5, 5]) canceller.append([20, 20, 50, 20, 20]) canceller.append([0, 0, 0, 0, 0]) canceller.append([7, 7, 7, 7, 7]) canceller.append([0, 0, 0, 0, 0]) canceller.append([12, 12, 10, 12, 10]) canceller.append([12, 12, 12, 12, 10]) canceller.append([12, 12, 12, 12, 10]) canceller.append([12, 12, 12, 12, 10]) assert canceller.should_cancel([8]) == True assert canceller.should_cancel([8, 10]) == True assert canceller.should_cancel([8, 10, 0]) == True assert canceller.should_cancel([8, 10, 0, 5]) == True assert canceller.should_cancel([8, 10, 0, 5, 9]) == False assert canceller.should_cancel([12]) == False assert 
canceller.should_cancel([12, 12]) == False assert canceller.should_cancel([8, 10, 10]) == False assert canceller.should_cancel([8, 10, 10, 12]) == False assert canceller.should_cancel([8, 10, 10, 10]) == True canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) canceller.append([20, 20, 50, 20, 20]) assert canceller.should_cancel([12]) == True assert canceller.should_cancel([12, 12]) == True assert canceller.should_cancel([8, 10, 30]) == True assert canceller.should_cancel([8, 10, 30, 12]) == True assert canceller.should_cancel([15, 15, 30, 15, 15]) == False canceller.append([50, 50]) canceller.append([50]) for i in range(20): canceller.append([50, 100, 50]) assert canceller.should_cancel([20]) == True assert canceller.should_cancel([20, 20]) == True assert canceller.should_cancel([100]) == False assert canceller.should_cancel([100, 100]) == False if __name__ == "__main__": test_nan_simple()
32.240854
88
0.608227
1,478
10,575
4.246279
0.104871
0.028362
0.220841
0.283939
0.809751
0.785532
0.749203
0.698215
0.635118
0.577438
0
0.0989
0.243688
10,575
328
89
32.240854
0.685796
0.054374
0
0.538745
0
0
0.009212
0
0
0
0
0
0.258303
1
0.051661
false
0
0.02214
0.00369
0.095941
0
0
0
0
null
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
da519ec1158d6430b018a0c25453ce2022f7121f
229
py
Python
otokon_form/post_mail/tasks.py
bilbeyt/otokon-e_form
3be5fc79b6bedc6cee81fc3b03cb86c9c19c2c12
[ "MIT" ]
null
null
null
otokon_form/post_mail/tasks.py
bilbeyt/otokon-e_form
3be5fc79b6bedc6cee81fc3b03cb86c9c19c2c12
[ "MIT" ]
null
null
null
otokon_form/post_mail/tasks.py
bilbeyt/otokon-e_form
3be5fc79b6bedc6cee81fc3b03cb86c9c19c2c12
[ "MIT" ]
null
null
null
from __future__ import absolute_import from celery import shared_task from django.core.mail import send_mail @shared_task def send(title, content, sender, to): send_mail(title, content, sender, (to,), fail_silently=False)
22.9
65
0.786026
34
229
5
0.558824
0.117647
0.211765
0.235294
0
0
0
0
0
0
0
0
0.131004
229
9
66
25.444444
0.854271
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.5
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
da72c417f102ec45f45c53f4c0c261eb344b5b5e
78,960
py
Python
starsim/nbspectra.py
dbarochlopez/starsim
add1c930e6506958dc57d2c815069462797a9dcb
[ "MIT" ]
null
null
null
starsim/nbspectra.py
dbarochlopez/starsim
add1c930e6506958dc57d2c815069462797a9dcb
[ "MIT" ]
null
null
null
starsim/nbspectra.py
dbarochlopez/starsim
add1c930e6506958dc57d2c815069462797a9dcb
[ "MIT" ]
null
null
null
#NUMBA ############################################ import numba as nb import numpy as np import math as m @nb.njit def dummy(): return None @nb.njit(cache=True,error_model='numpy') def fit_multiplicative_offset_jitter(x0,f,y,dy): off=x0[0] jit=x0[1] newerr=np.sqrt((dy)**2+jit**2)/off lnL=-0.5*np.sum(((y/off-f)/(newerr))**2.0+np.log(2.0*np.pi)+np.log(newerr**2)) return -lnL @nb.njit(cache=True,error_model='numpy') def fit_only_multiplicative_offset(x0,f,y,dy): off=x0 lnL=-0.5*np.sum(((y/off-f)/(dy/off))**2.0+np.log(2.0*np.pi)+np.log((dy/off)**2)) return -lnL @nb.njit(cache=True,error_model='numpy') def fit_linear_offset_jitter(x0,f,y,dy): off=x0[0] jit=x0[1] lnL=-0.5*np.sum(((y-off-f)/(np.sqrt(dy**2+jit**2)))**2.0+np.log(2.0*np.pi)+np.log(dy**2+jit**2)) return -lnL @nb.njit(cache=True,error_model='numpy') def fit_only_linear_offset(x0,f,y,dy): off=x0 lnL=-0.5*np.sum(((y-off-f)/(dy))**2.0+np.log(2.0*np.pi)+np.log(dy**2)) return -lnL @nb.njit(cache=True,error_model='numpy') def fit_only_jitter(x0,f,y,dy): jit=x0 lnL=-0.5*np.sum(((y-f)/(np.sqrt(dy**2+jit**2)))**2.0+np.log(2.0*np.pi)+np.log(dy**2+jit**2)) return -lnL @nb.njit(cache=True,error_model='numpy') def _coeff_mat(x, deg): mat_ = np.zeros(shape=(x.shape[0],deg + 1)) const = np.ones_like(x) mat_[:,0] = const mat_[:, 1] = x if deg > 1: for n in range(2, deg + 1): mat_[:, n] = x**n return mat_ @nb.njit(cache=True,error_model='numpy') def _fit_x(a, b): # linalg solves ax = b det_ = np.linalg.lstsq(a, b)[0] return det_ @nb.njit(cache=True,error_model='numpy') def fit_poly(x, y, deg,w): a = _coeff_mat(x, deg)*w.reshape(-1,1) p = _fit_x(a, y*w) # Reverse order so p[0] is coefficient of highest order return p[::-1] ##################################################### ############# UTILITIES ############################# ##################################################### @nb.njit(cache=True,error_model='numpy') def gaussian(x, amplitude, mean, stddev): return amplitude * np.exp(-(x-mean)**2/(2*stddev**2)) 
@nb.njit(cache=True,error_model='numpy')
def gaussian2(x, amplitude, mean, stddev,C):
    """Gaussian profile on top of a constant continuum C."""
    return C + amplitude * np.exp(-(x-mean)**2/(2*stddev**2))

@nb.njit(cache=True,error_model='numpy')
def normalize_spectra_nb(bins,wavelength,flux):
    """Locate the flux maximum of each wavelength bin.

    For every interval (bins[i], bins[i+1]) returns the maximum flux and the
    wavelength at which it occurs (continuum upper envelope of the spectrum).
    Assumes every bin contains at least one wavelength sample.
    """
    x_bin=np.zeros(len(bins)-1)
    y_bin=np.zeros(len(bins)-1)
    for i in range(len(bins)-1):
        idxup = wavelength>bins[i]
        idxdown= wavelength<bins[i+1]
        idx=idxup & idxdown
        y_bin[i]=flux[idx].max()
        x_bin[i]=wavelength[idx][np.argmax(flux[idx])]
    #divide by 6th deg polynomial
    return x_bin, y_bin

@nb.njit(cache=True,error_model='numpy')
def interpolation_nb(xp,x,y,left=0,right=0):
    """Linear interpolation of (x, y) evaluated at xp.

    x must be sorted ascending. Values outside [x[0], x[-1]] are filled with
    `left` / `right`. Both xp and x are scanned monotonically, so xp should
    also be (roughly) sorted for the resume-from-last-index scan to be valid.
    """
    # Create result array
    yp=np.zeros(len(xp))
    minx=x[0]
    maxx=x[-1]
    lastidx=1
    for i,xi in enumerate(xp):
        if xi<minx: #extrapolate left
            yp[i]=left
        elif xi>maxx: #extrapolate right
            yp[i]=right
        else:
            # x is sorted: resume from the previous match instead of re-scanning all of x
            for j in range(lastidx,len(x)):
                if x[j]>xi: # first x greater than xi -> interpolate between x[j-1] and x[j]
                    yp[i]=y[j-1]+(xi-x[j-1])*(y[j]-y[j-1])/(x[j]-x[j-1])
                    lastidx=j
                    break
    return yp

@nb.njit(cache=True,error_model='numpy')
def cross_correlation_nb(rv,wv,flx,wv_ref,flx_ref):
    """CCF of spectrum (wv, flx) against reference (wv_ref, flx_ref) over RV grid `rv` (m/s).

    The reference wavelengths are Doppler-shifted at each rv, the spectrum is
    interpolated onto them, and the product is averaged. The result is
    rescaled to span [0, 1]. Can be optimized.
    """
    ccf=np.zeros(len(rv)) #initialize ccf
    lenf=len(flx_ref)
    for i in range(len(rv)):
        wvshift=wv_ref*(1.0+rv[i]/2.99792458e8) #shift ref spectrum, in m/s
        # fshift=np.interp(wvshift,wv,flx)
        fshift = interpolation_nb(wvshift,wv,flx,left=0,right=0)
        ccf[i]=np.sum(flx_ref*fshift)/lenf #compute ccf
    return (ccf-np.min(ccf))/np.max((ccf-np.min(ccf)))

@nb.njit(cache=True,error_model='numpy')
def cross_correlation_mask(rv,wv,f,wvm,fm):
    """CCF of spectrum (wv, f) against a binary mask (wvm line centers, fm weights).

    The if/elif ladder below converts a shifted line wavelength directly into
    a pixel index, assuming `wv` is a piecewise-uniform grid with steps
    0.1 A (<3000 A), 0.006 A (3000-4999.986 A), 0.01 A (5000-10000 A),
    0.02 A (10000-15000 A) and 0.03 A (>15000 A) — TODO confirm this matches
    the wavelength grid actually passed by the callers.
    Each mask line is split between its two neighbouring pixels in proportion
    to the overlap. Output is rescaled to [0, 1].
    """
    ccf = np.zeros(len(rv))
    lenm = len(wvm)
    wvmin=wv[0]
    for i in range(len(rv)):
        wvshift=wvm*(1.0+rv[i]/2.99792458e8) #shift ref spectrum, in m/s
        #for each mask line
        for j in range(lenm):
            #find wavelengths right and left of the line.
            wvline=wvshift[j]
            if wvline<3000.0:
                idxlf = int((wvline-wvmin)/0.1)
            elif wvline<4999.986:
                if wvmin<3000.0:
                    idxlf = np.round(int((3000.0-wvmin)/0.1)) + int((wvline-3000.0)/0.006)
                else:
                    idxlf = int((wvline-wvmin)/0.006)
            elif wvline<5000.0:
                if wvmin<3000.0:
                    idxlf = np.round(int((3000.0-wvmin)/0.1)) + int((4999.986-3000.0)/0.006) + 1
                else:
                    idxlf = int((4999.986-wvmin)/0.006) + 1
            elif wvline<10000.0:
                if wvmin<3000.0:
                    idxlf = np.round(int((3000.0-wvmin)/0.1)) + int((4999.986-3000.0)/0.006) + 1 + int((wvline-5000.0)/0.01)
                elif wvmin<4999.986:
                    idxlf = int((4999.986-wvmin)/0.006) + 1 + int((wvline-5000.0)/0.01)
                else:
                    idxlf = int((wvline-wvmin)/0.01)
            elif wvline<15000.0:
                if wvmin<3000.0:
                    idxlf = np.round(int((3000.0-wvmin)/0.1)) + int((4999.986-3000.0)/0.006) + 1 + int((10000.0-5000.0)/0.01) + int((wvline-10000.0)/0.02)
                elif wvmin<4999.986:
                    idxlf = int((4999.986-wvmin)/0.006) + 1 + int((10000-5000.0)/0.01) + int((wvline-10000.0)/0.02)
                elif wvmin<10000.0:
                    idxlf = int((10000.0-wvmin)/0.01) + int((wvline-10000.0)/0.02)
                else:
                    idxlf = int((wvline-wvmin)/0.02)
            else:
                if wvmin<3000.0:
                    idxlf = np.round(int((3000.0-wvmin)/0.1)) + int((4999.986-3000.0)/0.006) + 1 + int((10000.0-5000.0)/0.01) + int((15000.0-10000.0)/0.02) + int((wvline-15000.0)/0.03)
                elif wvmin<4999.986:
                    idxlf = int((4999.986-wvmin)/0.006) + 1 + int((10000-5000.0)/0.01) + int((15000-10000.0)/0.02) + int((wvline-15000.0)/0.03)
                elif wvmin<10000.0:
                    idxlf = int((10000.0-wvmin)/0.01) + int((15000-10000.0)/0.02) + int((wvline-15000.0)/0.03)
                elif wvmin<15000.0:
                    idxlf = int((15000-wvmin)/0.02) + int((wvline-15000.0)/0.03)
                else:
                    idxlf = int((wvline-wvmin)/0.03)
            idxrg = idxlf + 1
            diffwv=wv[idxrg]-wv[idxlf] #pixel size in wavelength
            midpix=(wv[idxrg]+wv[idxlf])/2 #wavelength between the two pixels
            leftmask = wvline - diffwv/2 #left edge of the mask
            rightmask = wvline + diffwv/2 #right edge of the mask
            frac1 = (midpix - leftmask)/diffwv #fraction of the mask overlapping the left pixel
            frac2 = (rightmask - midpix)/diffwv #fraction of the mask overlapping the right pixel
            midleft = (leftmask + midpix)/2 #central wv of the left overlap
            midright = (rightmask + midpix)/2 #central wv of the right overlap
            f1 = f[idxlf] + (midleft-wv[idxlf])*(f[idxrg]-f[idxlf])/(diffwv)
            f2 = f[idxlf] + (midright-wv[idxlf])*(f[idxrg]-f[idxlf])/(diffwv)
            ccf[i]=ccf[i] - f1*fm[j]*frac1 - f2*fm[j]*frac2
    return (ccf-np.min(ccf))/np.max((ccf-np.min(ccf)))

@nb.njit(cache=True,error_model='numpy')
def weight_mask(wvi,wvf,o_weight,wvm,fm):
    """Apply per-order weights o_weight (orders spanning [wvi[j], wvf[j]]) to mask weights fm.

    Mask lines outside any order get weight 0. Assumes wvm and the order
    limits are sorted ascending.
    NOTE(review): the `i-=1` below has no effect — `i` is reassigned by the
    `range` loop on the next iteration, so a line falling in the gap between
    order j and order j+1 is NOT re-tested against the new order; it keeps
    its original fm value. Confirm whether that is intended.
    """
    j=0
    maxj=len(wvi)
    for i in range(len(wvm)):
        if wvm[i]<wvi[j]:
            fm[i]=0.0
        elif wvm[i]>=wvi[j] and wvm[i]<=wvf[j]:
            fm[i]=fm[i]*o_weight[j]
        elif wvm[i]>wvf[j]:
            j+=1
            if j>=maxj:
                fm[i]=0.0
                break
            else:
                i-=1
    return wvm, fm

@nb.njit(cache=True,error_model='numpy')
def polar2colatitude_nb(r,a,i):
    '''Enters the polars coordinates and the inclination i (with respect to
    the north pole, i=0 makes transits, 90-(inclination defined in exoplanets))
    Returns the colatitude in the star (90-latitude), in radians.
    r is the projected radial coordinate (0..1); a is the azimuth in degrees.
    '''
    a=a*m.pi/180.
    i=-i #negative to make the rotation toward the observer.
    theta=m.acos(r*m.sin(a)*m.cos(i)-m.sin(i)*m.sqrt(1-r*r))
    return theta

@nb.njit(cache=True,error_model='numpy')
def polar2longitude_nb(r,a,i):
    '''Enters the polars coordinates and the inclination i (with respect to
    the north pole, i=0 makes transits, 90-(inclination defined in exoplanets))
    Returns the longitude in the star (from -90 to 90), in radians.
    '''
    a=a*m.pi/180.
    i=-i #negative to make the rotation toward the observer.
    h=m.sqrt((1.-(r*m.cos(a))**2.)/(m.tan(i)**2.+1.)) #height of the terminator (long=pi/2)
    if r*np.sin(a)>h:
        #to correct for mirroring of longitudes in the terminator
        phi=m.asin(-r*m.cos(a)/m.sqrt(1.-(r*m.sin(a)*m.cos(i)-m.sin(i)*m.sqrt(1.-r*r))**2.))+m.pi
    else:
        phi=m.asin(r*m.cos(a)/m.sqrt(1.-(r*m.sin(a)*m.cos(i)-m.sin(i)*m.sqrt(1.-r*r))**2.))
    return phi

@nb.njit(cache=True,error_model='numpy')
def speed_bisector_nb(rv,ccf,integrated_bis):
    ''' Compute the bisector of a CCF peak.

    The CCF wings are cut at their local minima around the maximum, then 50
    equally spaced CCF levels are traced and the midpoint of the left/right
    crossings taken as the bisector.
    Returns (cutleft, cutright, xbis, ybis).
    NOTE(review): `integrated_bis` is currently unused (the conditional that
    used it is commented out), and rv1/rv2 may be left over from a previous
    level if a crossing is not found at some ybis[i] — confirm inputs always
    produce both crossings.
    '''
    idxmax=ccf.argmax()
    maxccf=ccf[idxmax]
    maxrv=rv[idxmax]
    xnew = rv
    ynew = ccf
    cutleft=0
    cutright=len(ynew)-1
    # if not integrated_bis: #cut the CCF at the minimum of the wings only for reference CCF, if not there are errors.
    for i in range(len(ynew)):
        if xnew[i]>maxrv:
            if ynew[i]>ynew[i-1]: #right wing turns back up -> cut here
                cutright=i
                break
    for i in range(len(ynew)):
        if xnew[-1-i]<maxrv:
            if ynew[-1-i]>ynew[-i]: #left wing turns back up -> cut here
                cutleft=len(ynew)-i
                break
    xnew=xnew[cutleft:cutright]
    ynew=ynew[cutleft:cutright]
    minright=np.min(ynew[xnew>maxrv])
    minleft=np.min(ynew[xnew<maxrv])
    minccf=np.max(np.array([minright,minleft]))
    ybis=np.linspace(minccf+0.01*(maxccf-minccf),0.999*maxccf,50) #levels from just above the higher wing minimum to the maximum
    xbis=np.zeros(len(ybis))
    for i in range(len(ybis)):
        for j in range(len(ynew)-1):
            if ynew[j]<ybis[i] and ynew[j+1]>ybis[i] and xnew[j]<maxrv: #left-side crossing
                rv1=xnew[j]+(xnew[j+1]-xnew[j])*(ybis[i]-ynew[j])/(ynew[j+1]-ynew[j])
            if ynew[j]>ybis[i] and ynew[j+1]<ybis[i] and xnew[j+1]>maxrv: #right-side crossing
                rv2=xnew[j]+(xnew[j+1]-xnew[j])*(ybis[i]-ynew[j])/(ynew[j+1]-ynew[j])
        xbis[i]=(rv1+rv2)/2.0 #bisector
    # xbis[-1]=maxrv #at the top should be max RV
    return cutleft,cutright,xbis,ybis

@nb.njit(cache=True,error_model='numpy')
def limb_darkening_law(LD_law,LD1,LD2,amu):
    """Limb-darkening factor at projected angle cosine `amu` for the chosen law.

    Supported LD_law values: 'linear', 'quadratic', 'sqrt', 'log'.
    NOTE(review): for any other value `mu` is never assigned — under numba
    this is a compile/runtime error rather than a graceful fallback.
    """
    if LD_law == 'linear':
        mu=1-LD1*(1-amu)
    elif LD_law == 'quadratic':
        a=2*np.sqrt(LD1)*LD2
        b=np.sqrt(LD1)*(1-2*LD2)
        mu=1-a*(1-amu)-b*(1-amu)**2
    elif LD_law == 'sqrt':
        a=np.sqrt(LD1)*(1-2*LD2)
        b=2*np.sqrt(LD1)*LD2
        mu=1-a*(1-amu)-b*(1-np.sqrt(amu))
    elif LD_law == 'log':
        a=LD2*LD1**2+1
        b=LD1**2-1
        mu=1-a*(1-amu)-b*amu*(1-np.log(amu))
    else:
        print('LD law not valid.')
    return mu

@nb.njit(cache=True,error_model='numpy')
def compute_spot_position(t,spot_map,ref_time,Prot,diff_rot,Revo,Q):
    """Position and radius of every spot at time t.

    spot_map rows: [t_init, duration, colatitude(deg), longitude(deg),
    Rcoef0..2]. Longitude is advanced by rigid rotation (period Prot) plus a
    latitude-dependent differential-rotation term. Spot radius evolves
    following `Revo` ('constant', 'linear' or 'quadratic'); Q is the
    facula-to-spot area ratio.
    Returns an (Nspots, 4) array [colatitude, longitude, r_spot, r_facula],
    all in radians.
    """
    pos=np.zeros((len(spot_map),4))
    for i in range(len(spot_map)):
        tini = spot_map[i][0] #time of spot appearance
        dur = spot_map[i][1] #duration of the spot
        tfin = tini + dur #final time of spot
        colat = spot_map[i][2] #colatitude
        lat = 90 - colat #latitude
        longi = spot_map[i][3] #longitude
        Rcoef = spot_map[i][4:7] #coefficients for the evolution of the radius. Depends on the desired law.
        pht = longi + (t-ref_time)/Prot%1*360 #longitude advanced by rigid rotation
        #update longitude adding diff rotation
        phsr= pht + (t-ref_time)*diff_rot*(1.698*m.sin(np.deg2rad(lat))**2+2.346*m.sin(np.deg2rad(lat))**4)
        if Revo == 'constant':
            if t>=tini and t<=tfin:
                rad=Rcoef[0]
            else:
                rad=0.0
        elif Revo == 'linear':
            if t>=tini and t<=tfin:
                rad=Rcoef[0]+(t-tini)*(Rcoef[1]-Rcoef[0])/dur
            else:
                rad=0.0
        elif Revo == 'quadratic':
            if t>=tini and t<=tfin:
                rad=-4*Rcoef[0]/(dur*(1-2*tini))*(t-tini)*(t-tini-dur)
            else:
                rad=0.0
        else:
            print('Spot evolution law not implemented yet. Only constant and linear are implemented.')
        if Q!=0.0: #to speed up the code when no faculae are present
            rad_fac=np.deg2rad(rad)*m.sqrt(1+Q)
        else:
            rad_fac=0.0
        pos[i]=np.array([np.deg2rad(colat), np.deg2rad(phsr), np.deg2rad(rad), rad_fac])
    #return position and radii of spots at t in radians.
    return pos

@nb.njit(cache=True,error_model='numpy')
def compute_planet_pos(t,esinw,ecosw,T0p,Pp,rad_pl,b,a,alp):
    """Sky-projected planet position at time t, in polar coordinates.

    Orbit given by esinw/ecosw, transit epoch T0p, period Pp, impact
    parameter b, semi-major axis a (stellar radii) and spin-orbit angle alp.
    Returns [rho, theta, rad_pl] (rho in stellar radii); when the planet is
    behind the star, rho is placed off-disc to skip secondary transits.
    """
    if(esinw==0 and ecosw==0):
        ecc=0
        omega=0
    else:
        ecc=m.sqrt(esinw**2+ecosw**2)
        omega=m.atan2(esinw,ecosw)
    t_peri = Ttrans_2_Tperi(T0p,Pp, ecc, omega)
    sinf,cosf=true_anomaly(t,Pp,ecc,t_peri)
    cosftrueomega=cosf*m.cos(omega+m.pi/2)-sinf*m.sin(omega+np.pi/2) #cos(f+w)=cos(f)*cos(w)-sin(f)*sin(w)
    sinftrueomega=cosf*m.sin(omega+m.pi/2)+sinf*m.cos(omega+np.pi/2) #sin(f+w)=cos(f)*sin(w)+sin(f)*cos(w)
    if cosftrueomega>0.0:
        return np.array([1+rad_pl*2, 0.0, rad_pl]) #avoid secondary transits
    cosi = (b/a)*(1+esinw)/(1-ecc**2) #cosine of planet inclination (i=90 is transit)
    rpl=a*(1-ecc**2)/(1+ecc*cosf)
    xpl=rpl*(-m.cos(alp)*sinftrueomega-m.sin(alp)*cosftrueomega*cosi)
    ypl=rpl*(m.sin(alp)*sinftrueomega-m.cos(alp)*cosftrueomega*cosi)
    rhopl=m.sqrt(ypl**2+xpl**2)
    thpl=m.atan2(ypl,xpl)
    pos=np.array([rhopl, thpl, rad_pl],dtype=np.float64) #rho, theta, and radii (in Rstar) of the planet
    return pos

@nb.njit(cache=True,error_model='numpy')
def Ttrans_2_Tperi(T0, P, e, w):
    """Convert transit epoch T0 to time of periastron for orbit (P, e, w)."""
    f = m.pi/2 - w #true anomaly at mid-transit
    E = 2 * m.atan(m.tan(f/2.) * m.sqrt((1.-e)/(1.+e))) # eccentric anomaly
    Tp = T0 - P/(2*np.pi) * (E - e*m.sin(E)) # time of periastron
    return Tp

@nb.njit(cache=True,error_model='numpy')
def true_anomaly(x,period,ecc,tperi):
    """Solve Kepler's equation at time x and return (sin f, cos f) of the true anomaly."""
    fmean=2.0*m.pi*(x-tperi)/period #mean anomaly
    #Solve by Newton's method x(n+1)=x(n)-f(x(n))/f'(x(n))
    fecc=fmean
    diff=1.0
    while(diff>1.0E-6):
        fecc_0=fecc
        fecc=fecc_0-(fecc_0-ecc*m.sin(fecc_0)-fmean)/(1.0-ecc*m.cos(fecc_0))
        diff=m.fabs(fecc-fecc_0)
    sinf=m.sqrt(1.0-ecc*ecc)*m.sin(fecc)/(1.0-ecc*m.cos(fecc))
    cosf=(m.cos(fecc)-ecc)/(1.0-ecc*m.cos(fecc))
    return sinf, cosf

########################################################################################
########################################################################################
#                 SPECTROSCOPY FUNCTIONS FOR SPHERICAL GRID                            #
########################################################################################
########################################################################################
#with this the x and y width of each grid is the same, thus the area of the grids is similar in all the sphere, avoiding an over/under sampling of the poles/center
@nb.njit(cache=True,error_model='numpy')
def generate_grid_coordinates_nb(N):
    """Build an equal-width spherical grid of N concentric rings (pole facing the observer).

    Returns (Ngrids, Ngrid_in_ring, centres, amu, rs, alphas, xs, ys, zs,
    area, parea): ring colatitudes and mu values, per-grid polar coordinates
    and cartesian unit vectors, plus true and projected areas.
    """
    Nt=2*N-1 #N is number of concentric rings. Nt is counting them two times minus the center one.
    width=180.0/(2*N-1) #width of one grid element.
    centres=np.append(0,np.linspace(width,90-width/2,N-1)) #latitudes of the concentric grids
    anglesout=np.linspace(0,360-width,2*Nt) #longitudes of the grid in the equator. The pole of the grid faces the observer.
    radi=np.sin(np.pi*centres/180) #projected polar radius of the ring.
    amu=np.cos(np.pi*centres/180) #amus
    ts=[0.0] #central grid
    alphas=[0.0] #central grid
    area=[2.0*np.pi*(1.0-np.cos(width*np.pi/360.0))] #area central element
    parea=[np.pi*np.sin(width*np.pi/360.0)**2] #projected area of central element
    Ngrid_in_ring=[1]
    for i in range(1,len(amu)): #for each ring except first
        Nang=int(round(len(anglesout)*(radi[i]))) #Number of longitudes to have grids of same width
        w=360/Nang #width and angles
        Ngrid_in_ring.append(Nang)
        angles=np.linspace(0,360-w,Nang)
        area.append(radi[i]*width*w*np.pi*np.pi/(180*180)) #area of each grid
        parea.append(amu[i]*area[-1]) #PROJ. AREA OF THE GRID
        for j in range(Nang):
            ts.append(centres[i]) #latitude
            alphas.append(angles[j]) #longitude
    alphas=np.array(alphas) #longitude of grid (pole faces observer)
    ts=np.array(ts) #colatitude of grid
    Ngrids=len(ts) #number of grids
    rs = np.sin(np.pi*ts/180) #projected polar radius of grid
    xs = np.cos(np.pi*ts/180) #grid elements in cartesian coordinates. Note that pole faces the observer.
    ys = rs*np.sin(np.pi*alphas/180)
    zs = -rs*np.cos(np.pi*alphas/180)
    return Ngrids,Ngrid_in_ring, centres, amu, rs, alphas, xs, ys, zs, area, parea

@nb.njit(cache=True,error_model='numpy')
def loop_compute_immaculate_nb(N,Ngrid_in_ring,ccf_tot,rvel,rv,rvs_ring,ccf_ring):
    """Accumulate the Doppler-shifted per-ring CCF into every grid element's row of ccf_tot.

    ccf_tot is mutated in place and also returned.
    """
    #CCF of each pixel, adding doppler and interpolating
    iteration=0
    #Compute the position of the grid projected on the sphere and its radial velocity.
    for i in range(0,N): #Loop for each ring.
        for j in range(Ngrid_in_ring[i]): #loop for each grid in the ring
            ccf_tot[iteration,:]=ccf_tot[iteration,:]+interpolation_nb(rv,rvs_ring[i,:] + rvel[iteration],ccf_ring[i,:],ccf_ring[i,0],ccf_ring[i,-1])
            iteration=iteration+1
    return ccf_tot

@nb.njit(cache=True,error_model='numpy')
def loop_generate_rotating_nb(N,Ngrid_in_ring,pare,amu,spot_pos,vec_grid,vec_spot,simulate_planet,planet_pos,ccf_ph,ccf_sp,ccf_fc,ccf_ph_tot,vis):
    """Total CCF of the rotating, spotted (and possibly transited) star.

    For every grid element the fractions covered by spots (asp), faculae
    (afc) and the planet (apl) are estimated geometrically, and the
    photosphere CCF contribution is swapped for the spot/facula one.
    vis flags visibility: vis[:-1] per spot, vis[-1] the planet.
    Returns (ccf_tot, typ per-grid coverage list, and projected areas
    Aph, Asp, Afc, Apl).
    NOTE(review): the original file is whitespace-collapsed; the indentation
    below is reconstructed (planet check per grid element, after the spot
    loop) to match the per-grid quantities it computes — confirm against the
    upstream project.
    """
    #define things
    width=np.pi/(2*N-1) #width of one grid element, in radians
    ccf_tot = ccf_ph_tot
    vis_spots_idx=[]
    for i in range(len(vis)-1):
        if vis[i]==1.0:
            vis_spots_idx.append(i)
    ###################### CENTRAL GRID ###############################
    #Central grid is different since it is a circle. initialize values.
    ####################################################################
    dsp=0.0 #fraction covered by each spot
    dfc=0.0
    asp=0.0 #fraction covered by all spots
    afc=0.0
    apl=0.0
    iteration = 0
    for l in vis_spots_idx: #for each spot
        if spot_pos[l][2]==0.0:
            continue
        dist=m.acos(np.dot(vec_grid[iteration],vec_spot[l])) #compute the distance to the grid
        if dist>(width/2+spot_pos[l][2]):
            dsp=0.0
        else:
            if (width/2)<spot_pos[l][2]: #if the spot can cover completely the grid, two cases:
                if dist<=spot_pos[l][2]-(width/2): #grid completely covered
                    dsp=1.0
                else: #grid partially covered
                    dsp=-(dist-spot_pos[l][2]-width/2)/width
            else: #the grid can completely cover the spot, two cases:
                if dist<=(width/2-spot_pos[l][2]): #all the spot is inside the grid
                    dsp=(2*spot_pos[l][2]/width)**2
                else: #grid partially covered
                    dsp=-2*spot_pos[l][2]*(dist-width/2-spot_pos[l][2])/width**2
        asp+=dsp
        #FACULA
        if spot_pos[l][3]==0.0: #if radius=0, there is no facula, jump to next spot with continue
            continue
        if dist>(width/2+spot_pos[l][3]):
            dfc=0.0
        else:
            if (width/2)<spot_pos[l][3]: #if the facula can cover completely the grid, two cases:
                if dist<=spot_pos[l][3]-(width/2): #grid completely covered
                    dfc=1.0 - dsp
                else: #grid partially covered
                    dfc=-(dist-spot_pos[l][3]-width/2)/width - dsp
            else: #if the grid can completely cover the facula, two cases:
                if dist<=(width/2-spot_pos[l][3]): #all the facula is inside the grid
                    dfc=(2*spot_pos[l][3]/width)**2 - dsp
                else: #grid partially covered
                    dfc =-2*spot_pos[l][3]*(dist-width/2-spot_pos[l][3])/width**2 - dsp
        afc+=dfc
    #PLANET
    if simulate_planet:
        if vis[-1]==1.0:
            dist=m.sqrt((planet_pos[0]*m.cos(planet_pos[1]) - vec_grid[iteration,1])**2 + ( planet_pos[0]*m.sin(planet_pos[1]) - vec_grid[iteration,2] )**2) #grid-planet distance
            width2=2*m.sin(width/2) #chord width of the central cap
            if dist>width2/2+planet_pos[2]:
                apl=0.0
            elif dist<planet_pos[2]-width2/2:
                apl=1.0
            else:
                apl=-(dist-planet_pos[2]-width2/2)/width2
    if afc>1.0:
        afc=1.0
    if asp>1.0: #spots win over faculae when oversubscribed
        asp=1.0
        afc=0.0
    if apl>0.0: #planet occults spot and facula proportionally
        asp=asp*(1-apl)
        afc=afc*(1-apl)
    aph=1-asp-afc-apl
    #add the corresponding ccf to the total CCF
    ccf_tot = ccf_tot - (1-aph)*ccf_ph[iteration] + asp*ccf_sp[iteration] + afc*ccf_fc[iteration]
    Aph=aph*pare[0]
    Asp=asp*pare[0]
    Afc=afc*pare[0]
    Apl=apl*pare[0]
    typ=[[aph,asp,afc,apl]]
    ############### OTHER GRIDS #######################
    # NOW DO THE SAME FOR THE REST OF GRIDS
    ###################################################
    for i in range(1,N): #Loop for each ring.
        for j in range(Ngrid_in_ring[i]): #Loop for each grid
            iteration+=1
            dsp=0.0 #fraction covered by each spot
            dfc=0.0
            asp=0.0 #fraction covered by all spots
            afc=0.0
            apl=0.0
            for l in vis_spots_idx:
                if spot_pos[l][2]==0.0: #if radius=0, there is no spot, jump to next spot with continue
                    continue
                dist=m.acos(np.dot(vec_grid[iteration],vec_spot[l])) #distance between spot centre and grid,multiplying two unit vectors
                #SPOT
                if dist>(width/2+spot_pos[l][2]): #grid not covered
                    dsp=0.0
                else:
                    if (width/m.sqrt(2))<spot_pos[l][2]: #if the spot can cover completely the grid, two cases:
                        if dist<=(m.sqrt(spot_pos[l][2]**2-(width/2)**2)-width/2): #grid completely covered
                            dsp=1.0
                        else: #grid partially covered
                            dsp=-(dist-spot_pos[l][2]-width/2)/(width+spot_pos[l][2]-m.sqrt(spot_pos[l][2]**2-(width/2)**2))
                    elif (width/2)>spot_pos[l][2]: #if the grid can completely cover the spot, two cases:
                        if dist<=(width/2-spot_pos[l][2]): #all the spot is inside the grid
                            dsp=(np.pi/4)*(2*spot_pos[l][2]/width)**2
                        else: #grid partially covered
                            dsp=(np.pi/4)*((2*spot_pos[l][2]/width)**2-(2*spot_pos[l][2]/width**2)*(dist-width/2+spot_pos[l][2]))
                    else: #if the spot is larger than the grid but not enough to cover it, grid partially covered by the spot
                        A1=(width/2)*m.sqrt(spot_pos[l][2]**2-(width/2)**2)
                        A2=(spot_pos[l][2]**2/2)*(m.pi/2-2*m.asin(m.sqrt(spot_pos[l][2]**2-(width/2)**2)/spot_pos[l][2]))
                        Ar=4*(A1+A2)/width**2
                        dsp=-Ar*(dist-width/2-spot_pos[l][2])/(width/2+spot_pos[l][2])
                asp+=dsp
                #FACULA
                if spot_pos[l][3]==0.0: #if radius=0, there is no facula, jump to next spot with continue
                    continue
                if dist>(width/2+spot_pos[l][3]): #grid not covered by faculae
                    dfc=0.0
                else:
                    if (width/m.sqrt(2))<spot_pos[l][3]: #if the facula can cover completely the grid, two cases:
                        if dist<=(m.sqrt(spot_pos[l][3]**2-(width/2)**2)-width/2): #grid completely covered
                            dfc=1.0-dsp #subtract spot
                        else: #grid partially covered
                            dfc=-(dist-spot_pos[l][3]-width/2)/(width+spot_pos[l][3]-m.sqrt(spot_pos[l][3]**2-(width/2)**2))-dsp
                    elif (width/2)>spot_pos[l][3]: #if the grid can completely cover the facula, two cases:
                        if dist<=(width/2-spot_pos[l][3]): #all the facula is inside the grid
                            dfc=(np.pi/4)*(2*spot_pos[l][3]/width)**2-dsp
                        else: #grid partially covered
                            dfc=(np.pi/4)*((2*spot_pos[l][3]/width)**2-(2*spot_pos[l][3]/width**2)*(dist-width/2+spot_pos[l][3]))-dsp
                    else: #if the facula is larger than the grid but not enough to cover it, grid partially covered
                        A1=(width/2)*m.sqrt(spot_pos[l][3]**2-(width/2)**2)
                        A2=(spot_pos[l][3]**2/2)*(m.pi/2-2*m.asin(m.sqrt(spot_pos[l][3]**2-(width/2)**2)/spot_pos[l][3]))
                        Ar=4*(A1+A2)/width**2
                        dfc=-Ar*(dist-width/2-spot_pos[l][3])/(width/2+spot_pos[l][3])-dsp
                afc+=dfc
            #PLANET
            if simulate_planet:
                if vis[-1]==1.0:
                    dist=m.sqrt((planet_pos[0]*m.cos(planet_pos[1]) - vec_grid[iteration,1])**2 + ( planet_pos[0]*m.sin(planet_pos[1]) - vec_grid[iteration,2] )**2) #grid-planet distance
                    width2=amu[i]*width #projected grid width at this ring
                    if dist>width2/2+planet_pos[2]:
                        apl=0.0
                    elif dist<planet_pos[2]-width2/2:
                        apl=1.0
                    else:
                        apl=-(dist-planet_pos[2]-width2/2)/width2
            if afc>1.0:
                afc=1.0
            if asp>1.0:
                asp=1.0
                afc=0.0
            if apl>0.0:
                asp=asp*(1-apl)
                afc=afc*(1-apl)
            aph=1-asp-afc-apl
            #add the corresponding ccf to the total CCF
            ccf_tot = ccf_tot - (1-aph)*ccf_ph[iteration] + asp*ccf_sp[iteration] + afc*ccf_fc[iteration]
            Aph=Aph+aph*pare[i]
            Asp=Asp+asp*pare[i]
            Afc=Afc+afc*pare[i]
            Apl=Apl+apl*pare[i]
            typ.append([aph,asp,afc,apl])
    return ccf_tot,typ, Aph, Asp, Afc, Apl

@nb.njit(cache=True,error_model='numpy')
def loop_generate_rotating_lc_nb(N,Ngrid_in_ring,pare,amu,spot_pos,vec_grid,vec_spot,simulate_planet,planet_pos,bph,bsp,bfc,flxph,vis):
    """Light-curve twin of loop_generate_rotating_nb: same coverage geometry,
    but accumulates scalar brightness (bph/bsp/bfc per ring) instead of CCFs.

    Returns (flux, typ, Aph, Asp, Afc, Apl).
    NOTE(review): in the central-grid flux update below, `i` is left over
    from the visibility loop above (i = len(vis)-2), whereas the CCF version
    indexes the central element (ring 0). It likely should be bph[0]/bsp[0]/
    bfc[0] — confirm against the upstream project before changing.
    """
    #define things
    width=np.pi/(2*N-1) #width of one grid element, in radians
    flux = flxph
    vis_spots_idx=[]
    for i in range(len(vis)-1):
        if vis[i]==1.0:
            vis_spots_idx.append(i)
    ###################### CENTRAL GRID ###############################
    #Central grid is different since it is a circle. initialize values.
    ####################################################################
    dsp=0.0 #fraction covered by each spot
    dfc=0.0
    asp=0.0 #fraction covered by all spots
    afc=0.0
    apl=0.0
    iteration = 0
    for l in vis_spots_idx: #for each spot
        if spot_pos[l][2]==0.0:
            continue
        dist=m.acos(np.dot(vec_grid[iteration],vec_spot[l])) #compute the distance to the grid
        if dist>(width/2+spot_pos[l][2]):
            dsp=0.0
        else:
            if (width/2)<spot_pos[l][2]: #if the spot can cover completely the grid, two cases:
                if dist<=spot_pos[l][2]-(width/2): #grid completely covered
                    dsp=1.0
                else: #grid partially covered
                    dsp=-(dist-spot_pos[l][2]-width/2)/width
            else: #the grid can completely cover the spot, two cases:
                if dist<=(width/2-spot_pos[l][2]): #all the spot is inside the grid
                    dsp=(2*spot_pos[l][2]/width)**2
                else: #grid partially covered
                    dsp=-2*spot_pos[l][2]*(dist-width/2-spot_pos[l][2])/width**2
        asp+=dsp
        #FACULA
        if spot_pos[l][3]==0.0: #if radius=0, there is no facula, jump to next spot with continue
            continue
        if dist>(width/2+spot_pos[l][3]):
            dfc=0.0
        else:
            if (width/2)<spot_pos[l][3]: #if the facula can cover completely the grid, two cases:
                if dist<=spot_pos[l][3]-(width/2): #grid completely covered
                    dfc=1.0 - dsp
                else: #grid partially covered
                    dfc=-(dist-spot_pos[l][3]-width/2)/width - dsp
            else: #if the grid can completely cover the facula, two cases:
                if dist<=(width/2-spot_pos[l][3]): #all the facula is inside the grid
                    dfc=(2*spot_pos[l][3]/width)**2 - dsp
                else: #grid partially covered
                    dfc =-2*spot_pos[l][3]*(dist-width/2-spot_pos[l][3])/width**2 - dsp
        afc+=dfc
    #PLANET
    if simulate_planet:
        if vis[-1]==1.0:
            dist=m.sqrt((planet_pos[0]*m.cos(planet_pos[1]) - vec_grid[iteration,1])**2 + ( planet_pos[0]*m.sin(planet_pos[1]) - vec_grid[iteration,2] )**2) #grid-planet distance
            width2=2*m.sin(width/2) #chord width of the central cap
            if dist>width2/2+planet_pos[2]:
                apl=0.0
            elif dist<planet_pos[2]-width2/2:
                apl=1.0
            else:
                apl=-(dist-planet_pos[2]-width2/2)/width2
    if afc>1.0:
        afc=1.0
    if asp>1.0:
        asp=1.0
        afc=0.0
    if apl>0.0:
        asp=asp*(1-apl)
        afc=afc*(1-apl)
    aph=1-asp-afc-apl
    #add the corresponding flux to the total flux (see NOTE(review) in the docstring about the index `i` here)
    flux = flux - (1-aph)*bph[i]+asp*bsp[i]+bfc[i]*afc
    Aph=aph*pare[0]
    Asp=asp*pare[0]
    Afc=afc*pare[0]
    Apl=apl*pare[0]
    typ=[[aph,asp,afc,apl]]
    ############### OTHER GRIDS #######################
    # NOW DO THE SAME FOR THE REST OF GRIDS
    ###################################################
    for i in range(1,N): #Loop for each ring.
        for j in range(Ngrid_in_ring[i]): #Loop for each grid
            iteration+=1
            dsp=0.0 #fraction covered by each spot
            dfc=0.0
            asp=0.0 #fraction covered by all spots
            afc=0.0
            apl=0.0
            for l in vis_spots_idx:
                if spot_pos[l][2]==0.0: #if radius=0, there is no spot, jump to next spot with continue
                    continue
                dist=m.acos(np.dot(vec_grid[iteration],vec_spot[l])) #distance between spot centre and grid,multiplying two unit vectors
                #SPOT
                if dist>(width/2+spot_pos[l][2]): #grid not covered
                    dsp=0.0
                else:
                    if (width/m.sqrt(2))<spot_pos[l][2]: #if the spot can cover completely the grid, two cases:
                        if dist<=(m.sqrt(spot_pos[l][2]**2-(width/2)**2)-width/2): #grid completely covered
                            dsp=1.0
                        else: #grid partially covered
                            dsp=-(dist-spot_pos[l][2]-width/2)/(width+spot_pos[l][2]-m.sqrt(spot_pos[l][2]**2-(width/2)**2))
                    elif (width/2)>spot_pos[l][2]: #if the grid can completely cover the spot, two cases:
                        if dist<=(width/2-spot_pos[l][2]): #all the spot is inside the grid
                            dsp=(np.pi/4)*(2*spot_pos[l][2]/width)**2
                        else: #grid partially covered
                            dsp=(np.pi/4)*((2*spot_pos[l][2]/width)**2-(2*spot_pos[l][2]/width**2)*(dist-width/2+spot_pos[l][2]))
                    else: #if the spot is larger than the grid but not enough to cover it, grid partially covered by the spot
                        A1=(width/2)*m.sqrt(spot_pos[l][2]**2-(width/2)**2)
                        A2=(spot_pos[l][2]**2/2)*(m.pi/2-2*m.asin(m.sqrt(spot_pos[l][2]**2-(width/2)**2)/spot_pos[l][2]))
                        Ar=4*(A1+A2)/width**2
                        dsp=-Ar*(dist-width/2-spot_pos[l][2])/(width/2+spot_pos[l][2])
                asp+=dsp
                #FACULA
                if spot_pos[l][3]==0.0: #if radius=0, there is no facula, jump to next spot with continue
                    continue
                if dist>(width/2+spot_pos[l][3]): #grid not covered by faculae
                    dfc=0.0
                else:
                    if (width/m.sqrt(2))<spot_pos[l][3]: #if the facula can cover completely the grid, two cases:
                        if dist<=(m.sqrt(spot_pos[l][3]**2-(width/2)**2)-width/2): #grid completely covered
                            dfc=1.0-dsp #subtract spot
                        else: #grid partially covered
                            dfc=-(dist-spot_pos[l][3]-width/2)/(width+spot_pos[l][3]-m.sqrt(spot_pos[l][3]**2-(width/2)**2))-dsp
                    elif (width/2)>spot_pos[l][3]: #if the grid can completely cover the facula, two cases:
                        if dist<=(width/2-spot_pos[l][3]): #all the facula is inside the grid
                            dfc=(np.pi/4)*(2*spot_pos[l][3]/width)**2-dsp
                        else: #grid partially covered
                            dfc=(np.pi/4)*((2*spot_pos[l][3]/width)**2-(2*spot_pos[l][3]/width**2)*(dist-width/2+spot_pos[l][3]))-dsp
                    else: #if the facula is larger than the grid but not enough to cover it, grid partially covered
                        A1=(width/2)*m.sqrt(spot_pos[l][3]**2-(width/2)**2)
                        A2=(spot_pos[l][3]**2/2)*(m.pi/2-2*m.asin(m.sqrt(spot_pos[l][3]**2-(width/2)**2)/spot_pos[l][3]))
                        Ar=4*(A1+A2)/width**2
                        dfc=-Ar*(dist-width/2-spot_pos[l][3])/(width/2+spot_pos[l][3])-dsp
                afc+=dfc
            #PLANET
            if simulate_planet:
                if vis[-1]==1.0:
                    dist=m.sqrt((planet_pos[0]*m.cos(planet_pos[1]) - vec_grid[iteration,1])**2 + ( planet_pos[0]*m.sin(planet_pos[1]) - vec_grid[iteration,2] )**2) #grid-planet distance
                    width2=amu[i]*width #projected grid width at this ring
                    if dist>width2/2+planet_pos[2]:
                        apl=0.0
                    elif dist<planet_pos[2]-width2/2:
                        apl=1.0
                    else:
                        apl=-(dist-planet_pos[2]-width2/2)/width2
            if afc>1.0:
                afc=1.0
            if asp>1.0:
                asp=1.0
                afc=0.0
            if apl>0.0:
                asp=asp*(1-apl)
                afc=afc*(1-apl)
            aph=1-asp-afc-apl
            #add the corresponding ccf to the total CCF
            flux = flux - (1-aph)*bph[i]+asp*bsp[i]+bfc[i]*afc
            Aph=Aph+aph*pare[i]
            Asp=Asp+asp*pare[i]
            Afc=Afc+afc*pare[i]
            Apl=Apl+apl*pare[i]
            typ.append([aph,asp,afc,apl])
    return flux ,typ, Aph, Asp, Afc, Apl
################################################################
#                    FAST MODE ROUTINES
################################################################
#PHOTOMETRY
@nb.njit(cache=True,error_model='numpy') def generate_rotating_photosphere_fast_lc(obs_times,Ngrid_in_ring,acd,amu,pare,flnp,flns,filter_trans,N,use_phoenix_mu,LD_law,LD1,LD2,spot_map,ref_time,Prot,diff_rot,Revo,Q,inc,temp_ph,temp_fc,simulate_planet,esinw,ecosw,T0p,Pp,Rpl,b,a,alp): flxph = 0.0 #initialze flux of photosphere sflp=np.zeros(N) #brightness of ring flp=np.zeros((N,len(filter_trans))) #spectra of each ring convolved by filter ################### IMMACULATE FLUX ########################### #Computing flux of immaculate photosphere and of every pixel for i in range(0,N): #Loop for each ring, to compute the flux of the star. #Interpolate Phoenix intensity models to correct projected ange: if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu[i]*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flnp[idx_low]+(flnp[idx_upp]-flnp[idx_low])*(amu[i]-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #spectra of the projected angle. includes limb darkening else: #or use a specified limb darkening law to multiply central spectra dlp = flnp[0]*limb_darkening_law(LD_law,LD1,LD2,amu[i]) flp[i,:]=dlp*pare[i]/(4*np.pi)*filter_trans #spectra of one grid in ring N multiplied by the filter. sflp[i]=np.sum(flp[i,:]) #brightness of onegrid in ring N. flxph=flxph+sflp[i]*Ngrid_in_ring[i] #total BRIGHTNESS of the immaculate photosphere ######################## ROTATE PHOTSPHERE FOR EACH TIME ################################ flux=flxph+np.zeros((len(obs_times))) #initialize total flux at each timestamp filling_sp=0.0+np.zeros(len(obs_times)) filling_ph=m.pi+np.zeros(len(obs_times)) filling_pl=0.0+np.zeros(len(obs_times)) filling_fc=0.0+np.zeros(len(obs_times)) for k,t in enumerate(obs_times): if simulate_planet: planet_pos=compute_planet_pos(t,esinw,ecosw,T0p,Pp,Rpl,b,a,alp)#compute the planet position at current time. In polar coordinates!! 
else: planet_pos = np.array([2.0,0.0,0.0],dtype=np.float64) if spot_map.size==0: spot_pos=np.expand_dims(np.array([m.pi/2,-m.pi,0.0,0.0]),axis=0) else: spot_pos=compute_spot_position(t,spot_map,ref_time,Prot,diff_rot,Revo,Q) #compute the position of all spots at the current time. Returns theta and phi of each spot. #convert latitude/longitude of spot centre to XYZ vec_spot=np.zeros((len(spot_map),3)) for i in range(len(spot_map)): xspot = m.cos(inc)*m.sin(spot_pos[i,0])*m.cos(spot_pos[i,1])+m.sin(inc)*m.cos(spot_pos[i,0]) yspot = m.sin(spot_pos[i,0])*m.sin(spot_pos[i,1]) zspot = m.cos(spot_pos[i,0])*m.cos(inc)-m.sin(inc)*m.sin(spot_pos[i,0])*m.cos(spot_pos[i,1]) vec_spot[i,:]=np.array([xspot,yspot,zspot]).T #spot center in cartesian #Loop for each spot. for i in range(len(vec_spot)): if spot_pos[i][2]==0.0: #if radius is 0, go to next spot continue dist=m.acos(np.dot(vec_spot[i],np.array([1.,0.,0.]))) #Angle center spot to center of star. if (dist-spot_pos[i,2]*np.sqrt(1.0+Q)) > (m.pi/2): #spot & facula not visible. Jump to next spot. 
continue beta=np.pi/2-dist #angle of the spot with the edge of the star alpha=spot_pos[i,2] #angle of the radii of the spot ############ FACULA PROJECTED AREA ################## if Q>0.0: #facula alphaf=spot_pos[i,2]*m.sqrt(1.0+Q) #angle of the radii of the faculae #CASE 1: FACULA OUTSIDE OUTSIDE-> NULL, ALREADY EVALUATED BEFORE #CASE 2: ALL FACULAE INSIDE -> ELLIPSE if 0.0 < alphaf <= beta: ay=m.sin(alphaf) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse Ape=ax*ay*m.pi/4.0 pare_fac = 2.0*2.0*Ape #area of ellipse (projected area of spot) amu_fac=m.cos(dist) #CASE 3: MOST OF THE FACULA INSIDE -> ELLIPSE + LUNE elif 0.0 <= beta < alphaf: ay=m.sin(alphaf) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Ape=ax*ay*m.pi/4.0 #y=ay Apep= ax*ay*0.5*((ylay)*m.sqrt(1.0-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alphaf)*m.cos(beta) pare_fac = 2.0*(2.0*Ape + Apcp - Apep) #proj. area is described in Urena et al. Stratified Sampling of Projected Spherical Caps amu_fac=m.sin((beta+alphaf)/2.0) #representative mu as the mean point between edge of spot and of star #CASE 4: MOST OF THE FACULA OUTSIDE-> LUNE elif 0.0 < (-beta) < alphaf: ay=m.sin(alphaf) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) #intesection x and y between ellipse and lune ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Apep= ax*ay*0.5*((ylay)*m.sqrt(1-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alphaf)*m.cos(beta) pare_fac = 2.0*(Apep + Apcp) #proj. 
area is area of lune amu_fac=m.sin((beta+alphaf)/2.0) ######### SPOT PROJECTED AREA ############ #CASE 1: SPOT OUTSIDE-> NULL if 0.0 <= alpha <= (-beta): pare_spot=0.0 amu_spot=0.0 #CASE 2: ALL SPOT INSIDE -> ELLIPSE elif 0.0 < alpha <= beta: ay=m.sin(alpha) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse Ape=ax*ay*m.pi/4.0 pare_spot = 2.0*2.0*Ape #area of ellipse (projected area of spot) amu_spot=m.cos(dist) #CASE 3: MOST OF THE SPOT INSIDE -> ELLIPSE + LUNE elif 0.0 <= beta < alpha: ay=m.sin(alpha) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Ape=ax*ay*m.pi/4.0 #y=ay Apep= ax*ay*0.5*((ylay)*m.sqrt(1.0-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alpha)*m.cos(beta) pare_spot = 2.0*(2.0*Ape + Apcp - Apep) #proj. area is described in Urena et al. Stratified Sampling of Projected Spherical Caps amu_spot=m.sin((beta+alpha)/2.0) #representative mu as the mean point between edge of spot and of star #CASE 4: MOST OF THE SPOT OUTSIDE-> LUNE elif 0.0 < (-beta) < alpha: ay=m.sin(alpha) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) #intesection x and y between ellipse and lune ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Apep= ax*ay*0.5*((ylay)*m.sqrt(1-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alpha)*m.cos(beta) pare_spot = 2.0*(Apep + Apcp) #proj. 
area is area of lune amu_spot=m.sin((beta+alpha)/2.0) #Spot, photosphere, and faculae flux at the angle of the spot if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_spot*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flnp[idx_low]+(flnp[idx_upp]-flnp[idx_low])*(amu_spot-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening dls = flns[idx_low]+(flns[idx_upp]-flns[idx_low])*(amu_spot-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_spot) dlp = flnp[0]*ld dls = flns[0]*ld flux_phsp=np.sum(dlp*pare_spot/(4*np.pi)*filter_trans) #flux of the photosphere occuppied by the spot. flux_sp=np.sum(dls*pare_spot/(4*np.pi)*filter_trans) #flux of the spot if Q>0.0: pare_facula= pare_fac - pare_spot if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_fac*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flnp[idx_low]+(flnp[idx_upp]-flnp[idx_low])*(amu_fac-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_fac) dlp = flnp[0]*ld flux_phfc=np.sum(dlp*pare_fac/(4*np.pi)*filter_trans) #flux of the photosphere occuppied by the spot. 
dtfmu=250.9-407.4*amu_fac+190.9*amu_fac**2 #(T_fac-T_ph) multiplied by a factor depending on the flux_fc=np.sum(dlp*pare_fac/(4*np.pi)*filter_trans)*((temp_ph+dtfmu)/(temp_fc))**4 #flux of the spot else: flux_phfc = 0.0 flux_fc = 0.0 pare_facula = 0.0 flux[k] = flux[k] - flux_phsp + flux_sp - flux_phfc + flux_fc #total flux - photosphere + spot filling_sp[k] = filling_sp[k] + pare_spot filling_ph[k] = filling_ph[k] - pare_spot - pare_facula filling_fc[k] = filling_fc[k] + pare_facula ################### PLANETARY TRANSIT PROJECTED AREA ######################## if simulate_planet: if planet_pos[0]-planet_pos[2]>= 1.0: #all planet outside pare_pl = 0.0 amu_pl = 0.0 block = 'none' elif planet_pos[0]+planet_pos[2] <= 1.0: #all planet inside pare_pl = m.pi*planet_pos[2]**2 #area of a circle amu_pl = m.sqrt(1-planet_pos[0]**2) #cos(mu)**2=1-sin(mu)**2=1-r**2 block='ph' for i in range(len(vec_spot)): #check if planet is over a spot or photosphere or faculae distsp=m.acos(np.dot(vec_spot[i],np.array([1.,0.,0.]))) #Angle center spot to center of star. if (distsp-spot_pos[i,2]*np.sqrt(1.0+Q)) >= (m.pi/2): #spot & facula not visible. Jump to next spot. block='ph' continue dist=m.acos(np.dot(np.array([m.cos(m.asin(planet_pos[0])),planet_pos[0]*m.cos(planet_pos[1]),planet_pos[0]*m.sin(planet_pos[1])]),vec_spot[i])) #spot-planet centers distance if dist < spot_pos[i,2]: #if the distance is lower than spot radius, most of the planet is inside the spot if (distsp-spot_pos[i,2]) >= (m.pi/2): #if spot is not visible block='ph' else: block = 'sp' elif dist < spot_pos[i,2]*m.sqrt(1+Q): #if the distance is lower than facula radius, most of the planet is inside the facula block = 'fc' elif (block != 'sp') and (block != 'fc'): #if the planet is not blocking a spot or a facula, then its blocking ph block = 'ph' #else, the planet is blocking photosphere else: #the planet is partially covering the star. 
d1=(1-planet_pos[2]**2+planet_pos[0]**2)/(2*planet_pos[0]) #dist from star centre to centre of intersection d2=planet_pos[0]-d1 #dist from centre of planet to centre of intersection dedge = 1-(1+planet_pos[2]-planet_pos[0])/2 #dist from centre star to centre intersection pare_pl = m.acos(d1) - d1*m.sqrt(1-d1**2) + planet_pos[2]**2*m.acos(d2/planet_pos[2]) - d2*m.sqrt(planet_pos[2]**2-d2**2) #area of intersection star-planet amu_pl = m.sqrt(1-(dedge)**2) #amu is represented by the mean point of the intersection. block='ph' for i in range(len(vec_spot)): #check if planet is over a spot or photosphere or faculae distsp=m.acos(np.dot(vec_spot[i],np.array([1.,0.,0.]))) #Angle center spot to center of star. if (distsp-spot_pos[i,2]*np.sqrt(1.0+Q)) >= (m.pi/2): #spot & facula not visible. Jump to next spot. block='ph' continue dist=m.acos(np.dot(np.array([m.cos(m.asin(dedge)),dedge*m.cos(planet_pos[1]),dedge*m.sin(planet_pos[1])]),vec_spot[i])) #distance from spot to centre of planet-star intersection if dist < spot_pos[i,2]: #if the distance is lower than spot radius, most of the planet is inside the spot if (distsp-spot_pos[i,2]) > (m.pi/2): #if spot is not visible block='ph' else: block = 'sp' elif dist < spot_pos[i,2]*m.sqrt(1+Q): #if the distance is lower than facula radius, most of the planet is inside the facula block = 'fc' elif (block != 'sp') and (block != 'fc'): #if the planet is not blocking a spot or a facula, then its blocking ph block = 'ph' #else, the planet is blocking photosphere #compute and subtract flux blocked by the planet if block == 'ph': if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_pl*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flnp[idx_low]+(flnp[idx_upp]-flnp[idx_low])*(amu_pl-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_pl) dlp = flnp[0]*ld 
flux_pl=np.sum(dlp*pare_pl/(4*np.pi)*filter_trans) #flux of the photosphere occuppied by the planet. flux[k] = flux[k] - flux_pl #total flux - flux blocked filling_ph[k] = filling_ph[k] - pare_pl filling_pl[k] = filling_pl[k] + pare_pl if block == 'sp': #flux blocked by the planet if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_pl*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dls = flns[idx_low]+(flns[idx_upp]-flns[idx_low])*(amu_pl-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_pl) dls = flns[0]*ld flux_pl=np.sum(dls*pare_pl/(4*np.pi)*filter_trans) #flux of the photosphere occuppied by the planet. flux[k] = flux[k] - flux_pl #total flux - flux blocked filling_sp[k] = filling_sp[k] - pare_pl filling_pl[k] = filling_pl[k] + pare_pl if block == 'fc': #flux blocked by the spot if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_pl*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flnp[idx_low]+(flnp[idx_upp]-flnp[idx_low])*(amu_pl-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_pl) dlp = flnp[0]*ld dtfmu=250.9-407.4*amu_pl+190.9*amu_pl**2 #(T_fac-T_ph) multiplied by a factor depending on the flux_pl=np.sum(dlp*pare_pl/(4*np.pi)*filter_trans)*((temp_ph+dtfmu)/(temp_fc))**4 #flux of the facula occuppied by the planet. 
flux[k] = flux[k] - flux_pl #total flux - flux blocked filling_fc[k] = filling_fc[k] - pare_pl filling_pl[k] = filling_pl[k] + pare_pl filling_ph[k]=100*filling_ph[k]/m.pi filling_sp[k]=100*filling_sp[k]/m.pi filling_fc[k]=100*filling_fc[k]/m.pi filling_pl[k]=100*filling_pl[k]/m.pi return obs_times, flux/flxph, filling_ph, filling_sp, filling_fc, filling_pl ############### #CCFS ############### @nb.njit(cache=True,error_model='numpy') def fun_spot_bisect(ccf): rv=-1.51773453*ccf**4 +3.52774949*ccf**3 -3.18794328*ccf**2 +1.22541774*ccf -0.22479665 #Polynomial fit to ccf in Fig 2 of Dumusque 2014, plus 400m/s to match Fig6 in Herrero 2016 return rv @nb.njit(cache=True,error_model='numpy') def fun_cifist(ccf,amu): '''Interpolate the cifist bisectors as a function of the projected angle ''' # amv=np.arange(1,0.0,-0.1) #list of angles defined in cfist amv=np.arange(0.0,1.01,0.1) #list of angles defined in cfist idx_upp=np.searchsorted(amv,amu*0.999999999,side='right') idx_low=idx_upp-1 cxm=np.zeros((len(amv),7)) #coeff of the bisectors. 
NxM, N is number of angles, M=7, the degree of the polynomial #PARAMS COMPUTED WITH HARPS MASK cxm[10,:]=np.array([-3.51974861,11.1702017,-13.22368296,6.67694456,-0.63201573,-0.44695616,-0.36838495]) #1.0 cxm[9,:]=np.array([-4.05903967,13.21901003,-16.47215949,9.51023171,-2.13104764,-0.05153799,-0.36973749]) #0.9 cxm[8,:]=np.array([-3.92153131,12.76694663,-15.96958217,9.39599116,-2.34394028,0.12546611,-0.42092905]) #0.8 cxm[7,:]=np.array([-3.81892968,12.62209118,-16.06973368,9.71487198,-2.61439945,0.25356088,-0.43310756]) #0.7 cxm[6,:]=np.array([-5.37213406,17.6604689,-22.52477323,13.91461247,-4.13186181,0.60271171,-0.46427559]) #0.6 cxm[5,:]=np.array([-6.35351933,20.92046705,-26.83933359,16.86220487,-5.28285592,0.90643187,-0.47696283]) #0.5 cxm[4,:]=np.array([-7.67270144,25.60866105,-33.4381214,21.58855269,-7.1527039,1.35990694,-0.48001707]) #0.4 cxm[3,:]=np.array([-9.24152009,31.09337903,-41.07410957,27.04196984,-9.32910982,1.89291407,-0.455407]) #0.3 cxm[2,:]=np.array([-11.62006536,39.30962189,-52.38161244,34.98243089,-12.40650704,2.57940618,-0.37337442]) #0.2 cxm[1,:]=np.array([-14.14768805,47.9566719,-64.20294114,43.23156971,-15.57423374,3.13318175,-0.14451226]) #0.1 cxm[0,:]=np.array([-16.67531074,56.60372191,-76.02426984,51.48070853,-18.74196044,3.68695732,0.0843499 ]) #0.0 #interpolate cxu=cxm[idx_low]+(cxm[idx_upp]-cxm[idx_low])*(amu-amv[idx_low])/(amv[idx_upp]-amv[idx_low]) rv = cxu[0]*ccf**6 + cxu[1]*ccf**5 + cxu[2]*ccf**4 + cxu[3]*ccf**3 + cxu[4]*ccf**2 + cxu[5]*ccf + cxu[6] return rv @nb.njit(cache=True,error_model='numpy') def generate_rotating_photosphere_fast_rv(obs_times,Ngrid_in_ring,acd,amu,pare,rv,rv_ph,rv_sp,rv_fc,ccf_ph_tot,ccf_ph,ccf_sp,ccf_fc,fluxph,flpk,flsk,N,use_phoenix_mu,LD_law,LD1,LD2,spot_map,ref_time,Prot,diff_rot,Revo,Q,inc,vsini,CB,temp_ph,temp_fc,simulate_planet,esinw,ecosw,T0p,Pp,Rpl,b,a,alp): ######################## ROTATE PHOTSPHERE FOR EACH TIME ################################ 
ccf=ccf_ph_tot*np.ones((len(obs_times),len(ccf_ph_tot))) #initialize total flux at each timestamp filling_sp=0.0+np.zeros(len(obs_times)) filling_ph=m.pi+np.zeros(len(obs_times)) filling_pl=0.0+np.zeros(len(obs_times)) filling_fc=0.0+np.zeros(len(obs_times)) for k,t in enumerate(obs_times): if simulate_planet: planet_pos=compute_planet_pos(t,esinw,ecosw,T0p,Pp,Rpl,b,a,alp)#compute the planet position at current time. In polar coordinates!! else: planet_pos = np.array([2.0,0.0,0.0],dtype=np.float64) if spot_map.size==0: spot_pos=np.expand_dims(np.array([m.pi/2,-m.pi,0.0,0.0]),axis=0) else: spot_pos=compute_spot_position(t,spot_map,ref_time,Prot,diff_rot,Revo,Q) #compute the position of all spots at the current time. Returns theta and phi of each spot. #convert latitude/longitude of spot centre to XYZ vec_spot=np.zeros((len(spot_map),3)) for i in range(len(spot_map)): xspot = m.cos(inc)*m.sin(spot_pos[i,0])*m.cos(spot_pos[i,1])+m.sin(inc)*m.cos(spot_pos[i,0]) yspot = m.sin(spot_pos[i,0])*m.sin(spot_pos[i,1]) zspot = m.cos(spot_pos[i,0])*m.cos(inc)-m.sin(inc)*m.sin(spot_pos[i,0])*m.cos(spot_pos[i,1]) vec_spot[i,:]=np.array([xspot,yspot,zspot]).T #spot center in cartesian #Loop for each spot. for i in range(len(vec_spot)): if spot_pos[i][2]==0.0: #if radius is 0, go to next spot continue dist=m.acos(np.dot(vec_spot[i],np.array([1.,0.,0.]))) #Angle center spot to center of star. if (dist-spot_pos[i,2]*np.sqrt(1.0+Q)) > (m.pi/2): #spot & facula not visible. Jump to next spot. 
continue beta=np.pi/2-dist #angle of te spot with the edge of the star alpha=spot_pos[i,2] #angle of the radii of the spot ############ FACULA PROJECTED AREA ################## if Q>0.0: #facula alphaf=spot_pos[i,2]*m.sqrt(1.0+Q) #angle of the radii of the faculae #CASE 1: FACULA OUTSIDE OUTSIDE-> NULL, ALREADY EVALUATED BEFORE #CASE 2: ALL FACULAE INSIDE -> ELLIPSE if 0.0 < alphaf <= beta: ay=m.sin(alphaf) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse Ape=ax*ay*m.pi/4.0 pare_fac = 2.0*2.0*Ape #area of ellipse (projected area of spot) amu_fac=m.cos(dist) rvel_fac=vsini*m.sin(spot_pos[i,0])*m.sin(spot_pos[i,1]) #CASE 3: MOST OF THE FACULA INSIDE -> ELLIPSE + LUNE elif 0.0 <= beta < alphaf: ay=m.sin(alphaf) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Ape=ax*ay*m.pi/4.0 #y=ay Apep= ax*ay*0.5*((ylay)*m.sqrt(1.0-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alphaf)*m.cos(beta) pare_fac = 2.0*(2.0*Ape + Apcp - Apep) #proj. area is described in Urena et al. 
Stratified Sampling of Projected Spherical Caps amu_fac=m.sin((beta+alphaf)/2.0) #representative mu as the mean point between edge of spot and of star #position in polar: r_fac=m.cos((beta+alphaf)/2.0) t_fac=m.atan2(vec_spot[i,2],vec_spot[i,1]) #in spherical x_fac=amu_fac y_fac=r_fac*m.cos(t_fac) z_fac=r_fac*m.sin(t_fac) #in star coords colat_fac, lon_fac = m.acos(z_fac*m.cos(-inc)-x_fac*m.sin(-inc)), m.atan2(y_fac,x_fac*m.cos(-inc)+z_fac*m.sin(-inc)) #rvel of spot rvel_fac=vsini*m.sin(colat_fac)*m.sin(lon_fac) #CASE 4: MOST OF THE FACULA OUTSIDE-> LUNE elif 0.0 < (-beta) < alphaf: ay=m.sin(alphaf) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) #intesection x and y between ellipse and lune ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Apep= ax*ay*0.5*((ylay)*m.sqrt(1-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alphaf)*m.cos(beta) pare_fac = 2.0*(Apep + Apcp) #proj. 
area is area of lune amu_fac=m.sin((beta+alphaf)/2.0) #position in polar: r_fac=m.cos((beta+alphaf)/2.0) t_fac=m.atan2(vec_spot[i,2],vec_spot[i,1]) #in spherical x_fac=amu_fac y_fac=r_fac*m.cos(t_fac) z_fac=r_fac*m.sin(t_fac) #in star coords colat_fac, lon_fac = m.acos(z_fac*m.cos(-inc)-x_fac*m.sin(-inc)), m.atan2(y_fac,x_fac*m.cos(-inc)+z_fac*m.sin(-inc)) #rvel of spot rvel_fac=vsini*m.sin(colat_fac)*m.sin(lon_fac) ######### SPOT PROJECTED AREA ############ #CASE 1: SPOT OUTSIDE-> NULL if 0.0 <= alpha <= (-beta): pare_spot=0.0 amu_spot=0.0 #CASE 2: ALL SPOT INSIDE -> ELLIPSE elif 0.0 < alpha <= beta: ay=m.sin(alpha) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse Ape=ax*ay*m.pi/4.0 pare_spot = 2.0*2.0*Ape #area of ellipse (projected area of spot) amu_spot=m.cos(dist) rvel_spot=vsini*m.sin(spot_pos[i,0])*m.sin(spot_pos[i,1]) #CASE 3: MOST OF THE SPOT INSIDE -> ELLIPSE + LUNE elif 0.0 <= beta < alpha: ay=m.sin(alpha) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Ape=ax*ay*m.pi/4.0 #y=ay Apep= ax*ay*0.5*((ylay)*m.sqrt(1.0-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alpha)*m.cos(beta) pare_spot = 2.0*(2.0*Ape + Apcp - Apep) #proj. area is described in Urena et al. 
Stratified Sampling of Projected Spherical Caps amu_spot=m.sin((beta+alpha)/2.0) #representative mu as the mean point between edge of spot and of star #position in polar: r_spot=m.cos((beta+alpha)/2.0) t_spot=m.atan2(vec_spot[i,2],vec_spot[i,1]) #in spherical x_spot=amu_spot y_spot=r_spot*m.cos(t_spot) z_spot=r_spot*m.sin(t_spot) #in star coords colat_spot, lon_spot = m.acos(z_spot*m.cos(-inc)-x_spot*m.sin(-inc)), m.atan2(y_spot,x_spot*m.cos(-inc)+z_spot*m.sin(-inc)) #rvel of spot rvel_spot=vsini*m.sin(colat_spot)*m.sin(lon_spot) #CASE 4: MOST OF THE SPOT OUTSIDE-> LUNE elif 0.0 < (-beta) < alpha: ay=m.sin(alpha) #semiminor axis ellipse ax=ay*m.sin(beta) #semimajor axis ellipse yl=m.sqrt(1.0-(1.0-ay**2)/m.cos(beta)**2) #intesection x and y between ellipse and lune ylay=min(1.0,max(yl/ay,-1.0)) #yl/ay, to avoid getting values >1 and errors in asin, due to floating points Apep= ax*ay*0.5*((ylay)*m.sqrt(1-(ylay)**2)+m.asin((ylay)))# y''=yl Apcp= 0.5*(yl*m.sqrt(1.0-yl**2)+m.asin(yl)) - yl*m.cos(alpha)*m.cos(beta) pare_spot = 2.0*(Apep + Apcp) #proj. 
area is area of lune amu_spot=m.sin((beta+alpha)/2.0) #position in polar: r_spot=m.cos((beta+alpha)/2.0) t_spot=m.atan2(vec_spot[i,2],vec_spot[i,1]) #in spherical x_spot=amu_spot y_spot=r_spot*m.cos(t_spot) z_spot=r_spot*m.sin(t_spot) #in star coords colat_spot, lon_spot = m.acos(z_spot*m.cos(-inc)-x_spot*m.sin(-inc)), m.atan2(y_spot,x_spot*m.cos(-inc)+z_spot*m.sin(-inc)) #rvel of spot rvel_spot=vsini*m.sin(colat_spot)*m.sin(lon_spot) #Compute CCF ofspot an ph at amu if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_spot*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flpk[idx_low]+(flpk[idx_upp]-flpk[idx_low])*(amu_spot-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening dls = flsk[idx_low]+(flsk[idx_upp]-flsk[idx_low])*(amu_spot-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_spot) dlp = flpk[0]*ld dls = flsk[0]*ld flux_phsp=np.sum(dlp*pare_spot/(4*np.pi)) #flux of the photosphere occuppied by the spot. flux_spph=np.sum(dls*pare_spot/(4*np.pi)) #flux of the spot rv_phsp = rv_ph + rvel_spot + fun_cifist(ccf_ph,amu_spot)*1000.0*CB rv_spph = rv_sp + rvel_spot + fun_spot_bisect(ccf_sp)*amu_spot*1000.0*CB ccf_phsp=interpolation_nb(rv,rv_phsp,ccf_ph,ccf_ph[0],ccf_ph[-1]) #still normalized ccf. ccf_spph=interpolation_nb(rv,rv_spph,ccf_sp,ccf_sp[0],ccf_sp[-1]) #still normalized ccf. #Compute RVshift, shift CCF, and iterpolate the CCF values. CCF_phsp = ccf_phsp*flux_phsp/fluxph #the ccf of the element photosphere is the CCF weighted by the flux of the element over all the flux. 
CCF_spph = ccf_spph*flux_spph/fluxph if Q>0.0: pare_facula= pare_fac - pare_spot if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_fac*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flsk[idx_low]+(flsk[idx_upp]-flsk[idx_low])*(amu_fac-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_fac) dlp = flsk[0]*ld flux_phfc=np.sum(dlp*pare_fac/(4*np.pi)) #flux of the photosphere occuppied by the spot. dtfmu=250.9-407.4*amu_fac+190.9*amu_fac**2 #(T_fac-T_ph) multiplied by a factor depending on the flux_fcph=np.sum(dlp*pare_fac/(4*np.pi))*((temp_ph+dtfmu)/(temp_fc))**4 #flux of the spot #Compute RVshift, shift CCF, and iterpolate the CCF values. rv_phfc = rv_ph + rvel_fac + fun_cifist(ccf_ph,amu_fac)*1000.0*CB rv_fcph = rv_fc + rvel_fac + fun_spot_bisect(ccf_fc)*amu_fac*1000.0*CB ccf_phfc=interpolation_nb(rv,rv_phfc,ccf_ph,ccf_ph[0],ccf_ph[-1]) #still normalized ccf. ccf_fcph=interpolation_nb(rv,rv_fcph,ccf_fc,ccf_fc[0],ccf_fc[-1]) #still normalized ccf. #Compute RVshift, shift CCF, and iterpolate the CCF values. CCF_phfc = ccf_phfc*flux_phfc/fluxph #the ccf of the element photosphere is the CCF weighted by the flux of the element over all the flux. CCF_fcph = ccf_fcph*flux_fcph/fluxph else: CCF_phfc = ccf_ph*0.0 #the ccf of the element photosphere is the CCF weighted by the flux of the element over all the flux. 
CCF_fcph = ccf_fc*0.0 pare_facula = 0.0 ccf[k] = ccf[k] - CCF_phsp + CCF_spph - CCF_phfc + CCF_fcph #total CCF - photosphere_spot + spot - photosphere_fac + facula filling_sp[k] = filling_sp[k] + pare_spot filling_ph[k] = filling_ph[k] - pare_spot - pare_facula filling_fc[k] = filling_fc[k] + pare_facula ################### PLANETARY TRANSIT PROJECTED AREA ######################## if simulate_planet: if planet_pos[0]-planet_pos[2]>= 1.0: #all planet outside pare_pl = 0.0 amu_pl = 0.0 block = 'none' elif planet_pos[0]+planet_pos[2] <= 1.0: #all planet inside pare_pl = m.pi*planet_pos[2]**2 #area of a circle amu_pl = m.sqrt(1-planet_pos[0]**2) #cos(mu)**2=1-sin(mu)**2=1-r**2 x_pl=amu_pl y_pl=planet_pos[0]*m.cos(planet_pos[1]) z_pl=planet_pos[0]*m.sin(planet_pos[1]) #in star coords colat_pl, lon_pl = m.acos(z_pl*m.cos(-inc)-x_pl*m.sin(-inc)), m.atan2(y_pl,x_pl*m.cos(-inc)+z_pl*m.sin(-inc)) #rvel of spot rvel_pl=vsini*m.sin(colat_pl)*m.sin(lon_pl) block='ph' for i in range(len(vec_spot)): #check if planet is over a spot or photosphere or faculae distsp=m.acos(np.dot(vec_spot[i],np.array([1.,0.,0.]))) #Angle center spot to center of star. if (distsp-spot_pos[i,2]*np.sqrt(1.0+Q)) >= (m.pi/2): #spot & facula not visible. Jump to next spot. 
block='ph' continue dist=m.acos(np.dot(np.array([m.cos(m.asin(planet_pos[0])),planet_pos[0]*m.cos(planet_pos[1]),planet_pos[0]*m.sin(planet_pos[1])]),vec_spot[i])) #spot-planet centers distance if dist < spot_pos[i,2]: #if the distance is lower than spot radius, most of the planet is inside the spot if (distsp-spot_pos[i,2]) >= (m.pi/2): #if spot is not visible block='ph' else: block = 'sp' elif dist < spot_pos[i,2]*m.sqrt(1+Q): #if the distance is lower than facula radius, most of the planet is inside the facula block = 'fc' elif (block != 'sp') and (block != 'fc'): #if the planet is not blocking a spot or a facula, then its blocking ph block = 'ph' #else, the planet is blocking photosphere else: #the planet is partially covering the star. d1=(1-planet_pos[2]**2+planet_pos[0]**2)/(2*planet_pos[0]) #dist from star centre to intersection point d2=planet_pos[0]-d1 #dist from centre of planet to centre of intersection dedge = 1-(1+planet_pos[2]-planet_pos[0])/2 #dist from centre star to centre intersection pare_pl = m.acos(d1) - d1*m.sqrt(1-d1**2) + planet_pos[2]**2*m.acos(d2/planet_pos[2]) - d2*m.sqrt(planet_pos[2]**2-d2**2) #area of intersection star-planet amu_pl = m.sqrt(1-(dedge)**2) #amu is represented by the mean point of the intersection. #spherical coords of planet and corresponding RV of the surface x_pl=amu_pl y_pl=dedge*m.cos(planet_pos[1]) z_pl=dedge*m.sin(planet_pos[1]) #in star coords colat_pl, lon_pl = m.acos(z_pl*m.cos(-inc)-x_pl*m.sin(-inc)), m.atan2(y_pl,x_pl*m.cos(-inc)+z_pl*m.sin(-inc)) #rvel of surface rvel_pl=vsini*m.sin(colat_pl)*m.sin(lon_pl) block='ph' for i in range(len(vec_spot)): #check if planet is over a spot or photosphere or faculae distsp=m.acos(np.dot(vec_spot[i],np.array([1.,0.,0.]))) #Angle center spot to center of star. if (distsp-spot_pos[i,2]*np.sqrt(1.0+Q)) >= (m.pi/2): #spot & facula not visible. Jump to next spot. 
block='ph' continue dist=m.acos(np.dot(np.array([m.cos(m.asin(dedge)),dedge*m.cos(planet_pos[1]),dedge*m.sin(planet_pos[1])]),vec_spot[i])) #distance from spot to centre of planet-star intersection if dist < spot_pos[i,2]: #if the distance is lower than spot radius, most of the planet is inside the spot if (distsp-spot_pos[i,2]) > (m.pi/2): #if spot is not visible block='ph' else: block = 'sp' elif dist < spot_pos[i,2]*m.sqrt(1+Q): #if the distance is lower than facula radius, most of the planet is inside the facula block = 'fc' elif (block != 'sp') and (block != 'fc'): #if the planet is not blocking a spot or a facula, then its blocking ph block = 'ph' #else, the planet is blocking photosphere #compute and subtract flux blocked by the planet if block == 'ph': if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_pl*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flpk[idx_low]+(flpk[idx_upp]-flpk[idx_low])*(amu_pl-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_pl) dlp = flpk[0]*ld flux_phpl=np.sum(dlp*pare_pl/(4*np.pi)) #flux of the photosphere occuppied by the planet. rv_phpl = rv_ph + rvel_pl + fun_cifist(ccf_ph,amu_pl)*1000.0*CB ccf_phpl=interpolation_nb(rv,rv_phpl,ccf_ph,ccf_ph[0],ccf_ph[-1]) #still normalized ccf. #Compute RVshift, shift CCF, and iterpolate the CCF values. CCF_phpl = ccf_phpl*flux_phpl/fluxph #the ccf of the element photosphere is the CCF weighted by the flux of the element over all the flux. 
ccf[k] = ccf[k] - CCF_phpl #total flux - flux blocked filling_ph[k] = filling_ph[k] - pare_pl filling_pl[k] = filling_pl[k] + pare_pl if block == 'sp': #flux blocked by the planet if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_pl*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dls = flsk[idx_low]+(flsk[idx_upp]-flsk[idx_low])*(amu_pl-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_pl) dls = flsk[0]*ld flux_sppl=np.sum(dls*pare_pl/(4*np.pi)) #flux of the photosphere occuppied by the planet. rv_sppl = rv_sp + rvel_pl ccf_sppl=interpolation_nb(rv,rv_sppl,ccf_sp,ccf_sp[0],ccf_sp[-1]) #still normalized ccf. #Compute RVshift, shift CCF, and iterpolate the CCF values. CCF_sppl = ccf_sppl*flux_sppl/fluxph #the ccf of the element photosphere is the CCF weighted by the flux of the element over all the flux. ccf[k] = ccf[k] - CCF_sppl #total flux - flux blocked filling_sp[k] = filling_sp[k] - pare_pl filling_pl[k] = filling_pl[k] + pare_pl if block == 'fc': #flux blocked by the spot if use_phoenix_mu: idx_upp=len(acd)-1-np.searchsorted(np.flip(acd),amu_pl*0.999999999,side='right') #acd is sorted inversely idx_low=idx_upp+1 dlp = flpk[idx_low]+(flpk[idx_upp]-flpk[idx_low])*(amu_pl-acd[idx_low])/(acd[idx_upp]-acd[idx_low]) #limb darkening #limb darkening else: #or use a specified limb darkening law ld=limb_darkening_law(LD_law,LD1,LD2,amu_pl) dlp = flpk[0]*ld dtfmu=250.9-407.4*amu_pl+190.9*amu_pl**2 #(T_fac-T_ph) multiplied by a factor depending on the flux_fcpl=np.sum(dlp*pare_pl/(4*np.pi))*((temp_ph+dtfmu)/(temp_fc))**4 #flux of the facula occuppied by the planet. rv_fcpl = rv_fc + rvel_pl ccf_fcpl=interpolation_nb(rv,rv_fcpl,ccf_fc,ccf_fc[0],ccf_fc[-1]) #still normalized ccf. #Compute RVshift, shift CCF, and iterpolate the CCF values. 
CCF_fcpl = ccf_fcpl*flux_fcpl/fluxph #the ccf of the element photosphere is the CCF weighted by the flux of the element over all the flux. ccf[k] = ccf[k] - CCF_fcpl #total ccf - ccf blocked filling_fc[k] = filling_fc[k] - pare_pl filling_pl[k] = filling_pl[k] + pare_pl filling_ph[k]=100*filling_ph[k]/m.pi filling_sp[k]=100*filling_sp[k]/m.pi filling_fc[k]=100*filling_fc[k]/m.pi filling_pl[k]=100*filling_pl[k]/m.pi return obs_times, ccf, filling_ph, filling_sp, filling_fc, filling_pl @nb.njit(cache=True,error_model='numpy') def check_spot_overlap(spot_map,Q): #False if there is no overlap between spots N_spots=len(spot_map) for i in range(N_spots): for j in range(i+1,N_spots): t_ini_0 = spot_map[i][0] t_ini = spot_map[j][0] t_fin_0 = t_ini_0 + spot_map[i][1] t_fin = t_ini + spot_map[j][1] r_0 = np.max(spot_map[i][4:6]) r = np.max(spot_map[j][4:6]) th_0 = m.pi/2-spot_map[i][2]*m.pi/180 #latitude in radians th = m.pi/2-spot_map[j][2]*m.pi/180 #latitude in radians ph_0 = spot_map[i][3]*m.pi/180 #longitude in radians ph = spot_map[j][3]*m.pi/180 #longitude in radians dist = m.acos(m.sin(th_0)*m.sin(th) + m.cos(th_0)*m.cos(th)*m.cos(m.fabs(ph_0 - ph)))*180/m.pi #in if (dist<m.sqrt(Q+1)*(r_0+r)) and not ((t_ini>t_fin_0) or (t_ini_0>t_fin)): #if they touch and coincide in time return True return False
45.720903
297
0.544478
12,570
78,960
3.318457
0.065076
0.027186
0.02148
0.014672
0.775514
0.757174
0.748016
0.740776
0.720447
0.710306
0
0.059645
0.29696
78,960
1,727
298
45.720903
0.691778
0.235866
0
0.672355
0
0
0.007664
0
0
0
0
0
0
1
0.028157
false
0
0.00256
0.00256
0.059727
0.001706
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
da781f2bc2d2d2fb3373da185e4d497657f266bc
254
py
Python
myshkin/util/args.py
jakesnell/myshkin
cea0a625b1913627e27d66d0ada9155402f57d33
[ "MIT" ]
null
null
null
myshkin/util/args.py
jakesnell/myshkin
cea0a625b1913627e27d66d0ada9155402f57d33
[ "MIT" ]
null
null
null
myshkin/util/args.py
jakesnell/myshkin
cea0a625b1913627e27d66d0ada9155402f57d33
[ "MIT" ]
null
null
null
from docopt import docopt from attrdict import AttrDict def get_args(docstring): return AttrDict({k.replace("--", "").\ replace("<", "").\ replace(">", ""): v for k, v in docopt(docstring).iteritems()})
31.75
86
0.535433
26
254
5.192308
0.576923
0.207407
0
0
0
0
0
0
0
0
0
0
0.291339
254
7
87
36.285714
0.75
0
0
0
0
0
0.015748
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
0
0
0
5
da7b86640c076ed4fb618974f93604479de3a11a
178
py
Python
lista_ex2.py/exercicio23.py
robinson-1985/mentoria_exercises
8359cead6ee5351851b04cb45f252e3881b79117
[ "MIT" ]
null
null
null
lista_ex2.py/exercicio23.py
robinson-1985/mentoria_exercises
8359cead6ee5351851b04cb45f252e3881b79117
[ "MIT" ]
null
null
null
lista_ex2.py/exercicio23.py
robinson-1985/mentoria_exercises
8359cead6ee5351851b04cb45f252e3881b79117
[ "MIT" ]
null
null
null
''' 23. Faça um programa que receba um número real, encontre e mostre: a) a parte inteira desse número; b) a parte fracionária desse número; c) o arredondamento desse número. '''
44.5
70
0.747191
29
178
4.586207
0.689655
0.24812
0
0
0
0
0
0
0
0
0
0.013514
0.168539
178
4
71
44.5
0.885135
0.955056
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
16fa0417063656dfa47727e81f4149b7b6d72c76
1,333
py
Python
python/tests/test_date.py
eno-lang/enotype
03c4cbec0c9aaec6063bc8c85891e8e51ad1dc21
[ "MIT" ]
3
2019-09-27T19:18:19.000Z
2021-02-24T23:38:04.000Z
python/tests/test_date.py
eno-lang/enotype
03c4cbec0c9aaec6063bc8c85891e8e51ad1dc21
[ "MIT" ]
1
2021-05-07T02:25:26.000Z
2021-05-07T02:25:26.000Z
python/tests/test_date.py
eno-lang/enotype
03c4cbec0c9aaec6063bc8c85891e8e51ad1dc21
[ "MIT" ]
null
null
null
import pytest from datetime import date as python_date, datetime as python_datetime from enotype import date def test_0_1992_02_02(): assert date('1992-02-02') == python_date(1992, 2, 2) def test_1_1990(): with pytest.raises(ValueError) as excinfo: date('1990') def test_2_1991_01(): with pytest.raises(ValueError) as excinfo: date('1991-01') def test_3_1993_03_03t1920_01_00(): with pytest.raises(ValueError) as excinfo: date('1993-03-03T1920+01:00') def test_4_1994_04_04t1920_30_01_00(): with pytest.raises(ValueError) as excinfo: date('1994-04-04T1920:30+01:00') def test_5_1995_05_05t1920_30_45_01_00(): with pytest.raises(ValueError) as excinfo: date('1995-05-05T1920:30.45+01:00') def test_6_1996_06_06t0815_30_05_00(): with pytest.raises(ValueError) as excinfo: date('1996-06-06T0815:30-05:00') def test_7_1997_07_07t1315_30z(): with pytest.raises(ValueError) as excinfo: date('1997-07-07T1315:30Z') def test_8_2002_12_14(): with pytest.raises(ValueError) as excinfo: date('2002 12 14') def test_9_2002_12_14_20_15(): with pytest.raises(ValueError) as excinfo: date('2002-12-14 20:15') def test_10_january(): with pytest.raises(ValueError) as excinfo: date('January') def test_11_13_00(): with pytest.raises(ValueError) as excinfo: date('13:00')
26.66
69
0.739685
227
1,333
4.079295
0.255507
0.090713
0.190065
0.308855
0.664147
0.614471
0.538877
0.329374
0.240821
0.101512
0
0.239791
0.136534
1,333
50
70
26.66
0.564726
0
0
0.289474
0
0
0.130435
0.071964
0
0
0
0
0.026316
1
0.315789
true
0
0.078947
0
0.394737
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
5
16fefbf66c3aa2b0389b8a25825b997cc1d3e4aa
187
py
Python
instagram/instaclone/admin.py
kasambuli/InstaClone
c84ad962f02619ba41b9ba32a64b4146d05d8000
[ "Unlicense" ]
null
null
null
instagram/instaclone/admin.py
kasambuli/InstaClone
c84ad962f02619ba41b9ba32a64b4146d05d8000
[ "Unlicense" ]
null
null
null
instagram/instaclone/admin.py
kasambuli/InstaClone
c84ad962f02619ba41b9ba32a64b4146d05d8000
[ "Unlicense" ]
null
null
null
from django.contrib import admin from .models import Image, Profile, Comments #register models here admin.site.register(Image) admin.site.register(Profile) admin.site.register(Comments)
23.375
44
0.818182
26
187
5.884615
0.461538
0.176471
0.333333
0
0
0
0
0
0
0
0
0
0.090909
187
7
45
26.714286
0.9
0.106952
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
e51c57a669a65fecb1a041ca2d38034026fb97f6
99
py
Python
__init__.py
sunshine-app/tics_db
edb003cbd5bb5cde907d43b51e1156cd676ff10e
[ "MIT" ]
null
null
null
__init__.py
sunshine-app/tics_db
edb003cbd5bb5cde907d43b51e1156cd676ff10e
[ "MIT" ]
null
null
null
__init__.py
sunshine-app/tics_db
edb003cbd5bb5cde907d43b51e1156cd676ff10e
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # @Time : 2020/1/15 10:04 # @Author : shine # @File : __init__.py.py
24.75
28
0.525253
15
99
3.2
0.933333
0
0
0
0
0
0
0
0
0
0
0.16
0.242424
99
4
29
24.75
0.48
0.919192
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
e52c1e02fcbc891936656c9e5573009127e0a146
687
py
Python
.venv/lib/python3.8/site-packages/aws_cdk/pipelines/_jsii/__init__.py
sandipganguly/cdkpipeline
aecde04724a99e55d20a62cd3ccded6ceedbe967
[ "MIT-0" ]
null
null
null
.venv/lib/python3.8/site-packages/aws_cdk/pipelines/_jsii/__init__.py
sandipganguly/cdkpipeline
aecde04724a99e55d20a62cd3ccded6ceedbe967
[ "MIT-0" ]
null
null
null
.venv/lib/python3.8/site-packages/aws_cdk/pipelines/_jsii/__init__.py
sandipganguly/cdkpipeline
aecde04724a99e55d20a62cd3ccded6ceedbe967
[ "MIT-0" ]
null
null
null
import abc import builtins import datetime import enum import typing import jsii import jsii.compat import publication import aws_cdk.aws_cloudformation._jsii import aws_cdk.aws_codebuild._jsii import aws_cdk.aws_codepipeline._jsii import aws_cdk.aws_codepipeline_actions._jsii import aws_cdk.aws_events._jsii import aws_cdk.aws_iam._jsii import aws_cdk.aws_s3_assets._jsii import aws_cdk.cloud_assembly_schema._jsii import aws_cdk.core._jsii import aws_cdk.cx_api._jsii import constructs._jsii __jsii_assembly__ = jsii.JSIIAssembly.load( "@aws-cdk/pipelines", "1.56.0", __name__[0:-6], "pipelines@1.56.0.jsii.tgz" ) __all__ = [ "__jsii_assembly__", ] publication.publish()
21.46875
79
0.819505
107
687
4.785047
0.345794
0.214844
0.234375
0.28125
0.269531
0.121094
0
0
0
0
0
0.017685
0.094614
687
31
80
22.16129
0.805466
0
0
0
0
0
0.09607
0.03639
0
0
0
0
0
1
0
false
0
0.730769
0
0.730769
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
e550244ca62523f570e00d7bfbe1051340644f6b
342
py
Python
src/automotive/core/can/hardware/__init__.py
philosophy912/automotive
de918611652b789a83545f346c1569c2c2c955a6
[ "Apache-2.0" ]
null
null
null
src/automotive/core/can/hardware/__init__.py
philosophy912/automotive
de918611652b789a83545f346c1569c2c2c955a6
[ "Apache-2.0" ]
null
null
null
src/automotive/core/can/hardware/__init__.py
philosophy912/automotive
de918611652b789a83545f346c1569c2c2c955a6
[ "Apache-2.0" ]
1
2022-02-28T07:23:28.000Z
2022-02-28T07:23:28.000Z
# -*- coding:utf-8 -*- # -------------------------------------------------------- # Copyright (C), 2016-2021, lizhe, All rights reserved # -------------------------------------------------------- # @Name: __init__.py.py # @Author: lizhe # @Created: 2021/10/4 - 20:24 # --------------------------------------------------------
38
58
0.277778
23
342
3.956522
0.869565
0
0
0
0
0
0
0
0
0
0
0.06734
0.131579
342
8
59
42.75
0.239057
0.950292
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
e59b8098361ecf90097d7e1f1f5826b5e2ed2883
493
py
Python
knx_stack/definition/knxnet_ip/state.py
majamassarini/knx-stack
11a9baac6b7600649b5fbca43c93b200b23676b4
[ "MIT" ]
2
2021-07-28T07:42:28.000Z
2022-01-25T18:56:05.000Z
knx_stack/definition/knxnet_ip/state.py
majamassarini/knx-stack
11a9baac6b7600649b5fbca43c93b200b23676b4
[ "MIT" ]
6
2021-07-25T21:36:01.000Z
2022-02-20T21:11:31.000Z
knx_stack/definition/knxnet_ip/state.py
majamassarini/knx-stack
11a9baac6b7600649b5fbca43c93b200b23676b4
[ "MIT" ]
null
null
null
import knx_stack.state class State(knx_stack.state.State): def __init__(self, medium, association_table=None, datapointtypes=None): super(State, self).__init__(medium, association_table, datapointtypes) self._communication_channel_id = 0 @property def communication_channel_id(self): return self._communication_channel_id @communication_channel_id.setter def communication_channel_id(self, value): self._communication_channel_id = value
30.8125
78
0.754564
58
493
5.948276
0.37931
0.347826
0.382609
0.226087
0.168116
0
0
0
0
0
0
0.002445
0.170385
493
15
79
32.866667
0.841076
0
0
0
0
0
0
0
0
0
0
0
0
1
0.272727
false
0
0.090909
0.090909
0.545455
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
e5e6d8ac3e2e4331b78873651d262cf7b1607f4a
250
py
Python
simulacrum/coltypes.py
jbrambleDC/simulacrum
229ad1f8a83ddaf8cdbcec62d1098d3fcdd58d58
[ "MIT" ]
87
2016-09-18T10:10:40.000Z
2020-06-14T09:08:19.000Z
simulacrum/coltypes.py
jbrambleDC/simulacram
229ad1f8a83ddaf8cdbcec62d1098d3fcdd58d58
[ "MIT" ]
5
2016-09-19T08:45:48.000Z
2017-08-25T11:52:46.000Z
simulacrum/coltypes.py
jbrambleDC/simulacrum
229ad1f8a83ddaf8cdbcec62d1098d3fcdd58d58
[ "MIT" ]
7
2016-09-23T16:51:26.000Z
2017-07-26T01:15:00.000Z
class ColTypes: def __init__(self): self.coltypes = {} def add_coltype(self, name, coltype, **kwargs): kwargs['type'] = coltype self.coltypes[name] = kwargs def get_coltypes(self): return self.coltypes
20.833333
51
0.604
28
250
5.178571
0.428571
0.248276
0
0
0
0
0
0
0
0
0
0
0.28
250
11
52
22.727273
0.805556
0
0
0
0
0
0.016
0
0
0
0
0
0
1
0.375
false
0
0
0.125
0.625
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
f91337ccd5e0a7b98cfbd68fc2e43f56a4003927
140
py
Python
yaml_lib/__init__.py
ChauffeurPrive/nestor-api
364b5f034eeb929932a5a8c3f3b00d1275a7ae5b
[ "Apache-2.0" ]
2
2020-08-17T09:59:03.000Z
2020-08-17T09:59:23.000Z
yaml_lib/__init__.py
ChauffeurPrive/nestor-api
364b5f034eeb929932a5a8c3f3b00d1275a7ae5b
[ "Apache-2.0" ]
83
2020-06-12T14:37:35.000Z
2022-01-26T14:10:10.000Z
yaml_lib/__init__.py
ChauffeurPrive/nestor-api
364b5f034eeb929932a5a8c3f3b00d1275a7ae5b
[ "Apache-2.0" ]
1
2020-07-02T14:33:45.000Z
2020-07-02T14:33:45.000Z
"""Library to handle all the YAML related functions""" from .dump_yaml import convert_to_yaml from .load_yaml import parse_yaml, read_yaml
28
54
0.807143
23
140
4.652174
0.652174
0.186916
0
0
0
0
0
0
0
0
0
0
0.128571
140
4
55
35
0.877049
0.342857
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
005709d7135db3c0340ab90e6a8a4d20f227b07b
546
py
Python
appendix/app/api/services/UserMetadataService.py
iurykrieger96/morpy-tcc
95cb484ede708fab798db5471f944472c2a65c66
[ "MIT" ]
null
null
null
appendix/app/api/services/UserMetadataService.py
iurykrieger96/morpy-tcc
95cb484ede708fab798db5471f944472c2a65c66
[ "MIT" ]
null
null
null
appendix/app/api/services/UserMetadataService.py
iurykrieger96/morpy-tcc
95cb484ede708fab798db5471f944472c2a65c66
[ "MIT" ]
null
null
null
from database.db import db import pymongo class UserMetadataService(object): def __init__(self): self.user_meta = db.user_metadata def get_active(self): return self.user_meta.find_one({'active': True}) def insert(self, user_meta_dict): return self.user_meta.insert(user_meta_dict) def disable_all(self): return self.user_meta.update({'active': True}, {'$set': {'active': False}}) def get_all(self): return self.user_meta.find().sort([('version', pymongo.DESCENDING)])
27.3
83
0.659341
72
546
4.75
0.416667
0.163743
0.210526
0.210526
0.233918
0.233918
0
0
0
0
0
0
0.208791
546
20
84
27.3
0.791667
0
0
0
0
0
0.053016
0
0
0
0
0
0
1
0.384615
false
0
0.153846
0.307692
0.923077
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
0062993db30d3ce74b493017c73c0610f9d8b839
132
py
Python
dbconfig.py
feilaoda/espider
e80b224a036141b17a21445c94de646d5e4d1482
[ "Apache-2.0" ]
null
null
null
dbconfig.py
feilaoda/espider
e80b224a036141b17a21445c94de646d5e4d1482
[ "Apache-2.0" ]
null
null
null
dbconfig.py
feilaoda/espider
e80b224a036141b17a21445c94de646d5e4d1482
[ "Apache-2.0" ]
1
2021-11-29T01:28:14.000Z
2021-11-29T01:28:14.000Z
from sqlalchemy import create_engine engine = create_engine("mysql+mysqlconnector://root:admin@localhost/codespider?charset=utf8")
33
93
0.833333
16
132
6.75
0.8125
0.222222
0
0
0
0
0
0
0
0
0
0.008065
0.060606
132
3
94
44
0.862903
0
0
0
0
0
0.507576
0.507576
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
00810a3c303f903916219d75159fae7d61868577
58
py
Python
socialbus-scripts/indexing/datasources/__init__.py
arianpasquali/socialbus
942de3195f40732e8aa985c01f3733f9bc7eb39d
[ "MIT" ]
3
2016-11-27T17:20:24.000Z
2020-09-26T15:14:09.000Z
socialbus-scripts/indexing/datasources/__init__.py
arianpasquali/socialbus
942de3195f40732e8aa985c01f3733f9bc7eb39d
[ "MIT" ]
6
2021-06-04T01:08:16.000Z
2021-08-09T20:49:36.000Z
socialbus-scripts/indexing/datasources/__init__.py
tiagonc96/socialbustl
3a32a9184e7e2cea2a14091e74055eb8ed9cbcec
[ "MIT" ]
1
2017-03-08T17:02:19.000Z
2017-03-08T17:02:19.000Z
from MongoDataSource import * from SolrDataSource import *
29
29
0.844828
6
58
8.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.12069
58
2
30
29
0.960784
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
009df85c3eb4df7e9670e9b5a193bde2d1525ecd
255
py
Python
mayo/net/tf/gate/squeeze.py
deep-fry/mayo
7211a11fdb9bb0a036d496a3eba16c96db122f89
[ "MIT" ]
110
2018-06-07T17:52:29.000Z
2022-03-28T08:04:02.000Z
mayo/net/tf/gate/squeeze.py
kypomon/mayo
7211a11fdb9bb0a036d496a3eba16c96db122f89
[ "MIT" ]
6
2019-10-17T12:00:29.000Z
2021-10-21T13:41:22.000Z
mayo/net/tf/gate/squeeze.py
kypomon/mayo
7211a11fdb9bb0a036d496a3eba16c96db122f89
[ "MIT" ]
22
2018-07-05T01:30:49.000Z
2021-10-19T06:15:40.000Z
from mayo.net.tf.gate.sparse import SparseRegularizedGatedConvolutionBase class SqueezeExciteGatedConvolution(SparseRegularizedGatedConvolutionBase): def activate(self, tensor): return self.actives() * self.gate() * super().activate(tensor)
36.428571
75
0.792157
23
255
8.782609
0.73913
0
0
0
0
0
0
0
0
0
0
0
0.113725
255
6
76
42.5
0.893805
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
00a6aaabd6b6a973514feaf5b6dabe8f9e17fe69
230
py
Python
algorithms/age/optimizer.py
mpoiitis/iSpine
333c5c6326c7052a673ce89eabadb476031c7a35
[ "MIT" ]
4
2021-02-07T01:06:03.000Z
2021-09-07T11:55:46.000Z
algorithms/age/optimizer.py
mpoiitis/iSpine
333c5c6326c7052a673ce89eabadb476031c7a35
[ "MIT" ]
1
2021-04-13T07:29:50.000Z
2021-04-13T09:46:13.000Z
algorithms/age/optimizer.py
mpoiitis/iSpine
333c5c6326c7052a673ce89eabadb476031c7a35
[ "MIT" ]
1
2022-03-27T01:01:18.000Z
2022-03-27T01:01:18.000Z
import torch import torch.nn.modules.loss import torch.nn.functional as F def loss_function(adj_preds, adj_labels, n_nodes): cost = 0. cost += F.binary_cross_entropy_with_logits(adj_preds, adj_labels) return cost
25.555556
69
0.756522
37
230
4.432432
0.621622
0.20122
0.158537
0.207317
0
0
0
0
0
0
0
0.005208
0.165217
230
9
70
25.555556
0.848958
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.714286
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
00c17bb6b700a9b50a307924752cf9ab122d0a70
228
py
Python
systemtest/pts/models/__init__.py
IBM-Power-SystemTest/systemtest
a29e6d54500ca13f554073cc66a4a2d403ea5b14
[ "BSD-3-Clause" ]
1
2022-03-09T18:07:11.000Z
2022-03-09T18:07:11.000Z
systemtest/pts/models/__init__.py
IBM-Power-SystemTest/systemtest
a29e6d54500ca13f554073cc66a4a2d403ea5b14
[ "BSD-3-Clause" ]
null
null
null
systemtest/pts/models/__init__.py
IBM-Power-SystemTest/systemtest
a29e6d54500ca13f554073cc66a4a2d403ea5b14
[ "BSD-3-Clause" ]
null
null
null
""" Django Models References: https://docs.djangoproject.com/en/3.1/topics/db/ https://docs.djangoproject.com/en/3.1/ref/models/ """ from .models import * from .request import * from .request_group import *
20.727273
57
0.671053
31
228
4.903226
0.548387
0.118421
0.289474
0.328947
0.381579
0.381579
0.381579
0
0
0
0
0.021277
0.175439
228
10
58
22.8
0.787234
0.631579
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
00c345a8f3b33eef65e4ae737206f64e2e507294
247
py
Python
src/icemac/callonchange/tests/__init__.py
icemac/icemac.callonchange
ad91599b1c095b4b7a0cc05a4895f6ad479c8352
[ "BSD-2-Clause" ]
1
2020-10-07T15:09:32.000Z
2020-10-07T15:09:32.000Z
src/icemac/callonchange/tests/__init__.py
icemac/icemac.callonchange
ad91599b1c095b4b7a0cc05a4895f6ad479c8352
[ "BSD-2-Clause" ]
null
null
null
src/icemac/callonchange/tests/__init__.py
icemac/icemac.callonchange
ad91599b1c095b4b7a0cc05a4895f6ad479c8352
[ "BSD-2-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2010-2011 Michael Howitz # See also LICENSE.txt def additional_tests(): # needed function to find doctests when runing `python setup.py test` import test_recipe return test_recipe.test_suite()
24.7
73
0.712551
35
247
4.914286
0.885714
0.116279
0
0
0
0
0
0
0
0
0
0.044776
0.186235
247
9
74
27.444444
0.810945
0.603239
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
5
00d36aa8889f98e483dca818c48d7890b3787091
25
py
Python
tests/syntax/def_missing_parens_2.py
matan-h/friendly
3ab0fc6541c837271e8865e247750007acdd18fb
[ "MIT" ]
287
2019-04-08T13:18:29.000Z
2021-03-14T19:10:21.000Z
tests/syntax/def_missing_parens_2.py
matan-h/friendly
3ab0fc6541c837271e8865e247750007acdd18fb
[ "MIT" ]
191
2019-04-08T14:39:18.000Z
2021-03-14T22:14:56.000Z
tests/syntax/def_missing_parens_2.py
matan-h/friendly
3ab0fc6541c837271e8865e247750007acdd18fb
[ "MIT" ]
9
2019-04-08T12:54:08.000Z
2020-11-20T02:26:27.000Z
def name a, b: pass
6.25
14
0.52
5
25
2.6
1
0
0
0
0
0
0
0
0
0
0
0
0.4
25
3
15
8.333333
0.866667
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0.5
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
5
00dcd8c39b4e44ac2ccff8973fe61b9e60de07cc
79
py
Python
python/testData/intentions/returnTypeInDocstring.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/intentions/returnTypeInDocstring.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/intentions/returnTypeInDocstring.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def f(x): pass def g(x): y = x f(x) # (1) f(<caret>y) # (2)
9.875
22
0.341772
16
79
1.6875
0.5625
0.148148
0
0
0
0
0
0
0
0
0
0.043478
0.417722
79
8
22
9.875
0.543478
0.088608
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0.166667
0
null
null
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
5
00dd6b4b556462911710dd95c9029b29fe02ce7c
150
py
Python
tests/mock_responses.py
vikcena01/django-push-notification
b478eee1274e5a42c4b8d7f11f610f08e9c1e73f
[ "MIT" ]
1
2015-03-04T04:30:19.000Z
2015-03-04T04:30:19.000Z
tests/mock_responses.py
peterhinson/django-push-notifications
bf4e781aa4e67ea287027aa0d93c0faa0953c591
[ "MIT" ]
null
null
null
tests/mock_responses.py
peterhinson/django-push-notifications
bf4e781aa4e67ea287027aa0d93c0faa0953c591
[ "MIT" ]
null
null
null
GCM_PLAIN_RESPONSE = 'id=1:08' GCM_JSON_RESPONSE = '{"multicast_id":108,"success":1,"failure":0,"canonical_ids":0,"results":[{"message_id":"1:08"}]}'
50
118
0.7
24
150
4.083333
0.666667
0.061224
0.102041
0
0
0
0
0
0
0
0
0.083333
0.04
150
2
119
75
0.597222
0
0
0
0
0.5
0.686667
0.64
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
00ef6e816765e7fc076a3a76cb49523cc509aa5d
177
py
Python
enp/settings.py
igor47/enp
3362e12e811ab5cfb461f622c5cffcd56fed5f9b
[ "MIT" ]
null
null
null
enp/settings.py
igor47/enp
3362e12e811ab5cfb461f622c5cffcd56fed5f9b
[ "MIT" ]
null
null
null
enp/settings.py
igor47/enp
3362e12e811ab5cfb461f622c5cffcd56fed5f9b
[ "MIT" ]
null
null
null
import pathlib PROJECT_ROOT = pathlib.Path(__file__).parent.parent.absolute() INDEX_PATH = PROJECT_ROOT / "dist" / "index.html" ASSETS_PATH = PROJECT_ROOT / "dist" / "assets"
25.285714
62
0.745763
23
177
5.347826
0.521739
0.268293
0.243902
0.308943
0
0
0
0
0
0
0
0
0.118644
177
6
63
29.5
0.788462
0
0
0
0
0
0.135593
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
dab442172f803e7461bb4f2fed5eba962ce28bb6
30
py
Python
Problems/carrots.py
rikgj/Kattis
2e34dee307aef5acea5837732bf9f27f8c548e9c
[ "MIT" ]
null
null
null
Problems/carrots.py
rikgj/Kattis
2e34dee307aef5acea5837732bf9f27f8c548e9c
[ "MIT" ]
null
null
null
Problems/carrots.py
rikgj/Kattis
2e34dee307aef5acea5837732bf9f27f8c548e9c
[ "MIT" ]
null
null
null
print(input().split(' ')[1])
10
28
0.533333
4
30
4
1
0
0
0
0
0
0
0
0
0
0
0.037037
0.1
30
2
29
15
0.555556
0
0
0
0
0
0.034483
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
dae735ba86b0dff9e9ad8624976c8fb2ed759f78
106
py
Python
reference/Python/media/pillow/others/background_removal/merge.py
steadylearner/code
ba6df6c38a6e25b7ea996f4df905921e27760e04
[ "MIT" ]
4
2019-07-17T14:43:32.000Z
2022-03-27T21:38:01.000Z
reference/Python/media/pillow/others/background_removal/merge.py
steadylearner/code
ba6df6c38a6e25b7ea996f4df905921e27760e04
[ "MIT" ]
39
2020-09-04T03:31:16.000Z
2022-03-08T22:54:03.000Z
reference/Python/media/pillow/others/background_removal/merge.py
steadylearner/code
ba6df6c38a6e25b7ea996f4df905921e27760e04
[ "MIT" ]
1
2021-03-03T13:04:28.000Z
2021-03-03T13:04:28.000Z
# $composite -dissolve 50 -gravity Center -geometry -11+6 prop-passer.png React.png -alpha Set result.png
53
105
0.764151
17
106
4.764706
0.882353
0
0
0
0
0
0
0
0
0
0
0.053763
0.122642
106
1
106
106
0.817204
0.971698
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
daf3a3c3296fd1144e9212c24f4e83e710b2a91b
23,499
py
Python
build/lib/pyhpeimc/plat/termaccess.py
HPNetworking/HP-Intelligent-Management-Center
4fba31827573587e03a6233c7db60f188038c8e5
[ "Apache-2.0" ]
13
2016-03-14T17:55:03.000Z
2021-03-26T07:18:26.000Z
build/lib/pyhpeimc/plat/termaccess.py
HPNetworking/HP-Intelligent-Management-Center
4fba31827573587e03a6233c7db60f188038c8e5
[ "Apache-2.0" ]
7
2016-08-04T17:39:11.000Z
2017-09-19T13:42:35.000Z
build/lib/pyhpeimc/plat/termaccess.py
HPNetworking/HP-Intelligent-Management-Center
4fba31827573587e03a6233c7db60f188038c8e5
[ "Apache-2.0" ]
17
2016-03-03T05:24:20.000Z
2022-03-10T08:16:31.000Z
#!/usr/bin/env python3 # author: @netmanchris # This section imports required libraries import json import requests import ipaddress HEADERS = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Accept-encoding': 'application/json'} def get_real_time_locate(ipAddress, auth, url): """ function takes the ipAddress of a specific host and issues a RESTFUL call to get the device and interface that the target host is currently connected to. Note: Although intended to return a single location, Multiple locations may be returned for a single host due to a partially discovered network or misconfigured environment. :param ipAddress: str value valid IPv4 IP address :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: list of dictionaries where each element of the list represents the location of the target host :rtype: list >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> found_device = get_real_time_locate('10.101.0.51', auth.creds, auth.url) >>> assert type(found_device) is list >>> assert 'deviceId' in found_device[0] >>> assert 'deviceId' in found_device[0] >>> assert 'deviceId' in found_device[0] >>> assert 'deviceId' in found_device[0] >>> no_device = get_real_time_locate('192.168.254.254', auth.creds, auth.url) >>> assert type(no_device) is dict >>> assert len(no_device) == 0 """ real_time_locate_url = "/imcrs/res/access/realtimeLocate?type=2&value=" + str(ipAddress) + "&total=false" f_url = url + real_time_locate_url r = requests.get(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents try: if r.status_code == 200: response = json.loads(r.text) if 'realtimeLocation' in response: real_time_locate = json.loads(r.text)['realtimeLocation'] if type(real_time_locate) is dict: 
real_time_locate = [real_time_locate] return real_time_locate else: return json.loads(r.text)['realtimeLocation'] else: return json.loads(r.text) except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " get_real_time_locate: An Error has occured" def get_ip_mac_arp_list(devId, auth,url): """ function takes devid of specific device and issues a RESTFUL call to get the IP/MAC/ARP list from the target device. :param devId: int or str value of the target device. :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: list of dictionaries containing the IP/MAC/ARP list of the target device. :rtype: list >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> ip_mac_list = get_ip_mac_arp_list('10', auth.creds, auth.url) >>> assert type(ip_mac_list) is list >>> assert 'deviceId' in ip_mac_list[0] """ if auth is None or url is None: # checks to see if the imc credentials are already available set_imc_creds() ip_mac_arp_list_url = "/imcrs/res/access/ipMacArp/" + str(devId) f_url = url + ip_mac_arp_list_url r = requests.get(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents try: if r.status_code == 200: macarplist = (json.loads(r.text)) if len(macarplist) > 1: return macarplist['ipMacArp'] else: return ['this function is unsupported'] except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " get_ip_mac_arp_list: An Error has occured" #this section deals with the IP Address Manager functions with terminal access of HPE IMC Base platform #Following functions deal with IP scopes def get_ip_scope(auth, url, scopeId=None,): """ function requires no inputs and returns all IP address scopes currently configured on the HPE IMC server. 
If the optional scopeId parameter is included, this will automatically return only the desired scope id. :param scopeId: integer of the desired scope id ( optional ) :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: list of dictionary objects where each element of the list represents one IP scope :rtype: list >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> ip_scope_list = get_ip_scope(auth.creds, auth.url) >>> assert type(ip_scope_list) is list >>> assert 'ip' in ip_scope_list[0] """ if scopeId is None: get_ip_scope_url = "/imcrs/res/access/assignedIpScope" else: get_ip_scope_url = "/imcrs/res/access/assignedIpScope/ip?ipScopeId="+str(scopeId) f_url = url + get_ip_scope_url r = requests.get(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents try: if r.status_code == 200: ipscopelist = (json.loads(r.text)) return ipscopelist['assignedIpScope'] except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " get_ip_scope: An Error has occured" def get_ip_scope_detail(scopeId, auth, url ): """ function requires no inputs and returns all IP address scopes currently configured on the HPE IMC server. If the optional scopeId parameter is included, this will automatically return only the desired scope id. 
:param scopeId: integer of the desired scope id ( optional ) :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: dictionary, may containing multiple entries if sub-scopes have been created :rtype: dict >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> ip_scope_detail = get_ip_scope_detail('45', auth.creds, auth.url) >>> assert type(ip_scope_detail) is dict >>> assert 'startIp' in ip_scope_detail """ get_ip_scope_url = "/imcrs/res/access/assignedIpScope/"+str(scopeId) f_url = url + get_ip_scope_url r = requests.get(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents try: if r.status_code == 200: ipscopelist = (json.loads(r.text)) return ipscopelist except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " get_ip_scope: An Error has occured" def add_ip_scope(startIp, endIp, name, description, auth, url): """ Function takes input of four strings Start Ip, endIp, name, and description to add new Ip Scope to terminal access in the HPE IMC base platform :param startIp: str Start of IP address scope ex. '10.101.0.1' :param endIp: str End of IP address scope ex. '10.101.0.254' :param name: str Name of the owner of this IP scope ex. 
'admin' :param description: str description of the Ip scope :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: 200 if successfull :rtype: >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> delete_ip_scope('10.50.0.0/24', auth.creds, auth.url) <Response [204]> >>> new_scope = add_ip_scope('10.50.0.1', '10.50.0.254', 'cyoung', 'test group', auth.creds, auth.url) >>> assert type(new_scope) is int >>> assert new_scope == 200 >>> existing_scope = add_ip_scope('10.50.0.1', '10.50.0.254', 'cyoung', 'test group', auth.creds, auth.url) >>> assert type(existing_scope) is int >>> assert existing_scope == 409 """ if auth is None or url is None: # checks to see if the imc credentials are already available set_imc_creds() add_ip_scope_url = "/imcrs/res/access/assignedIpScope" f_url = url + add_ip_scope_url payload = ('''{ "startIp": "%s", "endIp": "%s","name": "%s","description": "%s" }''' %(str(startIp), str(endIp), str(name), str(description))) r = requests.post(f_url, auth=auth, headers=HEADERS, data=payload) # creates the URL using the payload variable as the contents try: if r.status_code == 200: #print("IP Scope Successfully Created") return r.status_code elif r.status_code == 409: #print ("IP Scope Already Exists") return r.status_code except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " add_ip_scope: An Error has occured" def add_child_ip_scope(startIp, endIp, name, description, scopeid, auth, url): """ Function takes input of four strings Start Ip, endIp, name, and description to add new Ip Scope to terminal access in the HPE IMC base platform :param startIp: str Start of IP address scope ex. '10.101.0.1' :param endIp: str End of IP address scope ex. 
'10.101.0.254' :param name: str Name of the owner of this IP scope ex. 'admin' :param description: str description of the Ip scope :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: 200 :rtype: >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> add_child_ip_scope('10.50.0.1', '10.50.0.126', 'cyoung', 'test sub scope', '175', auth.creds, auth.url) """ if auth is None or url is None: # checks to see if the imc credentials are already available set_imc_creds() add_ip_scope_url = "/imcrs/res/access/assignedIpScope/" + str(scopeid) f_url = url + add_ip_scope_url payload = ('''{ "startIp": "%s", "endIp": "%s","name": "%s","description": "%s", "parentId" : "%s"}''' %(str(startIp), str(endIp), str(name), str(description), str(scopeid))) r = requests.post(f_url, auth=auth, headers=HEADERS, data=payload) # creates the URL using the payload variable as the contents try: if r.status_code == 200: #print("IP Scope Successfully Created") return r.status_code elif r.status_code == 409: #print ("Conflict with Current Scope") return r.status_code except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " add_ip_scope: An Error has occured" def delete_ip_scope(network_address, auth, url): '''Function to delete an entire IP segment from the IMC IP Address management under terminal access :param network_address :param auth :param url >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> new_scope = add_ip_scope('10.50.0.1', '10.50.0.254', 'cyoung', 'test group', auth.creds, auth.url) >>> delete_scope = delete_ip_scope('10.50.0.0/24', auth.creds, auth.url) ''' scope_id = get_scope_id(network_address, auth,url) delete_ip_address_url = 
'''/imcrs/res/access/assignedIpScope/'''+str(scope_id) f_url = url + delete_ip_address_url r = requests.delete(f_url, auth=auth, headers=HEADERS) try: return r if r.status_code == 204: #print("IP Segment Successfully Deleted") return r.status_code except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " delete_ip_scope: An Error has occured" #Following functions deal with hosts assigned to IP scopes def add_scope_ip(ipaddress, name, description, scopeid, auth, url): """ Function to add new host IP address allocation to existing scope ID :param ipaddress: :param name: name of the owner of this host :param description: Description of the host :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: :rtype: >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> add_scope_ip('10.50.0.5', 'cyoung', 'New Test Host','175', auth.creds, auth.url) """ new_ip = { "ip": ipaddress, "name": name, "description": description} add_scope_ip_url = '/imcrs/res/access/assignedIpScope/ip?ipScopeId='+str(scopeid) f_url = url + add_scope_ip_url payload = json.dumps(new_ip) r = requests.post(f_url, auth=auth, headers=HEADERS, data=payload) # creates the URL using the payload variable as the contents try: if r.status_code == 200: #print("IP Scope Successfully Created") return r.status_code elif r.status_code == 409: #print("IP Scope Already Exists") return r.status_code except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " add_ip_scope: An Error has occured" def remove_scope_ip(hostid, auth, url): """ Function to add remove IP address allocation :param hostid: Host id of the host to be deleted :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually 
auth.url from pyhpeimc.auth.authclass :return: String of HTTP response code. Should be 204 is successfull :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> new_scope = add_ip_scope('10.50.0.1', '10.50.0.254', 'cyoung', 'test group', auth.creds, auth.url) >>> add_host_to_segment('10.50.0.5', 'cyoung', 'New Test Host', '10.50.0.0/24', auth.creds, auth.url) >>> host_id = get_host_id('10.50.0.5', '10.50.0.0/24', auth.creds, auth.url) >>> rem_host = remove_scope_ip(host_id, auth.creds, auth.url) >>> assert type(rem_host) is int >>> assert rem_host == 204 """ add_scope_ip_url = '/imcrs/res/access/assignedIpScope/ip/'+str(hostid) f_url = url + add_scope_ip_url r = requests.delete(f_url, auth=auth, headers=HEADERS, ) try: if r.status_code == 204: #print("Host Successfully Deleted") return r.status_code elif r.status_code == 409: #print("IP Scope Already Exists") return r.status_code except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " add_ip_scope: An Error has occured" def get_ip_scope_hosts( scopeId, auth, url): """ Function requires input of scope ID and returns list of allocated IP address for the specified scope :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :param scopeId: Interger of teh desired scope id :return: list of dictionary objects where each element of the list represents a single host assigned to the IP scope :rtype: list >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> scope_id = get_scope_id('10.50.0.0/24', auth.creds, auth.url) >>> ip_scope_hosts = get_ip_scope_hosts(scope_id, auth.creds, auth.url) >>> assert type(ip_scope_hosts) is list >>> assert 'name' in ip_scope_hosts[0] >>> assert 
'description' in ip_scope_hosts[0] >>> assert 'ip' in ip_scope_hosts[0] >>> assert 'id' in ip_scope_hosts[0] """ get_ip_scope_url = "/imcrs/res/access/assignedIpScope/ip?size=10000&ipScopeId="+str(scopeId) f_url = url + get_ip_scope_url r = requests.get(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents try: if r.status_code == 200: ipscopelist = (json.loads(r.text)) if ipscopelist == {}: return ipscopelist else: ipscopelist = ipscopelist['assignedIpInfo'] if type(ipscopelist) is dict: ipscope = [] ipscope.append(ipscopelist) return ipscope return ipscopelist except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " get_ip_scope: An Error has occured" def add_scope_ip(ipaddress, name, description, scopeid, auth, url): """ Function to add new host IP address allocation to existing scope ID :param ipaddress: :param name: name of the owner of this host :param description: Description of the host :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: :rtype: >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> new_host = add_scope_ip('10.50.0.5', 'cyoung', 'New Test Host','175', auth.creds, auth.url) """ new_ip = { "ip": ipaddress, "name": name, "description": description} add_scope_ip_url = '/imcrs/res/access/assignedIpScope/ip?ipScopeId='+str(scopeid) f_url = url + add_scope_ip_url payload = json.dumps(new_ip) r = requests.post(f_url, auth=auth, headers=HEADERS, data=payload) # creates the URL using the payload variable as the contents try: if r.status_code == 200: #print("IP Host Successfully Created") return r.status_code elif r.status_code == 409: #print("IP Host Already Exists") return r.status_code except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + 
" add_ip_scope: An Error has occured" def add_host_to_segment(ipaddress, name, description, network_address, auth, url): ''' Function to abstract existing add_scope_ip_function. Allows for use of network address rather than forcing human to learn the scope_id :param ipaddress: :param name: name of the owner of this host :param description: Description of the host :param: network_address: network address of the target scope in format x.x.x.x/yy where x.x.x.x representents the network address and yy represents the length of the subnet mask. Example: 10.50.0.0 255.255.255.0 would be written as 10.50.0.0/24 :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: :rtype: ''' scope_id = get_scope_id(network_address, auth, url) add_scope_ip(ipaddress, name, description, scope_id, auth,url) def delete_host_from_segment(ipaddress, networkaddress, auth, url): '''Function to abstract ''' host_id = get_host_id(ipaddress, networkaddress, auth, url) remove_scope_ip(host_id, auth.creds, auth.url) """ Following Section are Helper functions to help translate human readable IPv4 address to IMC internal keys for working with IP scopes and hosts """ def get_scope_id(network_address, auth, url): """ :param network_address: network address of the target scope in format x.x.x.x/yy where x.x.x.x representents the network address and yy represents the length of the subnet mask. 
Example: 10.50.0.0 255.255.255.0 would be written as 10.50.0.0/24 :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: str object which contains the numerical ID of the target scope :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> new_scope = add_ip_scope('10.50.0.1', '10.50.0.254', 'cyoung', 'test group', auth.creds, auth.url) >>> scope_id = get_scope_id('10.50.0.0/24', auth.creds, auth.url) >>> assert type(scope_id) is str """ netaddr = ipaddress.ip_network(network_address) scopes = get_ip_scope(auth, url) for i in scopes: if int(i['id']) > 0: if ipaddress.ip_address(i['startIp']) in netaddr and ipaddress.ip_address(i['endIp']) in netaddr: return i['id'] if "assignedIpScope" in i: for child in i['assignedIpScope']: if ipaddress.ip_address(child['startIp']) in netaddr and ipaddress.ip_address(child['endIp']) in netaddr: return child['id'] def get_host_id(host_address, network_address, auth, url): """ :param host: str describing network address of the target scope in format x.x.x.x where x.x.x.x representents the full ipv4 address. Example: 10.50.0.5 255.255.255.0 would be written as 10.50.0.5 :param network_address: network address of the target scope in format x.x.x.x/yy where x.x.x.x representents the network address and yy represents the length of the subnet mask. 
Example: 10.50.0.0 255.255.255.0 would be written as 10.50.0.0/24 :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: str object which contains the numerical ID of the target scope :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.termaccess import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> new_scope = add_ip_scope('10.50.0.1', '10.50.0.254', 'cyoung', 'test group', auth.creds, auth.url) >>> add_host_to_segment('10.50.0.5', 'cyoung', 'New Test Host', '10.50.0.0/24', auth.creds, auth.url) >>> new_host_id = get_host_id('10.50.0.5', '10.50.0.0/24', auth.creds, auth.url) >>> assert type(new_host_id) is str """ scope_id = get_scope_id(network_address, auth, url) all_scope_hosts = get_ip_scope_hosts(scope_id, auth, url) for host in all_scope_hosts: if host['ip'] == host_address: return host['id']
35.283784
131
0.6599
3,407
23,499
4.441151
0.087173
0.030533
0.011896
0.026436
0.799485
0.766175
0.732998
0.71641
0.703324
0.673518
0
0.033947
0.225286
23,499
665
132
35.336842
0.797199
0.569939
0
0.575269
0
0.010753
0.159223
0.055605
0
0
0
0
0
1
0.080645
false
0
0.016129
0
0.290323
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
970307819dd2998ccd3d9c0eb0f2799af4fe665d
93
py
Python
LeapMotionBlender/UI/__init__.py
ALucatero03/Blender-Puppet-Motion
ce3beea7d681966a17aeaf6382424efaa3f05f5d
[ "MIT" ]
21
2020-11-05T23:11:28.000Z
2022-03-22T16:01:08.000Z
LeapMotionBlender/UI/__init__.py
ALucatero03/Blender-Puppet-Motion
ce3beea7d681966a17aeaf6382424efaa3f05f5d
[ "MIT" ]
6
2019-10-08T20:55:22.000Z
2019-12-17T07:05:22.000Z
LeapMotionBlender/UI/__init__.py
ALucatero03/Blender-Puppet-Motion
ce3beea7d681966a17aeaf6382424efaa3f05f5d
[ "MIT" ]
5
2019-10-09T18:08:44.000Z
2019-12-24T10:46:43.000Z
from .Panels import HandSelect, TrackSettings, MainLeapPanel from .Menus import SettingsPanel
46.5
60
0.860215
10
93
8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.096774
93
2
61
46.5
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9723a6d7d8853f69e03036b65fe1cbe7411bef70
180
py
Python
clustering/distance_measures.py
rodriggs/pipeline
a91a19dd631a5f352e1f610f82ff626e525b679d
[ "MIT" ]
null
null
null
clustering/distance_measures.py
rodriggs/pipeline
a91a19dd631a5f352e1f610f82ff626e525b679d
[ "MIT" ]
null
null
null
clustering/distance_measures.py
rodriggs/pipeline
a91a19dd631a5f352e1f610f82ff626e525b679d
[ "MIT" ]
null
null
null
import pandas as pd import numpy as np from sklearn import metrics def distance_measures(): """ INPUT: OUTPUT: """ pass if __name__ == '__main__': main()
12.857143
27
0.622222
22
180
4.681818
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.277778
180
13
28
13.846154
0.792308
0.077778
0
0
0
0
0.054795
0
0
0
0
0
0
1
0.142857
true
0.142857
0.428571
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
5
973063fe4268fc45f9f78ef4bed7f0a8a3d559bd
620
py
Python
scripts/efusionRegistrationTest.py
hz-ants/LabelFusion-docker2-
8dc116064a1bdcfa2c2dd814580b5f43d46c6f40
[ "BSD-3-Clause" ]
313
2017-07-16T02:00:16.000Z
2022-03-31T11:00:10.000Z
scripts/efusionRegistrationTest.py
hz-ants/LabelFusion-docker2-
8dc116064a1bdcfa2c2dd814580b5f43d46c6f40
[ "BSD-3-Clause" ]
94
2017-07-16T19:59:06.000Z
2022-03-30T08:14:22.000Z
scripts/efusionRegistrationTest.py
hz-ants/LabelFusion-docker2-
8dc116064a1bdcfa2c2dd814580b5f43d46c6f40
[ "BSD-3-Clause" ]
87
2017-07-14T16:01:54.000Z
2022-03-23T17:33:47.000Z
''' # phone pick_point 0.75556, -0.04087, -0.23920 pick_normal -0.79761, 0.29078, 0.52846 dist_to_previous_point 0.000000 # toothpaste pick_point 0.66325, -0.12406, -0.11679 pick_normal -0.46777, -0.05299, 0.88226 dist_to_previous_point 0.174430 # oil bottle pick_point 0.78493, -0.03217, -0.01731 pick_normal -0.63093, -0.11345, 0.76750 dist_to_previous_point 0.182061 # toy robot pick_point 0.76878, 0.10297, -0.14594 pick_normal -0.69869, -0.01452, 0.71528 dist_to_previous_point 0.187268 # table pick_point 0.78427, 0.03044, -0.24667 pick_normal -0.76159, 0.16045, 0.62788 dist_to_previous_point 0.125088 '''
20.666667
39
0.753226
117
620
3.777778
0.418803
0.135747
0.113122
0.214932
0.226244
0
0
0
0
0
0
0.391621
0.114516
620
29
40
21.37931
0.413479
0.980645
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
97384b1b5d496898b55b560f28f00b9d4c85f0c4
199
py
Python
users/urls.py
rohanjsuresh/extracted_keyword_validation
94e56c645c066d9d20097433b1716b3e76625b3d
[ "MIT" ]
null
null
null
users/urls.py
rohanjsuresh/extracted_keyword_validation
94e56c645c066d9d20097433b1716b3e76625b3d
[ "MIT" ]
null
null
null
users/urls.py
rohanjsuresh/extracted_keyword_validation
94e56c645c066d9d20097433b1716b3e76625b3d
[ "MIT" ]
1
2021-05-18T16:40:55.000Z
2021-05-18T16:40:55.000Z
from django.urls import path from . import views from django.conf import settings from django.conf.urls.static import static urlpatterns = [ path('register/', views.register, name='register'), ]
24.875
55
0.758794
27
199
5.592593
0.444444
0.198676
0.18543
0
0
0
0
0
0
0
0
0
0.135678
199
8
56
24.875
0.877907
0
0
0
0
0
0.085
0
0
0
0
0
0
1
0
false
0
0.571429
0
0.571429
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
979c0948b812296129b262b1790c6dc8982735ce
7,083
py
Python
test/unit/test_dataset_load.py
zlinzju/DeepReg
e808a4d80ea29233fa664df374e79af46d9ff60f
[ "Apache-2.0" ]
1
2020-11-05T12:17:27.000Z
2020-11-05T12:17:27.000Z
test/unit/test_dataset_load.py
zlinzju/DeepReg
e808a4d80ea29233fa664df374e79af46d9ff60f
[ "Apache-2.0" ]
null
null
null
test/unit/test_dataset_load.py
zlinzju/DeepReg
e808a4d80ea29233fa664df374e79af46d9ff60f
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 """ Tests for deepreg/dataset/load.py in pytest style """ import pytest import yaml import deepreg.dataset.load as load from deepreg.dataset.loader.grouped_loader import GroupedDataLoader from deepreg.dataset.loader.nifti_loader import NiftiFileLoader from deepreg.dataset.loader.paired_loader import PairedDataLoader from deepreg.dataset.loader.unpaired_loader import UnpairedDataLoader def load_yaml(file_path: str) -> dict: """ load the yaml file and return a dictionary """ assert file_path.endswith(".yaml") with open(file_path) as file: return yaml.load(file, Loader=yaml.FullLoader) def test_get_data_loader(): """ Test for get_data_loader to make sure it get correct data loader and raise correct errors """ # single paired data loader config = load_yaml("config/test/paired_nifti.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, PairedDataLoader) config = load_yaml("config/test/paired_h5.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, PairedDataLoader) # single unpaired data loader config = load_yaml("config/test/unpaired_nifti.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, UnpairedDataLoader) config = load_yaml("config/test/unpaired_h5.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, UnpairedDataLoader) # single grouped data loader config = load_yaml("config/test/grouped_nifti.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, GroupedDataLoader) config = load_yaml("config/test/grouped_h5.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, GroupedDataLoader) # empty data loader config = load_yaml("config/test/paired_nifti.yaml") config["dataset"]["dir"]["train"] = "" got = load.get_data_loader(data_config=config["dataset"], mode="train") 
assert got is None config = load_yaml("config/test/paired_nifti.yaml") config["dataset"]["dir"]["train"] = None got = load.get_data_loader(data_config=config["dataset"], mode="train") assert got is None # unpaired data loader with multiple dirs config = load_yaml("config/test/unpaired_nifti_multi_dirs.yaml") got = load.get_data_loader(data_config=config["dataset"], mode="train") assert isinstance(got, UnpairedDataLoader) # check not a directory error config = load_yaml("config/test/paired_nifti.yaml") config["dataset"]["dir"]["train"] += ".h5" with pytest.raises(ValueError) as err_info: load.get_data_loader(data_config=config["dataset"], mode="train") assert "is not a directory or does not exist" in str(err_info.value) # check directory not existed error config = load_yaml("config/test/paired_nifti.yaml") config["dataset"]["dir"]["train"] = "/this_should_not_existed" with pytest.raises(ValueError) as err_info: load.get_data_loader(data_config=config["dataset"], mode="train") assert "is not a directory or does not exist" in str(err_info.value) # check mode config = load_yaml("config/test/paired_nifti.yaml") with pytest.raises(AssertionError) as err_info: load.get_data_loader(data_config=config["dataset"], mode="example") assert "mode must be one of train/valid/test" in str(err_info.value) def test_get_single_data_loader(): """ Test for get_single_data_loader to make sure it get correct data loader and raise correct errors Mainly based on nifti file loader """ common_args = dict( file_loader=NiftiFileLoader, labeled=True, sample_label="sample", seed=0 ) # single paired data loader config = load_yaml("config/test/paired_nifti.yaml") got = load.get_single_data_loader( data_type=config["dataset"]["type"], data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert isinstance(got, PairedDataLoader) # single unpaired data loader config = load_yaml("config/test/unpaired_nifti.yaml") got = load.get_single_data_loader( 
data_type=config["dataset"]["type"], data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert isinstance(got, UnpairedDataLoader) # single grouped data loader config = load_yaml("config/test/grouped_nifti.yaml") got = load.get_single_data_loader( data_type=config["dataset"]["type"], data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert isinstance(got, GroupedDataLoader) # not supported data loader config = load_yaml("config/test/paired_nifti.yaml") with pytest.raises(ValueError) as err_info: load.get_single_data_loader( data_type="NotSupported", data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert "Unknown data format" in str(err_info.value) # wrong keys for paired loader config = load_yaml("config/test/paired_nifti.yaml") # delete correct keys and add wrong one config["dataset"].pop("moving_image_shape", None) config["dataset"].pop("fixed_image_shape", None) with pytest.raises(ValueError) as err_info: load.get_single_data_loader( data_type="paired", data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert "Paired Loader requires 'moving_image_shape' and 'fixed_image_shape'" in str( err_info.value ) # wrong keys for unpaired loader config = load_yaml("config/test/unpaired_nifti.yaml") # delete correct keys and add wrong one config["dataset"].pop("image_shape", None) with pytest.raises(ValueError) as err_info: load.get_single_data_loader( data_type="unpaired", data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert "Unpaired Loader requires 'image_shape'" in str(err_info.value) # wrong keys for grouped loader config = load_yaml("config/test/unpaired_nifti.yaml") # delete correct keys and add wrong one config["dataset"].pop("intra_group_prob", None) with 
pytest.raises(ValueError) as err_info: load.get_single_data_loader( data_type="grouped", data_config=config["dataset"], common_args=common_args, data_dir_paths=[config["dataset"]["dir"]["train"]], ) assert "Grouped Loader requires 'image_shape'" in str(err_info.value)
38.286486
100
0.691233
925
7,083
5.085405
0.125405
0.102253
0.056548
0.080782
0.790391
0.77466
0.754889
0.747024
0.740859
0.721514
0
0.001045
0.189044
7,083
184
101
38.494565
0.817897
0.120006
0
0.616
0
0
0.225885
0.096135
0
0
0
0
0.168
1
0.024
false
0
0.056
0
0.088
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c101acad67fe9cceca2b7bea2708f02fccec8303
177
py
Python
extension.py
javaarchive/idle
b55e7808fef3f2c16c0652faad7f7434207bc2d0
[ "MIT" ]
null
null
null
extension.py
javaarchive/idle
b55e7808fef3f2c16c0652faad7f7434207bc2d0
[ "MIT" ]
1
2017-03-16T01:36:03.000Z
2017-03-16T01:36:03.000Z
extension.py
javaarchive/PIDLE
b55e7808fef3f2c16c0652faad7f7434207bc2d0
[ "MIT" ]
null
null
null
class Plugin: def __init__(self,stuff): self.parms=stuff def find(self): self.parms[3].insert(0.0,"find not finished")
22.125
54
0.502825
21
177
4.047619
0.619048
0.211765
0
0
0
0
0
0
0
0
0
0.027523
0.384181
177
7
55
25.285714
0.752294
0
0
0
0
0
0.1
0
0
0
0
0
0
1
0.4
false
0
0
0
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
c12eafc362cb79f4938bce92d9314f6611f0f750
132
py
Python
fastapi_skeleton/models/mood.py
Sayar1106/OTTPlatformRecommender
85b72dfe9f810e3b6e12f8c7702ef94db3a03190
[ "MIT" ]
null
null
null
fastapi_skeleton/models/mood.py
Sayar1106/OTTPlatformRecommender
85b72dfe9f810e3b6e12f8c7702ef94db3a03190
[ "MIT" ]
1
2020-09-10T17:48:09.000Z
2020-09-10T17:50:28.000Z
fastapi_skeleton/models/mood.py
Sayar1106/OTTPlatformRecommender
85b72dfe9f810e3b6e12f8c7702ef94db3a03190
[ "MIT" ]
1
2020-09-25T11:32:57.000Z
2020-09-25T11:32:57.000Z
from pydantic import BaseModel from fastapi_skeleton.models.payload import (song) class MoodListResult(BaseModel): songs: str
18.857143
50
0.80303
16
132
6.5625
0.8125
0
0
0
0
0
0
0
0
0
0
0
0.136364
132
7
51
18.857143
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c1304b1b0f686111bc6092c8a250e17da719896a
1,134
py
Python
tests/base/conftest.py
Gabinou/NHPPy
1068b1548d008771a58d5479d8333703c54abbed
[ "MIT" ]
51
2019-02-01T19:43:37.000Z
2022-03-16T09:07:03.000Z
tests/base/conftest.py
noisyoscillator/stochastic
168659c36fd16a33f69b1f21654a7661286dc9d0
[ "MIT" ]
2
2019-02-23T18:54:22.000Z
2019-11-09T01:30:32.000Z
tests/base/conftest.py
noisyoscillator/stochastic
168659c36fd16a33f69b1f21654a7661286dc9d0
[ "MIT" ]
35
2019-02-08T02:00:31.000Z
2022-03-01T23:17:00.000Z
"""Pytest fixtures.""" # flake8: noqa import pytest # Checks class fixtures @pytest.fixture(params=[4, 4.2, "4", -4]) def increments_fixture(request): return request.param @pytest.fixture(params=[4, 4.2, "4"]) def number_fixture(request): return request.param @pytest.fixture(params=[4, 0, -4]) def positive_number_fixture(request): return request.param @pytest.fixture(params=["PARAMETER_NAME"]) def parameter_name_fixture(request): return request.param @pytest.fixture(params=[4, 0, -4]) def nonnegative_number_fixture(request): return request.param @pytest.fixture(params=[True, False, 0]) def zero_fixture(request): return request.param has_negative = [-5, -4, 0, 4, 5] bad_order = [0, 3, 2, 5] good_example = list(range(10)) @pytest.fixture(params=[has_negative, bad_order, good_example]) def times_fixture(request): return request.param # Continuous class fixtures @pytest.fixture(params=[True, False]) def zero(request): return request.param @pytest.fixture(params=[16]) def n(request): return request.param @pytest.fixture(params=[1]) def end(request): return request.param
22.235294
63
0.723986
160
1,134
5.025
0.2625
0.161692
0.236318
0.310945
0.655473
0.497512
0.497512
0.355721
0.355721
0.141791
0
0.031408
0.12963
1,134
50
64
22.68
0.783181
0.068783
0
0.352941
0
0
0.015267
0
0
0
0
0
0
1
0.294118
false
0
0.029412
0.294118
0.617647
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
c1332ce51bdac56785471aeb9c850075e78657fc
53
py
Python
d3m/__main__.py
tods-doc/tamu_d3m
a6a05f022ea60ab9787cbd89659ea8e0062ca22b
[ "Apache-2.0" ]
null
null
null
d3m/__main__.py
tods-doc/tamu_d3m
a6a05f022ea60ab9787cbd89659ea8e0062ca22b
[ "Apache-2.0" ]
null
null
null
d3m/__main__.py
tods-doc/tamu_d3m
a6a05f022ea60ab9787cbd89659ea8e0062ca22b
[ "Apache-2.0" ]
null
null
null
import sys from d3m import cli cli.main(sys.argv)
7.571429
19
0.735849
10
53
3.9
0.7
0
0
0
0
0
0
0
0
0
0
0.023256
0.188679
53
6
20
8.833333
0.883721
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c16a4562ad9039f6a6589dfdf3d2441fb3d01ae2
1,591
py
Python
predictionserver/habits.py
EricZLou/predictionserver
5f9d4a6711f9941f3c221ef1dccf57418dd17306
[ "MIT" ]
null
null
null
predictionserver/habits.py
EricZLou/predictionserver
5f9d4a6711f9941f3c221ef1dccf57418dd17306
[ "MIT" ]
null
null
null
predictionserver/habits.py
EricZLou/predictionserver
5f9d4a6711f9941f3c221ef1dccf57418dd17306
[ "MIT" ]
null
null
null
from predictionserver.futureconventions.autoconfigure import AutoConfigure from predictionserver.serverhabits.balancehabits import BalanceHabits from predictionserver.serverhabits.keyhabits import KeyHabits from predictionserver.serverhabits.attributehabits import AttributeHabits from predictionserver.serverhabits.horizonhabits import HorizonHabits from predictionserver.serverhabits.naminghabits import NamingHabits from predictionserver.serverhabits.laggedhabits import LaggedHabits from predictionserver.serverhabits.memohabits import MemoHabits from predictionserver.serverhabits.scenariohabits import ScenarioHabits from predictionserver.serverhabits.ownershiphabits import OwnershipHabits from predictionserver.serverhabits.plottinghabits import PlottingHabits from predictionserver.serverhabits.statshabits import StatsHabits from predictionserver.futureconventions.zcurveconventions import ZCurveConventions from predictionserver.serverhabits.metrichabits import MetricHabits from predictionserver.serverhabits.hashhabits import HashHabits from predictionserver.serverhabits.sortedsethabits import SortedSetHabits class Habits(PlottingHabits, LaggedHabits, HorizonHabits, ScenarioHabits, StatsHabits, NamingHabits, AttributeHabits, MetricHabits, HashHabits, SortedSetHabits, MemoHabits, BalanceHabits, OwnershipHabits, KeyHabits, AutoConfigure, ZCurveConventions): def __init__(self,**kwargs): super().__init__(**kwargs) if __name__=='__main__': habits = Habits() print(habits.BALANCE) print(habits._OWNERS()) # <--- will fail
53.033333
102
0.853551
134
1,591
10.007463
0.268657
0.238628
0.334079
0
0
0
0
0
0
0
0
0
0.093652
1,591
30
103
53.033333
0.929958
0.0088
0
0
0
0
0.005076
0
0
0
0
0
0
1
0.04
false
0
0.64
0
0.72
0.08
0
0
1
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
c1905719e923d7007fa44f52324618a56328bcc6
275
py
Python
src/cr/__init__.py
Crunch-io/crunch-cube
80986d5b2106c774f05176fb6c6a5ea0d840f09d
[ "MIT" ]
3
2021-01-22T20:42:31.000Z
2021-06-02T17:53:19.000Z
src/cr/__init__.py
Crunch-io/crunch-cube
80986d5b2106c774f05176fb6c6a5ea0d840f09d
[ "MIT" ]
331
2017-11-13T22:41:56.000Z
2021-12-02T21:59:43.000Z
src/cr/__init__.py
Crunch-io/crunch-cube
80986d5b2106c774f05176fb6c6a5ea0d840f09d
[ "MIT" ]
1
2021-02-19T02:49:00.000Z
2021-02-19T02:49:00.000Z
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages try: # pragma: no cover __import__("pkg_resources").declare_namespace(__name__) except ImportError: # pragma: no cover __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
45.833333
75
0.767273
32
275
5.875
0.78125
0.085106
0.138298
0.202128
0
0
0
0
0
0
0
0
0.112727
275
5
76
55
0.770492
0.432727
0
0
0
0
0.133333
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c1ba571884c0e66477ff1de2fe45a395773e14c9
136
py
Python
astroquery/casda/tests/setup_package.py
rickynilsson/astroquery
b7edec0d8e36b11c25baa39ad72e4160bc30d465
[ "BSD-3-Clause" ]
577
2015-02-12T18:23:49.000Z
2022-03-22T21:38:58.000Z
astroquery/casda/tests/setup_package.py
rickynilsson/astroquery
b7edec0d8e36b11c25baa39ad72e4160bc30d465
[ "BSD-3-Clause" ]
1,812
2015-01-01T08:02:20.000Z
2022-03-31T13:03:52.000Z
astroquery/casda/tests/setup_package.py
rickynilsson/astroquery
b7edec0d8e36b11c25baa39ad72e4160bc30d465
[ "BSD-3-Clause" ]
322
2015-02-23T19:31:29.000Z
2022-03-25T18:51:30.000Z
import os def get_package_data(): paths_test = [os.path.join('data', '*.xml')] return {'astroquery.casda.tests': paths_test}
17
49
0.661765
19
136
4.526316
0.789474
0.209302
0
0
0
0
0
0
0
0
0
0
0.161765
136
7
50
19.428571
0.754386
0
0
0
0
0
0.227941
0.161765
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
c1d5c7648b7e134f6e349aed4454b29d6a695e90
153
py
Python
src/main/__init__.py
kyehyukahn/scp-prototype
4e92b47ab82068a154c407c22e8c396196a31942
[ "Apache-2.0" ]
null
null
null
src/main/__init__.py
kyehyukahn/scp-prototype
4e92b47ab82068a154c407c22e8c396196a31942
[ "Apache-2.0" ]
null
null
null
src/main/__init__.py
kyehyukahn/scp-prototype
4e92b47ab82068a154c407c22e8c396196a31942
[ "Apache-2.0" ]
null
null
null
from .application import ( # noqa Application, ) from .base import ( # noqa BaseApplication, ) from .event import ( # noqa EventManager, )
15.3
34
0.653595
15
153
6.666667
0.533333
0.3
0
0
0
0
0
0
0
0
0
0
0.248366
153
9
35
17
0.869565
0.091503
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
de0ac14d3fd2097fd5f18b0ae40e9bc286167997
96
py
Python
caquita_backend/score/admin.py
EmmanuelPerezP/Caquita
708c1582459e9157bb7dc31170b8a0f11b2d03d0
[ "MIT" ]
null
null
null
caquita_backend/score/admin.py
EmmanuelPerezP/Caquita
708c1582459e9157bb7dc31170b8a0f11b2d03d0
[ "MIT" ]
null
null
null
caquita_backend/score/admin.py
EmmanuelPerezP/Caquita
708c1582459e9157bb7dc31170b8a0f11b2d03d0
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import HighScore admin.site.register(HighScore)
16
32
0.822917
13
96
6.076923
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.114583
96
5
33
19.2
0.929412
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a9ab1fc4e75c67f70c718a89349c465f3a92f0a5
98
py
Python
app/api_1_0/__init__.py
amsayeed/Flask-Data-visualization
96b5b0b3cd2a17c8faeae8771e1756987a794798
[ "MIT" ]
10
2020-02-29T17:39:47.000Z
2021-01-01T22:38:32.000Z
app/api_1_0/__init__.py
amsayeed/Flask-Data-visualization
96b5b0b3cd2a17c8faeae8771e1756987a794798
[ "MIT" ]
null
null
null
app/api_1_0/__init__.py
amsayeed/Flask-Data-visualization
96b5b0b3cd2a17c8faeae8771e1756987a794798
[ "MIT" ]
6
2020-02-29T16:40:59.000Z
2021-03-16T19:25:06.000Z
from flask import Blueprint bp = Blueprint('data_api', __name__) from app.api_1_0 import routes
16.333333
36
0.785714
16
98
4.375
0.75
0
0
0
0
0
0
0
0
0
0
0.02381
0.142857
98
5
37
19.6
0.809524
0
0
0
0
0
0.081633
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
5
a9bbf4a575d64951be04f6e7df7fbb9a9e34c3ad
52
py
Python
pm4pyws/handlers/parquet/ctmc/__init__.py
ehbasouri/pm4py-ws
9bf5f88848a4aa2873bae86af95d37f64ae1dde1
[ "Apache-2.0" ]
null
null
null
pm4pyws/handlers/parquet/ctmc/__init__.py
ehbasouri/pm4py-ws
9bf5f88848a4aa2873bae86af95d37f64ae1dde1
[ "Apache-2.0" ]
null
null
null
pm4pyws/handlers/parquet/ctmc/__init__.py
ehbasouri/pm4py-ws
9bf5f88848a4aa2873bae86af95d37f64ae1dde1
[ "Apache-2.0" ]
null
null
null
from pm4pyws.handlers.parquet.ctmc import transient
26
51
0.865385
7
52
6.428571
1
0
0
0
0
0
0
0
0
0
0
0.020833
0.076923
52
1
52
52
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a9e4eb913002c25b43d0bebe6d453788c9d7f01b
888
py
Python
thash/WSN_antiguo/app/models.py
Limiloiko/WSN_WebServer
78d804d19f9db8dcb451dfb6bca4f2911ddd69c6
[ "MIT" ]
null
null
null
thash/WSN_antiguo/app/models.py
Limiloiko/WSN_WebServer
78d804d19f9db8dcb451dfb6bca4f2911ddd69c6
[ "MIT" ]
null
null
null
thash/WSN_antiguo/app/models.py
Limiloiko/WSN_WebServer
78d804d19f9db8dcb451dfb6bca4f2911ddd69c6
[ "MIT" ]
1
2019-06-15T07:49:33.000Z
2019-06-15T07:49:33.000Z
"""models.py Este modulo almaena las tablas de la base de datos del servidor web! """ from app import db from werkzeug.security import check_password_hash, generate_password_hash from app import login from flask_login import UserMixin class User(UserMixin, db.Model): id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(64), index=True, unique=True) email = db.Column(db.String(120), index=True, unique=True) password_hash = db.Column(db.String(128)) def __repr__(self): return '<User {}>'.format(self.username) def set_password(self, password): self.password_hash = generate_password_hash(password) def check_password(self, password): return check_password_hash(self.password_hash, password) @login.user_loader def load_user(id): return User.query.get(int(id))
26.909091
73
0.699324
124
888
4.83871
0.443548
0.14
0.066667
0.08
0.106667
0
0
0
0
0
0
0.011236
0.198198
888
33
74
26.909091
0.831461
0.087838
0
0
1
0
0.011321
0
0
0
0
0
0
1
0.222222
false
0.333333
0.222222
0.166667
0.888889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
5
a9f627de83e66de3421bf1148346155e4f779f57
91
py
Python
Python/desafios/d21.py
prazerfelipe/Python
b1f4c768b1823d34898935bbd037ae43711c79e1
[ "MIT" ]
null
null
null
Python/desafios/d21.py
prazerfelipe/Python
b1f4c768b1823d34898935bbd037ae43711c79e1
[ "MIT" ]
null
null
null
Python/desafios/d21.py
prazerfelipe/Python
b1f4c768b1823d34898935bbd037ae43711c79e1
[ "MIT" ]
null
null
null
import pygame pygame.init() pygame.mixer.music.load('d21.mp3') pygame.mixer.music.play()
13
34
0.747253
14
91
4.857143
0.642857
0.323529
0.470588
0
0
0
0
0
0
0
0
0.035714
0.076923
91
6
35
15.166667
0.77381
0
0
0
0
0
0.077778
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
e70f74ed92b2eca6fc94971a15dfa7acb5d1e7ca
124
py
Python
src/parsimony/generators/__init__.py
ryanfeather/parsimony
0d3bbe247b47234a0c15962e538b2f04609c4a33
[ "MIT" ]
1
2018-07-02T11:08:29.000Z
2018-07-02T11:08:29.000Z
src/parsimony/generators/__init__.py
ryanfeather/parsimony
0d3bbe247b47234a0c15962e538b2f04609c4a33
[ "MIT" ]
5
2015-03-19T13:29:29.000Z
2015-04-04T19:47:01.000Z
src/parsimony/generators/__init__.py
ryanfeather/parsimony
0d3bbe247b47234a0c15962e538b2f04609c4a33
[ "MIT" ]
null
null
null
from .generator import Generator from .file import TextFile, PathMonitor from .callable_wrapper import StoredCallableWrapper
41.333333
51
0.870968
14
124
7.642857
0.642857
0
0
0
0
0
0
0
0
0
0
0
0.096774
124
3
51
41.333333
0.955357
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e713bbf9d5c72f377bb527a6d649363f0c91d4cd
304
py
Python
src/python/WMComponent/PhEDExInjector/Database/Oracle/GetMigratedBlocks.py
cbbrainerd/WMCore
317969fdcfbfbb957e74aa1b45f92408d05a09a8
[ "Apache-2.0" ]
null
null
null
src/python/WMComponent/PhEDExInjector/Database/Oracle/GetMigratedBlocks.py
cbbrainerd/WMCore
317969fdcfbfbb957e74aa1b45f92408d05a09a8
[ "Apache-2.0" ]
1
2016-10-13T14:57:35.000Z
2016-10-13T14:57:35.000Z
src/python/WMComponent/PhEDExInjector/Database/Oracle/GetMigratedBlocks.py
juztas/WMCore
f7e830a573d50fb1d7240797f18d809f994b934d
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python """ _GetMigratedBlocks_ Oracle implementation on PhEDExInjector.GetMigratedBlocks. """ from WMComponent.PhEDExInjector.Database.MySQL.GetMigratedBlocks import GetMigratedBlocks as MySQLBase class GetMigratedBlocks(MySQLBase): """ _GetMigratedBlocks_ """ pass
16
102
0.769737
25
304
9.2
0.72
0
0
0
0
0
0
0
0
0
0
0
0.144737
304
18
103
16.888889
0.884615
0.394737
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
5
e7302b3c11edbb438db4447042d5143b7cd6b0c1
63
py
Python
exercises/02_argparse/sys_argv.py
AnnieBrunton/biosystems-analytics-2020
219e961b07e62dae6c27675e1de94cb56c9adb8e
[ "MIT" ]
14
2019-07-14T08:29:04.000Z
2022-03-07T06:33:26.000Z
exercises/02_argparse/sys_argv.py
AnnieBrunton/biosystems-analytics-2020
219e961b07e62dae6c27675e1de94cb56c9adb8e
[ "MIT" ]
1
2020-02-11T20:15:59.000Z
2020-02-11T20:15:59.000Z
exercises/02_argparse/sys_argv.py
AnnieBrunton/biosystems-analytics-2020
219e961b07e62dae6c27675e1de94cb56c9adb8e
[ "MIT" ]
24
2020-01-15T17:34:40.000Z
2021-08-23T05:57:24.000Z
#!/usr/bin/env python3 import sys print('\n'.join(sys.argv))
10.5
26
0.666667
11
63
3.818182
0.909091
0
0
0
0
0
0
0
0
0
0
0.017857
0.111111
63
5
27
12.6
0.732143
0.333333
0
0
0
0
0.04878
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
e7388719889174f2c633b00c9767409c8ddbd4f4
3,788
py
Python
tf1/tensorflow_collocation/Elastodynamics/utils/gridPlot.py
ISM-Weimar/DeepEnergyMethods
3a51131e0827446bf5986d698aaac396c7fa5037
[ "MIT" ]
15
2019-09-11T01:35:03.000Z
2022-03-25T03:17:59.000Z
tf1/tensorflow_collocation/Elastodynamics/utils/gridPlot.py
yizheng-wang/DeepEnergyMethods
da77802b6b230e6962bf2c0a7918340989640d77
[ "MIT" ]
2
2020-10-16T19:06:07.000Z
2022-01-06T16:23:49.000Z
tf1/tensorflow_collocation/Elastodynamics/utils/gridPlot.py
yizheng-wang/DeepEnergyMethods
da77802b6b230e6962bf2c0a7918340989640d77
[ "MIT" ]
15
2019-09-11T01:35:05.000Z
2022-02-18T21:18:55.000Z
import os import matplotlib.pyplot as plt import numpy as np def createFolder(folder_name): try: if not os.path.exists(folder_name): os.makedirs(folder_name) except OSError: print ('Error: Creating folder. ' + folder_name) def plot1d(u_pred_err,X_star,u_pred,u_exact,v_pred,v_exact,v_pred_err,figHeight,figWidth): filename = 'disp_err' plt.figure(figsize=(figWidth, figHeight)) plt.plot(X_star, 100*u_pred_err, c='b', linewidth=2.0) #plt.title('$\phi_{comp}$ and $\phi_{exact}$') plt.xticks(fontsize=12) plt.yticks(fontsize=12) plt.xlabel('x',fontweight='bold',fontsize=14) plt.ylabel('Relative $\%$ error in displacement',fontweight='bold',fontsize=14) plt.tight_layout() plt.savefig(filename +".pdf", dpi=700, facecolor='w', edgecolor='w', transparent = 'true', bbox_inches = 'tight') plt.show() plt.close() filename = 'disp' plt.figure(figsize=(figWidth, figHeight)) ax0, = plt.plot(X_star,u_pred, label='$u_{comp}$', c='b', linewidth=2.0) ax1, = plt.plot(X_star,u_exact, label='$u_{exact}$', c='r',linewidth=2.0) plt.legend(handles=[ax0,ax1],fontsize=14) #plt.title('$\phi_{comp}$ and $\phi_{exact}$') plt.xticks(fontsize=12) plt.yticks(fontsize=12) plt.xlabel('x',fontweight='bold',fontsize=14) plt.ylabel('Displacement',fontweight='bold',fontsize=14) plt.tight_layout() plt.savefig(filename +".pdf", dpi=700, facecolor='w', edgecolor='w', transparent = 'true', bbox_inches = 'tight') plt.show() plt.close() filename = 'velocity' plt.figure(figsize=(figWidth, figHeight)) ax0, = plt.plot(X_star,v_pred, label='$v_{comp}$', c='b', linewidth=2.0) ax1, = plt.plot(X_star,v_exact, label='$v_{exact}$', c='r',linewidth=2.0) plt.legend(handles=[ax0,ax1],fontsize=14) #plt.title('$\phi_{comp}$ and $\phi_{exact}$') plt.xticks(fontsize=12) plt.yticks(fontsize=12) plt.xlabel('x',fontweight='bold',fontsize=14) plt.ylabel('Velocity',fontweight='bold',fontsize=14) plt.tight_layout() plt.savefig(filename +".pdf", dpi=700, facecolor='w', edgecolor='w', transparent = 'true', bbox_inches = 'tight') 
plt.show() plt.close() filename = 'vel_err' plt.figure(figsize=(figWidth, figHeight)) plt.plot(X_star, 100*v_pred_err, c='b', linewidth=2.0) #plt.title('$\phi_{comp}$ and $\phi_{exact}$') plt.xticks(fontsize=12) plt.yticks(fontsize=12) plt.xlabel('x',fontweight='bold',fontsize=14) plt.ylabel('Relative $\%$ error in velocity',fontweight='bold',fontsize=14) plt.tight_layout() plt.savefig(filename +".pdf", dpi=700, facecolor='w', edgecolor='w', transparent = 'true', bbox_inches = 'tight') plt.show() plt.close() def plotConvergence(iter,adam_buff,lbfgs_buff,figHeight,figWidth): filename = "convergence" plt.figure(figsize=(figWidth, figHeight)) range_adam = np.arange(1,iter+1) range_lbfgs = np.arange(iter+2, iter+2+len(lbfgs_buff)) ax0, = plt.semilogy(range_adam, adam_buff, c='b', label='Adam',linewidth=2.0) ax1, = plt.semilogy(range_lbfgs, lbfgs_buff, c='r', label='L-BFGS',linewidth=2.0) plt.legend(handles=[ax0,ax1],fontsize=14) plt.xticks(fontsize=12) plt.yticks(fontsize=12) plt.xlabel('Iteration',fontweight='bold',fontsize=14) plt.ylabel('Loss value',fontweight='bold',fontsize=14) plt.tight_layout() plt.savefig(filename +".pdf", dpi=700, facecolor='w', edgecolor='w', transparent = 'true', bbox_inches = 'tight') plt.show() plt.close()
40.297872
91
0.62302
510
3,788
4.509804
0.194118
0.056522
0.073478
0.104348
0.761304
0.737826
0.723478
0.723478
0.723478
0.723478
0
0.033036
0.200898
3,788
93
92
40.731183
0.726792
0.047518
0
0.594937
0
0
0.098576
0
0
0
0
0
0
1
0.037975
false
0
0.037975
0
0.075949
0.012658
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e7683145b651d7a28a62f3f427bc50b81ae00052
110
py
Python
ActivitySync/geo/admin.py
FreexD/ActivitySyncDjango
93cebb65ef5c68dd1082fd7fe1d121a11eeafae8
[ "Beerware" ]
null
null
null
ActivitySync/geo/admin.py
FreexD/ActivitySyncDjango
93cebb65ef5c68dd1082fd7fe1d121a11eeafae8
[ "Beerware" ]
null
null
null
ActivitySync/geo/admin.py
FreexD/ActivitySyncDjango
93cebb65ef5c68dd1082fd7fe1d121a11eeafae8
[ "Beerware" ]
null
null
null
from django.contrib import admin from ActivitySync.geo.models import Location admin.site.register(Location)
18.333333
44
0.836364
15
110
6.133333
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.1
110
5
45
22
0.929293
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e7db83d7d78c6a0bfe4c614e333d3e78b36aa04d
1,989
py
Python
song/examples/allessoeinfach.py
Rocka84/Hackulele
a93d4a7a5501bd2976701f62a613b04230edcc28
[ "Apache-2.0" ]
null
null
null
song/examples/allessoeinfach.py
Rocka84/Hackulele
a93d4a7a5501bd2976701f62a613b04230edcc28
[ "Apache-2.0" ]
null
null
null
song/examples/allessoeinfach.py
Rocka84/Hackulele
a93d4a7a5501bd2976701f62a613b04230edcc28
[ "Apache-2.0" ]
null
null
null
from song import Song class AllesSoEinfach(Song): def __init__(self, chord_repo): super(AllesSoEinfach, self).__init__(chord_repo) self.name = "Alles so einfach" self.bpm = 140 self.elements = [ # Intro self._createElement("C", 4), self._createElement("Am", 4), self._createElement("Dm", 4), self._createElement("G", 4), self._createElement("C", 4), self._createElement("Am", 4), self._createElement("Dm", 4), self._createElement("G", 4), # Strophe self._createElement("Am", 4), self._createElement("Em", 4), self._createElement("F", 4), self._createElement("G", 4), self._createElement("Am", 4), self._createElement("Em", 4), self._createElement("F", 4), self._createElement("G", 4), self._createElement("F", 4), self._createElement("G", 4), self._createElement("C", 4), self._createElement("E", 4), self._createElement("F", 8), self._createElement("G", 4), self._createElement("G", 1), # refrain self._createElement("0", 3), self._createElement("C", 4), self._createElement("Em", 4), self._createElement("B", 4), self._createElement("F", 4), self._createElement("C", 4), self._createElement("Em", 4), self._createElement("B", 4), self._createElement("F", 4), self._createElement("C", 8), self._createElement("Am", 8), self._createElement("F", 8), self._createElement("G", 8), ] self.reset()
31.571429
56
0.457516
177
1,989
4.881356
0.20339
0.708333
0.583333
0.131944
0.752315
0.752315
0.731481
0.645833
0.645833
0.645833
0
0.033585
0.401207
1,989
62
57
32.080645
0.691856
0.010558
0
0.666667
0
0
0.03211
0
0
0
0
0
0
1
0.022222
false
0
0.022222
0
0.066667
0
0
0
0
null
1
1
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
41396461fb811a1d57834c999743436de44cc8c0
60
py
Python
ufork/__init__.py
kurtbrose/ufork
4674abc5a27a2e3076e28756685512359e8e809f
[ "BSD-3-Clause" ]
5
2016-09-13T05:12:42.000Z
2021-08-31T11:15:59.000Z
ufork/__init__.py
skhortiuk/ufork
aa96ae8e903c0e95b0b307e8c738e8455ab477f4
[ "MIT" ]
7
2016-10-13T01:08:01.000Z
2021-06-24T19:09:29.000Z
ufork/__init__.py
skhortiuk/ufork
aa96ae8e903c0e95b0b307e8c738e8455ab477f4
[ "MIT" ]
4
2017-04-11T20:29:41.000Z
2020-10-12T15:22:33.000Z
from __future__ import absolute_import from .ufork import *
20
38
0.833333
8
60
5.625
0.625
0
0
0
0
0
0
0
0
0
0
0
0.133333
60
2
39
30
0.865385
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
413f106d776ed32cf9cf7183f40cbb341e0b8651
67
py
Python
systemofrecord/services/configure_logging.py
LandRegistry/system-of-record-alpha
135172e4c69bb82301b5b66039142d6fa0c56d70
[ "MIT" ]
null
null
null
systemofrecord/services/configure_logging.py
LandRegistry/system-of-record-alpha
135172e4c69bb82301b5b66039142d6fa0c56d70
[ "MIT" ]
null
null
null
systemofrecord/services/configure_logging.py
LandRegistry/system-of-record-alpha
135172e4c69bb82301b5b66039142d6fa0c56d70
[ "MIT" ]
1
2021-04-11T06:07:06.000Z
2021-04-11T06:07:06.000Z
from flask import logging from systemofrecord.server import app
11.166667
37
0.820896
9
67
6.111111
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.164179
67
5
38
13.4
0.982143
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
4153c2e2eb90eb8b463cca88d69263d03ac26715
5,073
py
Python
data_loader/data_loaders.py
lehduong/Knowledge-Distilation-CNN
dfb7b881de9740260a59e83a7a4f5dbba8787c23
[ "MIT" ]
9
2020-01-21T04:27:18.000Z
2020-04-12T03:35:54.000Z
data_loader/data_loaders.py
lehduong/Knowledge-Distilation-CNN
dfb7b881de9740260a59e83a7a4f5dbba8787c23
[ "MIT" ]
2
2020-03-05T10:42:10.000Z
2020-03-06T12:41:27.000Z
data_loader/data_loaders.py
lehduong/Knowledge-Distilation-CNN
dfb7b881de9740260a59e83a7a4f5dbba8787c23
[ "MIT" ]
2
2020-05-20T07:42:03.000Z
2021-10-08T02:48:08.000Z
from torchvision import datasets from torchvision import transforms as tfs from base import BaseDataLoader from .cityscapes import Cityscapes, CityScapesUniform from torch.utils.data import ConcatDataset class Cifar100Dataloader(BaseDataLoader): """ CIFAR100 data loading using BaseDataloder """ def __init__(self, data_dir, batch_size, shuffle=True, validation_split=0.0, num_workers=1, training=True): if training: trsfm = tfs.Compose([ tfs.RandomCrop(32, padding=4), tfs.RandomHorizontalFlip(), tfs.ToTensor(), tfs.Normalize((0.4914, 0.4822, 0.4465),(0.2023, 0.1994, 0.2010)) ]) else: trsfm = tfs.Compose([ tfs.ToTensor(), tfs.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)) ]) self.data_dir = data_dir self.dataset = datasets.CIFAR100(self.data_dir, train=training, download=True, transform=trsfm) super().__init__(self.dataset, batch_size, shuffle, validation_split, num_workers) class Cifar10Dataloader(BaseDataLoader): """ CIFAR10 data loading using BaseDataloder """ def __init__(self, data_dir, batch_size, shuffle=True, validation_split=0.0, num_workers=1, training=True): if training: trsfm = tfs.Compose([ tfs.RandomCrop(32, padding=4), tfs.RandomHorizontalFlip(), tfs.ToTensor(), tfs.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) ]) else: trsfm = tfs.Compose([ tfs.ToTensor(), tfs.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) ]) self.data_dir = data_dir self.dataset = datasets.CIFAR10(self.data_dir, train=training, download=True, transform=trsfm) super().__init__(self.dataset, batch_size, shuffle, validation_split, num_workers) class CityscapesDataloader(BaseDataLoader): """ CityScape data loading using BaseDataLoader """ def __init__(self, data_dir, batch_size, shuffle=True, validation_split=0.0, num_workers=0, split='train', transform=None, target_transform=None, transforms=None, mode='fine', target_type='semantic', num_samples=None, return_image_name=False): self.data_dir = data_dir if split == 'train_val': 
train_dataset = self.dataset = Cityscapes(root=self.data_dir, transform=transform, transforms=transforms, target_transform=target_transform, split='train', mode=mode, target_type=target_type, num_samples=num_samples, return_image_name=return_image_name) val_dataset = self.dataset = Cityscapes(root=self.data_dir, transform=transform, transforms=transforms, target_transform=target_transform, split='val', mode=mode, target_type=target_type, num_samples=num_samples, return_image_name=return_image_name) self.dataset = ConcatDataset([train_dataset, val_dataset]) else: self.dataset = Cityscapes(root=self.data_dir, transform=transform, transforms=transforms, target_transform=target_transform, split=split, mode=mode, target_type=target_type, num_samples=num_samples, return_image_name=return_image_name) super().__init__(self.dataset, batch_size, shuffle, validation_split, num_workers) class CityscapesUniformDataloader(BaseDataLoader): def __init__(self, data_dir, batch_size, shuffle=True, validation_split=0.0, num_workers=0, split='train', transform=None, target_transform=None, transforms=None, mode='fine', target_type='semantic', class_uniform_pct=0.5, class_uniform_tile = 1024, num_samples=None, return_image_name=False): self.data_dir = data_dir if split == 'train_val': raise ValueError("Only support train split for Uniform Cityscapes") else: self.dataset = CityScapesUniform(root=self.data_dir, quality=mode, mode=split, joint_transform_list=transforms, transform=transform, target_transform=target_transform, class_uniform_pct=class_uniform_pct, class_uniform_tile=class_uniform_tile, num_samples=num_samples, return_image_name=return_image_name) super().__init__(self.dataset, batch_size, shuffle, validation_split, num_workers)
51.242424
119
0.590578
533
5,073
5.354597
0.181989
0.044149
0.053959
0.021023
0.758935
0.744219
0.744219
0.744219
0.704975
0.704975
0
0.042461
0.317564
5,073
98
120
51.765306
0.781918
0.024837
0
0.671053
0
0
0.021837
0
0
0
0
0
0
1
0.052632
false
0
0.065789
0
0.171053
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
41a2acceaeb69f3040d342aa6c309603d5516d75
309
py
Python
S4/S4 Library/simulation/routing/path_planner/path_plan_enums.py
NeonOcean/Environment
ca658cf66e8fd6866c22a4a0136d415705b36d26
[ "CC-BY-4.0" ]
1
2021-05-20T19:33:37.000Z
2021-05-20T19:33:37.000Z
S4/S4 Library/simulation/routing/path_planner/path_plan_enums.py
NeonOcean/Environment
ca658cf66e8fd6866c22a4a0136d415705b36d26
[ "CC-BY-4.0" ]
null
null
null
S4/S4 Library/simulation/routing/path_planner/path_plan_enums.py
NeonOcean/Environment
ca658cf66e8fd6866c22a4a0136d415705b36d26
[ "CC-BY-4.0" ]
null
null
null
import enum import routing class FootprintKeyMaskBits(enum.IntFlags): SMALL_HEIGHT = routing.FOOTPRINT_KEY_REQUIRE_SMALL_HEIGHT TINY_HEIGHT = routing.FOOTPRINT_KEY_REQUIRE_TINY_HEIGHT FLOATING = routing.FOOTPRINT_KEY_REQUIRE_FLOATING LARGE_HEIGHT = routing.FOOTPRINT_KEY_REQUIRE_LARGE_HEIGHT
34.333333
61
0.847896
38
309
6.421053
0.368421
0.262295
0.311475
0.42623
0.393443
0
0
0
0
0
0
0
0.113269
309
8
62
38.625
0.890511
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.285714
0
1
0.142857
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
41a5db2f44a4e055874cf0b976c87b502c95f5ae
45
py
Python
dj_gui_api_server/__init__.py
ChihweiLHBird/pharus
44de192c3de73a2e32bec9239adae1cde7ed9439
[ "MIT" ]
null
null
null
dj_gui_api_server/__init__.py
ChihweiLHBird/pharus
44de192c3de73a2e32bec9239adae1cde7ed9439
[ "MIT" ]
null
null
null
dj_gui_api_server/__init__.py
ChihweiLHBird/pharus
44de192c3de73a2e32bec9239adae1cde7ed9439
[ "MIT" ]
null
null
null
from .version import __version__ __version__
15
32
0.866667
5
45
6.2
0.6
0
0
0
0
0
0
0
0
0
0
0
0.111111
45
2
33
22.5
0.775
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
41ba4734d14f87d1bcbfea8dadb02ecd7cd1ad93
1,495
py
Python
tests/pyutils/suggestion_list_test.py
ATyped/atgql
3fa9e09c9cea346dc42c205452487420ceb493e2
[ "MIT" ]
null
null
null
tests/pyutils/suggestion_list_test.py
ATyped/atgql
3fa9e09c9cea346dc42c205452487420ceb493e2
[ "MIT" ]
null
null
null
tests/pyutils/suggestion_list_test.py
ATyped/atgql
3fa9e09c9cea346dc42c205452487420ceb493e2
[ "MIT" ]
null
null
null
from atgql.pyutils.suggestion_list import suggestion_list def test_returns_when_input_is_empty(): assert suggestion_list('', ['a']) == ['a'] def test_returns_empty_array_when_there_are_no_options(): assert suggestion_list('input', []) == [] def test_returns_options_with_small_lexical_distance(): assert suggestion_list('greenish', ['green']) == ['green'] assert suggestion_list('green', ['greenish']) == ['greenish'] def test_rejects_options_with_small_lexical_distance(): assert suggestion_list('aaaa', ['aaab']) == ['aaab'] assert suggestion_list('aaaa', ['aabb']) == ['aabb'] assert suggestion_list('aaaa', ['abbb']) == [] assert suggestion_list('ab', ['ca']) == [] def test_returns_options_with_different_case(): assert suggestion_list('verylongstring', ['VERYLONGSTRING']) == ['VERYLONGSTRING'] assert suggestion_list('VERYLONGSTRING', ['verylongstring']) == ['verylongstring'] assert suggestion_list('VERYLONGSTRING', ['VeryLongString']) == ['VeryLongString'] def test_returns_options_with_transpositions(): assert suggestion_list('agr', ['arg']) == ['arg'] assert suggestion_list('214365879', ['123456789']) == ['123456789'] def test_returns_options_sorted_based_on_lexical_distance(): assert suggestion_list('abc', ['a', 'ab', 'abc']) == ['abc', 'ab', 'a'] def test_returns_options_with_same_lexical_distance_sorted_lexicographically(): assert suggestion_list('a', ['az', 'ax', 'ay']) == ['ax', 'ay', 'az']
33.977273
86
0.699666
166
1,495
5.903614
0.301205
0.242857
0.306122
0.107143
0.420408
0.293878
0.293878
0.293878
0.189796
0.189796
0
0.020564
0.121739
1,495
43
87
34.767442
0.725819
0
0
0
0
0
0.18194
0
0
0
0
0
0.625
1
0.333333
true
0
0.041667
0
0.375
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
5
41c014f1ca665aede207e05e417dbfdecd40bad8
201
py
Python
plotly/validators/surface/lighting/__init__.py
gnestor/plotly.py
a8ae062795ddbf9867b8578fe6d9e244948c15ff
[ "MIT" ]
12
2020-04-18T18:10:22.000Z
2021-12-06T10:11:15.000Z
plotly/validators/surface/lighting/__init__.py
Vesauza/plotly.py
e53e626d59495d440341751f60aeff73ff365c28
[ "MIT" ]
27
2020-04-28T21:23:12.000Z
2021-06-25T15:36:38.000Z
plotly/validators/surface/lighting/__init__.py
Vesauza/plotly.py
e53e626d59495d440341751f60aeff73ff365c28
[ "MIT" ]
6
2020-04-18T23:07:08.000Z
2021-11-18T07:53:06.000Z
from ._specular import SpecularValidator from ._roughness import RoughnessValidator from ._fresnel import FresnelValidator from ._diffuse import DiffuseValidator from ._ambient import AmbientValidator
33.5
42
0.875622
20
201
8.55
0.6
0
0
0
0
0
0
0
0
0
0
0
0.099502
201
5
43
40.2
0.944751
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
68c60ece6fb0177a2c47cda7e553a270d640cb5b
2,688
py
Python
signbank/dictionary/migrations/0009_auto_20151106_1019.py
GDiaz16/signbank
fe492965a6e1294eb036c3a2adc54d1c8ba19f0f
[ "BSD-3-Clause" ]
null
null
null
signbank/dictionary/migrations/0009_auto_20151106_1019.py
GDiaz16/signbank
fe492965a6e1294eb036c3a2adc54d1c8ba19f0f
[ "BSD-3-Clause" ]
3
2020-04-30T14:18:57.000Z
2021-06-10T22:30:30.000Z
signbank/dictionary/migrations/0009_auto_20151106_1019.py
GDiaz16/signbank
fe492965a6e1294eb036c3a2adc54d1c8ba19f0f
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dictionary', '0008_auto_20151030_1125'), ] operations = [ migrations.AlterField( model_name='gloss', name='annotation_comments', field=models.CharField(max_length=200, verbose_name='Comments', blank=True), ), migrations.AlterField( model_name='gloss', name='annotation_idgloss_hki', field=models.CharField(help_text="\n This is the Helsinki name of a sign used by annotators when glossing the corpus in\nan ELAN annotation file. The Helsinki Annotation Idgloss may be the same for two or\nmore entries (each with their own 'Sign Entry Name'). If two sign entries\nhave the same 'Annotation Idgloss' that means they differ in form in only\nminor or insignificant ways that can be ignored.", max_length=60, verbose_name='Gloss HKI', blank=True), ), migrations.AlterField( model_name='gloss', name='annotation_idgloss_hki_en', field=models.CharField(help_text='\n This is the English name for the corresponding Jyvaskyla Gloss', max_length=60, verbose_name='Gloss HKI (Eng)', blank=True), ), migrations.AlterField( model_name='gloss', name='annotation_idgloss_jkl', field=models.CharField(help_text="\n This is the Jyvaskyla name of a sign used by annotators when glossing the corpus in\nan ELAN annotation file. The Jyvaskyla Annotation Idgloss may be the same for two or\nmore entries (each with their own 'Sign Entry Name'). 
If two sign entries\nhave the same 'Annotation Idgloss' that means they differ in form in only\nminor or insignificant ways that can be ignored.", max_length=60, verbose_name='Gloss JKL', blank=True), ), migrations.AlterField( model_name='gloss', name='annotation_idgloss_jkl_en', field=models.CharField(help_text='\n This is the English name for the corresponding Jyvaskyla Gloss', max_length=60, verbose_name='Gloss JKL (Eng)', blank=True), ), migrations.AlterField( model_name='gloss', name='idgloss', field=models.CharField(help_text='\n This is the unique identifying name of an entry of a sign form in the\ndatabase. No two Sign Entry Names can be exactly the same, but a "Sign\nEntry Name" can be (and often is) the same as the Annotation Idgloss.', max_length=60, verbose_name='Gloss'), ), ]
59.733333
478
0.667039
355
2,688
4.932394
0.287324
0.056539
0.085665
0.099372
0.762993
0.762993
0.747573
0.713307
0.713307
0.617933
0
0.014793
0.245536
2,688
44
479
61.090909
0.848619
0.007813
0
0.473684
0
0.078947
0.513544
0.044639
0
0
0
0
0
1
0
false
0
0.052632
0
0.131579
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
68ec7bc393eb53584936408de5a325649b1537c5
223
py
Python
processors/__init__.py
schlers/electra_pytorch
94cf3fec8158c93162e2f253ce5c746bdf1a6399
[ "MIT" ]
94
2020-03-13T01:41:13.000Z
2022-02-17T22:55:29.000Z
processors/__init__.py
schlers/electra_pytorch
94cf3fec8158c93162e2f253ce5c746bdf1a6399
[ "MIT" ]
13
2020-03-17T11:09:16.000Z
2022-03-31T03:41:11.000Z
processors/__init__.py
schlers/electra_pytorch
94cf3fec8158c93162e2f253ce5c746bdf1a6399
[ "MIT" ]
11
2020-03-17T04:15:15.000Z
2021-08-02T09:59:42.000Z
from .utils import InputExample, InputFeatures, DataProcessor from .task_processor import (task_output_modes, task_processors, task_tasks_num_labels, convert_examples_to_features, collate_fn)
37.166667
87
0.753363
25
223
6.28
0.8
0
0
0
0
0
0
0
0
0
0
0
0.201794
223
5
88
44.6
0.882022
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ec0222055bf43c4f16b6bbfedd8136553837ac82
29
py
Python
electricityLoadForecasting/tools/exceptions/__init__.py
BCD65/electricityLoadForecasting
07a6ed060afaf7cc2906c0389b5c9e9b0fede193
[ "MIT" ]
null
null
null
electricityLoadForecasting/tools/exceptions/__init__.py
BCD65/electricityLoadForecasting
07a6ed060afaf7cc2906c0389b5c9e9b0fede193
[ "MIT" ]
null
null
null
electricityLoadForecasting/tools/exceptions/__init__.py
BCD65/electricityLoadForecasting
07a6ed060afaf7cc2906c0389b5c9e9b0fede193
[ "MIT" ]
null
null
null
from .exceptions import *
5.8
25
0.689655
3
29
6.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.241379
29
5
25
5.8
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ec3a6ae09b4cbce299c89276b12bc6c8bf7c82ce
15
py
Python
main.py
PressStart10/cs-compiler
d0272d157b2bd99a5428d1718d07245af1105595
[ "MIT" ]
null
null
null
main.py
PressStart10/cs-compiler
d0272d157b2bd99a5428d1718d07245af1105595
[ "MIT" ]
1
2020-11-22T12:24:23.000Z
2020-11-22T17:22:26.000Z
main.py
PressStart10/cs-compiler
d0272d157b2bd99a5428d1718d07245af1105595
[ "MIT" ]
null
null
null
print("ok,ok")
7.5
14
0.6
3
15
3
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.066667
15
1
15
15
0.642857
0
0
0
0
0
0.333333
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
ec3cf7ec4de827cf7dc06def28cb99d377968c8d
64
py
Python
old/testkivy/cheesepoofs/kerplunk/script.py
joshem/Autopen2
4dd6bedac5f8f5c5e9bf2457d82c2f6e41f1d045
[ "MIT" ]
4
2017-09-06T15:01:03.000Z
2018-02-01T19:00:24.000Z
old/testkivy/cheesepoofs/kerplunk/script.py
joshem/Autopen2
4dd6bedac5f8f5c5e9bf2457d82c2f6e41f1d045
[ "MIT" ]
null
null
null
old/testkivy/cheesepoofs/kerplunk/script.py
joshem/Autopen2
4dd6bedac5f8f5c5e9bf2457d82c2f6e41f1d045
[ "MIT" ]
2
2017-11-03T15:14:23.000Z
2018-02-07T02:50:20.000Z
def install_script(): print ("JESUS BEJESUS ITS INSTALLING")
32
42
0.734375
8
64
5.75
1
0
0
0
0
0
0
0
0
0
0
0
0.15625
64
2
42
32
0.851852
0
0
0
0
0
0.430769
0
0
0
0
0
0
1
0.5
true
0
0
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
1
0
5
6be7b43b3314ef0e21d35af48a9a74cc7f53cb3a
13,204
py
Python
tests/unittests/test_solace_queue.py
ExalDraen/python-libsolace
76abd2ac8b9f2c579fa9c23ae0c988ce001fabaf
[ "MIT" ]
null
null
null
tests/unittests/test_solace_queue.py
ExalDraen/python-libsolace
76abd2ac8b9f2c579fa9c23ae0c988ce001fabaf
[ "MIT" ]
null
null
null
tests/unittests/test_solace_queue.py
ExalDraen/python-libsolace
76abd2ac8b9f2c579fa9c23ae0c988ce001fabaf
[ "MIT" ]
2
2019-09-06T23:47:35.000Z
2020-09-14T10:06:07.000Z
import logging from libsolace.plugin import PluginResponse __author__ = 'keghol' import unittest2 as unittest from libsolace.SolaceAPI import SolaceAPI from test_util import get_plugin_from_api __plugin_name__ = "SolaceQueue" class TestSolaceQueue(unittest.TestCase): def setUp(self): self.api = SolaceAPI("dev") self.plugin = get_plugin_from_api(self.api, __plugin_name__) self.queue_config = { "retries": 0, "consume": "all", "exclusive": "true", "max_bind_count": 10, "queue_size": 1024, "owner": "default" } def test_zzz_batch_mode(self): self.plugin = get_plugin_from_api(self.api, __plugin_name__, queues=[{"name": "solacetest.prov.queue", "queue_config": self.queue_config}], vpn_name="default") self.assertTrue(isinstance(self.plugin.commands.commands, list)) print(self.plugin.commands.commands[0]) self.assertEqual(self.plugin.commands.commands[0], ('<rpc semp-version="soltr/7_1_1"><message-spool><vpn-name>default</vpn-name><create><queue><name>solacetest.prov.queue</name></queue></create></message-spool></rpc>', {'queues': [{'name': 'solacetest.prov.queue', 'queue_config': {'retries': 0, 'consume': 'all', 'exclusive': 'true', 'max_bind_count': 10, 'queue_size': 1024, 'owner': 'default'}}], 'queue_name': 'solacetest.prov.queue', 'primaryOnly': True, 'vpn_name': 'default'})) def test_get_queue(self): xml = self.plugin.get(queue_name="please.dont.exist", vpn_name="default") self.assertIsInstance(xml, list) self.assertEqual(str(xml[0]), "{'HOST': 'http://solace1.swe1.unibet.com/SEMP', u'rpc-reply': {u'rpc': {u'show': {u'queue': {u'queues': None}}}, u'execute-result': {u'@code': u'ok'}, u'@semp-version': u'%s'}}" % self.api.version) # testing overlaying config empty = class defaults def test_get_queue_config_defaults(self): config = self.plugin.get_queue_config({"name": "testqueue1", "env": [ {"name": "dev", "queue_config": {}} ] } ) self.assertEqual(config, self.plugin.defaults) # test setting config to specific values def test_get_queue_config(self): config = 
self.plugin.get_queue_config({"name": "testqueue1", "env": [ {"name": "dev", "queue_config": { "retries": 1, "exclusive": "false", "max_bind_count": 10, "owner": "somebody", "queue_size": 1, "consume": "not_all" }} ] } ) self.assertEqual(config, {"retries": 1, "exclusive": "false", "max_bind_count": 10, "owner": "somebody", "queue_size": 1, "consume": "not_all" }) def test_create_queue(self): xml = get_plugin_from_api(self.api, __plugin_name__).create_queue(queue_name="some_new_queue", vpn_name="default", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><create><queue><name>some_new_queue</name></queue></create></message-spool></rpc>' % self.api.version) def test_shutdown_egress(self): xml = get_plugin_from_api(self.api, __plugin_name__).shutdown_egress(queue_name="somequeue_name", vpn_name="default", shutdown_on_apply=True, force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><shutdown><egress/></shutdown></queue></message-spool></rpc>' % self.api.version) def test_shutdown_egress_without_shutdown(self): xml = get_plugin_from_api(self.api, __plugin_name__).shutdown_egress(queue_name="somequeue_name", vpn_name="default") self.assertIs(xml, None) def test_shutdown_ingress(self): xml = get_plugin_from_api(self.api, __plugin_name__).shutdown_ingress(queue_name="somequeue_name", vpn_name="default", shutdown_on_apply=True, force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><shutdown><ingress/></shutdown></queue></message-spool></rpc>' % self.api.version) def test_shutdown_ingress_without_shutdown(self): xml = get_plugin_from_api(self.api, __plugin_name__).shutdown_ingress(queue_name="somequeue_name", 
vpn_name="default") self.assertIs(xml, None) def test_exclusive(self): xml = get_plugin_from_api(self.api, __plugin_name__).exclusive(queue_name="somequeue_name", vpn_name="default", shutdown_on_apply=True, exclusive=True, force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><access-type><exclusive/></access-type></queue></message-spool></rpc>' % self.api.version) def test_owner(self): xml = get_plugin_from_api(self.api, __plugin_name__).owner(queue_name="somequeue_name", vpn_name="default", owner_username="someuser", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><owner><owner>someuser</owner></owner></queue></message-spool></rpc>' % self.api.version) def test_max_bind_count(self): xml = get_plugin_from_api(self.api, __plugin_name__).max_bind_count(queue_name="somequeue_name", vpn_name="default", max_bind_count=10, force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><max-bind-count><value>10</value></max-bind-count></queue></message-spool></rpc>' % self.api.version) def test_consume(self): xml = get_plugin_from_api(self.api, __plugin_name__).consume(queue_name="somequeue_name", vpn_name="default", consume="all", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><permission><all/><consume/></permission></queue></message-spool></rpc>' % self.api.version) def test_permission_consume(self): xml = get_plugin_from_api(self.api, __plugin_name__).permission(queue_name="somequeue_name", vpn_name="default", permission="consume", force=True) 
self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><permission><all/><consume/></permission></queue></message-spool></rpc>' % self.api.version) def test_permission_delete(self): xml = get_plugin_from_api(self.api, __plugin_name__).permission(queue_name="somequeue_name", vpn_name="default", permission="delete", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><permission><all/><delete/></permission></queue></message-spool></rpc>' % self.api.version) def test_permission_modify_topic(self): xml = get_plugin_from_api(self.api, __plugin_name__).permission(queue_name="somequeue_name", vpn_name="default", permission="modify-topic", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><permission><all/><modify-topic/></permission></queue></message-spool></rpc>' % self.api.version) def test_permission_readonly(self): xml = get_plugin_from_api(self.api, __plugin_name__).permission(queue_name="somequeue_name", vpn_name="default", permission="read-only", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><permission><all/><read-only/></permission></queue></message-spool></rpc>' % self.api.version) def test_spool_size(self): xml = get_plugin_from_api(self.api, __plugin_name__).spool_size(queue_name="somequeue_name", vpn_name="default", queue_size=10, force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc 
semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><max-spool-usage><size>10</size></max-spool-usage></queue></message-spool></rpc>' % self.api.version) def test_retries(self): xml = get_plugin_from_api(self.api, __plugin_name__).retries(queue_name="somequeue_name", vpn_name="default", retries=9, force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><max-redelivery><value>9</value></max-redelivery></queue></message-spool></rpc>' % self.api.version) def test_enable(self): xml = get_plugin_from_api(self.api, __plugin_name__).enable(queue_name="somequeue_name", vpn_name="default", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><no><shutdown><full/></shutdown></no></queue></message-spool></rpc>' % self.api.version) def test_reject_on_discard(self): xml = get_plugin_from_api(self.api, __plugin_name__).reject_on_discard(queue_name="somequeue_name", vpn_name="default", force=True) self.assertIsInstance(xml, PluginResponse) self.assertEqual(xml.xml, '<rpc semp-version="%s"><message-spool><vpn-name>default</vpn-name><queue><name>somequeue_name</name><reject-msg-to-sender-on-discard/></queue></message-spool></rpc>' % self.api.version)
68.061856
492
0.536731
1,333
13,204
5.067517
0.098275
0.053886
0.074611
0.097705
0.798816
0.774241
0.774241
0.724204
0.724204
0.687787
0
0.004877
0.332248
13,204
193
493
68.414508
0.761257
0.006589
0
0.402439
0
0.103659
0.290758
0.204514
0
0
0
0
0.231707
1
0.134146
false
0
0.030488
0
0.170732
0.006098
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5