hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ecdc5db1bf4e292ca2494bbac36ea901279643fc
| 77
|
py
|
Python
|
src/apps/reservation/models/__init__.py
|
lizaveta-stasevich/booking
|
acebd0d9d3035e802cdf6e719a142fe5f74ec2c1
|
[
"Apache-2.0"
] | null | null | null |
src/apps/reservation/models/__init__.py
|
lizaveta-stasevich/booking
|
acebd0d9d3035e802cdf6e719a142fe5f74ec2c1
|
[
"Apache-2.0"
] | 6
|
2020-06-06T00:11:43.000Z
|
2022-02-10T09:33:51.000Z
|
src/apps/reservation/models/__init__.py
|
lizaveta-stasevich/booking
|
acebd0d9d3035e802cdf6e719a142fe5f74ec2c1
|
[
"Apache-2.0"
] | null | null | null |
from .city import City
from .train import Train
from .comfort import Comfort
| 19.25
| 28
| 0.805195
| 12
| 77
| 5.166667
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155844
| 77
| 3
| 29
| 25.666667
| 0.953846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
01b53bc11f444771acf9b6cd17faf43b54ce2a93
| 204
|
py
|
Python
|
tests/conftest.py
|
charmed-kubernetes/layer-docker
|
f47ea965f598f25aaf1d02e457b54660afa8303a
|
[
"Apache-2.0"
] | 11
|
2015-09-02T16:32:33.000Z
|
2020-08-19T02:10:35.000Z
|
tests/conftest.py
|
charmed-kubernetes/layer-docker
|
f47ea965f598f25aaf1d02e457b54660afa8303a
|
[
"Apache-2.0"
] | 130
|
2015-09-14T17:28:45.000Z
|
2020-03-02T15:47:40.000Z
|
tests/conftest.py
|
charmed-kubernetes/layer-docker
|
f47ea965f598f25aaf1d02e457b54660afa8303a
|
[
"Apache-2.0"
] | 22
|
2015-09-26T23:34:53.000Z
|
2021-03-03T06:30:48.000Z
|
import sys
from unittest.mock import MagicMock
# mock dependencies which we don't care about covering in our tests
sys.modules['charms.docker'] = MagicMock()
sys.modules['charms.reactive'] = MagicMock()
| 29.142857
| 67
| 0.77451
| 29
| 204
| 5.448276
| 0.724138
| 0.126582
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122549
| 204
| 6
| 68
| 34
| 0.882682
| 0.318627
| 0
| 0
| 0
| 0
| 0.20438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
01de7ebf216c9143f55507d7c59b8601e6f0206d
| 426
|
py
|
Python
|
objdict/__init__.py
|
StepaTa/ModularBotForUser
|
c82691aa970ae936336de1981708abb40f0c5ac9
|
[
"MIT"
] | null | null | null |
objdict/__init__.py
|
StepaTa/ModularBotForUser
|
c82691aa970ae936336de1981708abb40f0c5ac9
|
[
"MIT"
] | null | null | null |
objdict/__init__.py
|
StepaTa/ModularBotForUser
|
c82691aa970ae936336de1981708abb40f0c5ac9
|
[
"MIT"
] | null | null | null |
from objdict.objDict import dumps, loads, from_json, to_json
from objdict.objDict import ClassRegistry, JsonEncoder, ObjDict
from objdict.objDict import JsonDecodeError
from objdict.dualUrlJson import combiParse, unParse
from objdict import objDict as objDicter
from objdict.pytestcode import pytester
from objdict.struct import Struct, DictStruct
from objdict.inputs import inputs
from objdict.enums import OEnum
#extra line
| 38.727273
| 63
| 0.852113
| 57
| 426
| 6.333333
| 0.421053
| 0.274238
| 0.149584
| 0.199446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110329
| 426
| 10
| 64
| 42.6
| 0.952507
| 0.023474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bf3ca9c333722891b4e8ec140fe7fd22e4140a2c
| 73
|
py
|
Python
|
generi/commands/__init__.py
|
nick-lehmann/Generi
|
5f13860eb91973670fe378479731f95feab1e380
|
[
"MIT"
] | 1
|
2019-12-21T22:04:07.000Z
|
2019-12-21T22:04:07.000Z
|
generi/commands/__init__.py
|
nick-lehmann/Generi
|
5f13860eb91973670fe378479731f95feab1e380
|
[
"MIT"
] | null | null | null |
generi/commands/__init__.py
|
nick-lehmann/Generi
|
5f13860eb91973670fe378479731f95feab1e380
|
[
"MIT"
] | null | null | null |
from .build import build
from .push import push
from .write import write
| 18.25
| 24
| 0.794521
| 12
| 73
| 4.833333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164384
| 73
| 3
| 25
| 24.333333
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1721fdf33b91dc8187371ebef485db814baa3ad3
| 76
|
py
|
Python
|
tk02.py
|
visalpop/tkinter_sample
|
15474250431727e2b24b6f6aebc654c36ccf8d87
|
[
"MIT"
] | null | null | null |
tk02.py
|
visalpop/tkinter_sample
|
15474250431727e2b24b6f6aebc654c36ccf8d87
|
[
"MIT"
] | null | null | null |
tk02.py
|
visalpop/tkinter_sample
|
15474250431727e2b24b6f6aebc654c36ccf8d87
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# tk02.pyw
import tkinter as tk
print(tk.TkVersion)
| 15.2
| 23
| 0.657895
| 12
| 76
| 4.166667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 0.157895
| 76
| 5
| 24
| 15.2
| 0.734375
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
17365f20a37aac6d4e23d5ca480b0b353708de21
| 176
|
py
|
Python
|
tortilla/api.py
|
camomile-project/tortilla
|
e749a7be49cc272bd1149a3acfe0d352f87f372b
|
[
"MIT"
] | 261
|
2015-01-02T02:18:44.000Z
|
2018-04-02T07:33:53.000Z
|
tortilla/api.py
|
camomile-project/tortilla
|
e749a7be49cc272bd1149a3acfe0d352f87f372b
|
[
"MIT"
] | 31
|
2015-01-26T15:25:32.000Z
|
2018-03-30T15:13:01.000Z
|
tortilla/api.py
|
camomile-project/tortilla
|
e749a7be49cc272bd1149a3acfe0d352f87f372b
|
[
"MIT"
] | 29
|
2015-01-05T19:21:43.000Z
|
2017-11-07T14:52:42.000Z
|
# -*- coding: utf-8 -*-
from . import wrappers
def wrap(url, **options):
"""Syntax sugar for creating service wrappers."""
return wrappers.Wrap(part=url, **options)
| 19.555556
| 53
| 0.647727
| 22
| 176
| 5.181818
| 0.772727
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006944
| 0.181818
| 176
| 8
| 54
| 22
| 0.784722
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
175571259c01a33fadf2f76eddd44cc9f82450a0
| 92
|
py
|
Python
|
trackdecoder/__init__.py
|
PredaaA/aikaterna-cogs
|
d34732def7bf8d0be0adc2fa7bd63e57596ff88f
|
[
"Apache-2.0"
] | null | null | null |
trackdecoder/__init__.py
|
PredaaA/aikaterna-cogs
|
d34732def7bf8d0be0adc2fa7bd63e57596ff88f
|
[
"Apache-2.0"
] | null | null | null |
trackdecoder/__init__.py
|
PredaaA/aikaterna-cogs
|
d34732def7bf8d0be0adc2fa7bd63e57596ff88f
|
[
"Apache-2.0"
] | null | null | null |
from .trackdecoder import TrackDecoder
def setup(bot):
bot.add_cog(TrackDecoder(bot))
| 15.333333
| 38
| 0.76087
| 12
| 92
| 5.75
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141304
| 92
| 5
| 39
| 18.4
| 0.873418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
175ac9439e03546b2bbebb60f278dde2447e2fbc
| 227
|
py
|
Python
|
src/plotman/_tests/archive_test.py
|
pieterhelsen/plotman
|
1b7dfba139ed03d3c32a2f04f6011e03bfb1f442
|
[
"Apache-2.0"
] | 1,016
|
2020-11-24T10:27:08.000Z
|
2022-03-20T23:46:45.000Z
|
src/plotman/_tests/archive_test.py
|
pieterhelsen/plotman
|
1b7dfba139ed03d3c32a2f04f6011e03bfb1f442
|
[
"Apache-2.0"
] | 436
|
2021-01-23T23:28:54.000Z
|
2022-03-30T00:33:29.000Z
|
src/plotman/_tests/archive_test.py
|
pieterhelsen/plotman
|
1b7dfba139ed03d3c32a2f04f6011e03bfb1f442
|
[
"Apache-2.0"
] | 332
|
2021-02-02T03:42:25.000Z
|
2022-03-31T09:03:38.000Z
|
from plotman import archive, job
def test_compute_priority() -> None:
assert archive.compute_priority(
job.Phase(major=3, minor=1), 1000, 10
) > archive.compute_priority(job.Phase(major=3, minor=6), 1000, 10)
| 28.375
| 71
| 0.700441
| 33
| 227
| 4.69697
| 0.575758
| 0.290323
| 0.283871
| 0.322581
| 0.529032
| 0.529032
| 0.529032
| 0.529032
| 0
| 0
| 0
| 0.085106
| 0.171806
| 227
| 7
| 72
| 32.428571
| 0.739362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1772d990f11381101ceb9d8d1bc4176f4d25c788
| 45
|
py
|
Python
|
rdf_io/views/__init__.py
|
GlauberMC/django-rdf-io
|
5deaec40264407574351dd20f758b17b89b495a9
|
[
"CC0-1.0"
] | null | null | null |
rdf_io/views/__init__.py
|
GlauberMC/django-rdf-io
|
5deaec40264407574351dd20f758b17b89b495a9
|
[
"CC0-1.0"
] | null | null | null |
rdf_io/views/__init__.py
|
GlauberMC/django-rdf-io
|
5deaec40264407574351dd20f758b17b89b495a9
|
[
"CC0-1.0"
] | null | null | null |
from serialize import *
from manage import *
| 15
| 23
| 0.777778
| 6
| 45
| 5.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 24
| 22.5
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bd77b2a831bc46979ce050cab6205e2a0557cecb
| 161
|
py
|
Python
|
Day 2/BMI_Calculator.py
|
hamzaoda/100-Days-of-Code---The-Complete-Python-Pro-Bootcamp-for-2021
|
5340007d8405df2e29643b47d3ff9fa4f7af9e10
|
[
"Unlicense"
] | null | null | null |
Day 2/BMI_Calculator.py
|
hamzaoda/100-Days-of-Code---The-Complete-Python-Pro-Bootcamp-for-2021
|
5340007d8405df2e29643b47d3ff9fa4f7af9e10
|
[
"Unlicense"
] | null | null | null |
Day 2/BMI_Calculator.py
|
hamzaoda/100-Days-of-Code---The-Complete-Python-Pro-Bootcamp-for-2021
|
5340007d8405df2e29643b47d3ff9fa4f7af9e10
|
[
"Unlicense"
] | null | null | null |
height= float(input("Please Enter your height : "))
weight= int(input("Please Enter your weight : "))
BMI=weight/height ** 2
print("your BMI is : " + str(BMI))
| 26.833333
| 51
| 0.670807
| 24
| 161
| 4.5
| 0.541667
| 0.203704
| 0.296296
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007353
| 0.15528
| 161
| 6
| 52
| 26.833333
| 0.786765
| 0
| 0
| 0
| 0
| 0
| 0.419753
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bd8200c45ae0a7535198fbd7fa17cbf7ec1616aa
| 43
|
py
|
Python
|
src/models/rotate/__init__.py
|
wang-yuhao/Practical-Big-Data-Science-ADL-AI
|
0bf63bf210f506e287f8492e716bb3394137d74b
|
[
"MIT"
] | null | null | null |
src/models/rotate/__init__.py
|
wang-yuhao/Practical-Big-Data-Science-ADL-AI
|
0bf63bf210f506e287f8492e716bb3394137d74b
|
[
"MIT"
] | null | null | null |
src/models/rotate/__init__.py
|
wang-yuhao/Practical-Big-Data-Science-ADL-AI
|
0bf63bf210f506e287f8492e716bb3394137d74b
|
[
"MIT"
] | 1
|
2021-12-24T00:26:26.000Z
|
2021-12-24T00:26:26.000Z
|
from .rotate_evaluation_model import RotatE
| 43
| 43
| 0.906977
| 6
| 43
| 6.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 43
| 1
| 43
| 43
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bdc8e14eb3536e316c7d637d8c43082a1cd80ba8
| 185
|
py
|
Python
|
hypnospy/__init__.py
|
HypnosPy/HypnosPy
|
28b17d07ee78f7714bbbbd66f6253764addf9d94
|
[
"MIT"
] | 4
|
2022-01-02T18:40:57.000Z
|
2022-02-17T12:59:57.000Z
|
hypnospy/__init__.py
|
ippozuelo/HypnosPy
|
28b17d07ee78f7714bbbbd66f6253764addf9d94
|
[
"MIT"
] | 2
|
2020-11-11T07:13:56.000Z
|
2020-11-11T07:38:54.000Z
|
hypnospy/__init__.py
|
ippozuelo/HypnosPy
|
28b17d07ee78f7714bbbbd66f6253764addf9d94
|
[
"MIT"
] | 2
|
2020-11-24T22:46:31.000Z
|
2021-02-05T16:43:12.000Z
|
#__all__ = ["Wearable", "data"]
from .diary import Diary
from .wearable import Wearable
from .experiment import Experiment
from .demographics import Demographics
from .cgm import CGM
| 20.555556
| 38
| 0.783784
| 23
| 185
| 6.130435
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140541
| 185
| 8
| 39
| 23.125
| 0.886792
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bdee20a5ee98079d4824390ed4132e8fbf4a5462
| 97
|
py
|
Python
|
thg/toplevel.py
|
thgdevelopers/thg_lib
|
d9fc28096c8e05267a22f6899890ae5f429b5b19
|
[
"BSD-3-Clause"
] | null | null | null |
thg/toplevel.py
|
thgdevelopers/thg_lib
|
d9fc28096c8e05267a22f6899890ae5f429b5b19
|
[
"BSD-3-Clause"
] | 4
|
2020-04-22T02:24:27.000Z
|
2020-04-22T02:28:57.000Z
|
thg/toplevel.py
|
thgdevelopers/thg_lib
|
d9fc28096c8e05267a22f6899890ae5f429b5b19
|
[
"BSD-3-Clause"
] | null | null | null |
from thglibs import *
try:
import cPickle as pickle
except ImportError:
import pickle
| 10.777778
| 28
| 0.721649
| 12
| 97
| 5.833333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.247423
| 97
| 8
| 29
| 12.125
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bdf68382c03ed2c427ad74794bc6451649a2128b
| 179
|
py
|
Python
|
utils/__init__.py
|
feelool007/Lotto1224
|
fd360fad7260fd9022c8f71d6f48a79f59266b72
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
feelool007/Lotto1224
|
fd360fad7260fd9022c8f71d6f48a79f59266b72
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
feelool007/Lotto1224
|
fd360fad7260fd9022c8f71d6f48a79f59266b72
|
[
"MIT"
] | null | null | null |
from .analysis import analysis
from .oddAndEven import oddAndEven
from .smallAndLarge import smallAndLarge
from .parseInt import parseInt
from .winNumCounter import winNumCounter
| 29.833333
| 40
| 0.860335
| 20
| 179
| 7.7
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111732
| 179
| 5
| 41
| 35.8
| 0.968553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
da0b3fb05b93ec711bbba7ea7a9d3b300a60a1ae
| 2,335
|
py
|
Python
|
tests/test_model_evaluators.py
|
superphy/acheron
|
cd9838f000085409e306a5f66b04276a1e4eb5f5
|
[
"Apache-2.0"
] | 1
|
2022-01-07T17:23:14.000Z
|
2022-01-07T17:23:14.000Z
|
tests/test_model_evaluators.py
|
superphy/acheron
|
cd9838f000085409e306a5f66b04276a1e4eb5f5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_model_evaluators.py
|
superphy/acheron
|
cd9838f000085409e306a5f66b04276a1e4eb5f5
|
[
"Apache-2.0"
] | 1
|
2021-06-18T17:36:08.000Z
|
2021-06-18T17:36:08.000Z
|
from acheron.helpers import model_evaluators
def test_to_float():
mics = [32, 12, '>32', '>32.0000', '>16']
floats = [float(i) for i in [32, 12, 32, 32, 16]]
for i in range(len(mics)):
assert(model_evaluators.to_float(mics[i]) == floats[i])
def test_is_equiv():
mics = [32, 16, '>32', '>32.0000', '>16']
floats = [model_evaluators.to_float(i) for i in [32, 12, 32, 32, 16]]
for i in range(len(mics)):
mic_float = model_evaluators.to_float(mics[i])
if i == 1:
assert(not model_evaluators.is_equiv(mic_float,floats[i]))
else:
assert(model_evaluators.is_equiv(mic_float,floats[i]))
assert model_evaluators.is_equiv(0.12, 0.125)
def test_to_resistance_type():
bps = {}
bps['AMP'] = [8,[16],32]
bps['AZM'] = [16,[],32]
bps['CIP'] = [0.06,[0.12,0.25,0.5],1]
assert model_evaluators.to_resistance_type(1, bps['AMP']) == 'S'
assert model_evaluators.to_resistance_type(8, bps['AMP']) == 'S'
assert model_evaluators.to_resistance_type(16, bps['AMP']) == 'I'
assert model_evaluators.to_resistance_type(32, bps['AMP']) == 'R'
assert model_evaluators.to_resistance_type(64, bps['AMP']) == 'R'
assert model_evaluators.to_resistance_type(8, bps['AZM']) == 'S'
assert model_evaluators.to_resistance_type(16, bps['AZM']) == 'S'
assert model_evaluators.to_resistance_type(32, bps['AZM']) == 'R'
assert model_evaluators.to_resistance_type(0.06, bps['CIP']) == 'S'
assert model_evaluators.to_resistance_type(0.12, bps['CIP']) == 'I'
assert model_evaluators.to_resistance_type(0.25, bps['CIP']) == 'I'
assert model_evaluators.to_resistance_type(1, bps['CIP']) == 'R'
def test_find_error_type():
#find_error_type(predicted, actual, abx)
assert model_evaluators.find_error_type(1, '32', "AMP") == "Very Major Error"
assert model_evaluators.find_error_type('1', 32, "AMP") == "Very Major Error"
assert model_evaluators.find_error_type(16, 32, "AMP") == "Non Major Error"
assert model_evaluators.find_error_type(64, 1, "AMP") == "Major Error"
assert model_evaluators.find_error_type(0.06, 1, "CIP") == "Very Major Error"
assert model_evaluators.find_error_type(1, 0.06, "CIP") == "Major Error"
assert model_evaluators.find_error_type(0.12, '0.125', "CIP") =="Correct"
| 42.454545
| 81
| 0.659529
| 358
| 2,335
| 4.064246
| 0.148045
| 0.268041
| 0.317526
| 0.205498
| 0.773883
| 0.728522
| 0.691409
| 0.636426
| 0.494845
| 0.186254
| 0
| 0.065844
| 0.167452
| 2,335
| 54
| 82
| 43.240741
| 0.682613
| 0.016702
| 0
| 0.04878
| 0
| 0
| 0.08976
| 0
| 0
| 0
| 0
| 0
| 0.560976
| 1
| 0.097561
| false
| 0
| 0.02439
| 0
| 0.121951
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
da4283d432e93b4a956c4e6e9d62dfc7c8c9716c
| 189
|
py
|
Python
|
teragested/cli.py
|
demosdemon/furry-octo-fiesta
|
e72d95cb73a02a25dfb34ca327aea7cc9eb1391f
|
[
"MIT"
] | null | null | null |
teragested/cli.py
|
demosdemon/furry-octo-fiesta
|
e72d95cb73a02a25dfb34ca327aea7cc9eb1391f
|
[
"MIT"
] | 1
|
2021-06-01T23:04:04.000Z
|
2021-06-01T23:04:04.000Z
|
teragested/cli.py
|
demosdemon/teragested
|
e72d95cb73a02a25dfb34ca327aea7cc9eb1391f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""The teragested command line interface."""
import click
@click.command()
def main():
"""Interact with the teragested parser and a shell script."""
pass
| 17.181818
| 65
| 0.650794
| 24
| 189
| 5.125
| 0.833333
| 0.211382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006536
| 0.190476
| 189
| 10
| 66
| 18.9
| 0.797386
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
da58156f0269be074ab886fb95cac537deca0c06
| 3,230
|
py
|
Python
|
ds18b20S/__init__.py
|
danielbair/ds18b20S
|
4531b70ed5788e3f551aee2deb0f1effad2b9d61
|
[
"MIT"
] | null | null | null |
ds18b20S/__init__.py
|
danielbair/ds18b20S
|
4531b70ed5788e3f551aee2deb0f1effad2b9d61
|
[
"MIT"
] | null | null | null |
ds18b20S/__init__.py
|
danielbair/ds18b20S
|
4531b70ed5788e3f551aee2deb0f1effad2b9d61
|
[
"MIT"
] | null | null | null |
#Code by Sahak Sahakyan
#Library for ds18b20 temperature sensor
#Contact`
#Email: sahak.sahakyan2017@gmail.com
import sys
import glob
import os
class DsbS():
def __init__(self, initial=False):
if initial:
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
def getSensorIds(slef):
return [i.split("/")[5] for i in glob.glob("/sys/bus/w1/devices/28-0*/w1_slave")]
def getTemps(self, Ttype="C"):
temps = list()
if len([i.split("/")[5] for i in glob.glob("/sys/bus/w1/devices/28-0*/w1_slave")]) < 1: print("NO DEVICES FOUND"); return "Null"
for sensor in glob.glob("/sys/bus/w1/devices/28-0*/w1_slave"):
id = sensor.split("/")[5]
try:
f = open(sensor, "r")
data = f.read()
f.close()
if "YES" in data:
(discard, sep, reading) = data.partition(' t=')
if Ttype.lower() == "c":
t = float(reading) / 1000.0
temps.append(t)
elif Ttype.lower() == "f":
t = (float(reading) / 1000.0 * 9 / 5) + 32
temps.append(t)
elif Ttype.lower() == "k":
t = (float(reading) / 1000.0) + 273.15
temps.append(t)
else:
t = float(reading) / 1000.0
temps.append(t)
print("WARNING: UNKOWN TEMPERATURE TYPE")
else:
print("EROR WHILE READING TEMPERATURE")
except:
print("EROR WHILE READING TEMPERATURE1")
return temps
def getIdTemp(self, Ttype="c"):
temps = dict()
if len([i.split("/")[5] for i in glob.glob("/sys/bus/w1/devices/28-0*/w1_slave")]) < 1: print("NO DEVICES FOUND"); return "Null"
for sensor in glob.glob("/sys/bus/w1/devices/28-0*/w1_slave"):
id = sensor.split("/")[5]
try:
f = open(sensor, "r")
data = f.read()
f.close()
if "YES" in data:
(discard, sep, reading) = data.partition(' t=')
if Ttype.lower() == "c":
t = float(reading) / 1000.0
temps[sensor] = t
elif Ttype.lower() == "f":
t = (float(reading) / 1000.0 * 9 / 5) + 32
temps[sensor] = t
elif Ttype.lower() == "k":
t = (float(reading) / 1000.0) + 273.15
temps[sensor] = t
else:
t = float(reading) / 1000.0
temps[sensor] = t
print("WARNING: UNKOWN TEMPERATURE TYPE")
else:
print("EROR WHILE READING TEMPERATURE")
except:
print("EROR WHILE READING TEMPERATURE")
return temps
| 37.55814
| 137
| 0.419505
| 336
| 3,230
| 4.005952
| 0.261905
| 0.035661
| 0.077266
| 0.10104
| 0.751114
| 0.742942
| 0.734027
| 0.734027
| 0.674591
| 0.674591
| 0
| 0.057289
| 0.45418
| 3,230
| 86
| 138
| 37.55814
| 0.706183
| 0.031889
| 0
| 0.779412
| 0
| 0
| 0.14972
| 0.055939
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.044118
| 0.014706
| 0.161765
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
da8ed0e7e7abc4fc5c1f608294795bed2be4d831
| 26
|
py
|
Python
|
src/__init__.py
|
uliebal/batchslopes
|
a68c4e85a836cc1f378f8ae91f0d6fa38d66b4e3
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
uliebal/batchslopes
|
a68c4e85a836cc1f378f8ae91f0d6fa38d66b4e3
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
uliebal/batchslopes
|
a68c4e85a836cc1f378f8ae91f0d6fa38d66b4e3
|
[
"MIT"
] | null | null | null |
from .batchslopes import *
| 26
| 26
| 0.807692
| 3
| 26
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
da9008c4e7fa1addf1f9f1db21e7fe01a2ba36f2
| 21,743
|
py
|
Python
|
src/extract/compute_predictions.py
|
atseng95/fourier_attribution_priors
|
53f668e315621e4f64f9e11a403f2ea80529eb29
|
[
"MIT"
] | 8
|
2020-11-19T02:58:55.000Z
|
2021-09-10T14:11:29.000Z
|
src/extract/compute_predictions.py
|
amtseng/fourier_attribution_priors
|
53f668e315621e4f64f9e11a403f2ea80529eb29
|
[
"MIT"
] | null | null | null |
src/extract/compute_predictions.py
|
amtseng/fourier_attribution_priors
|
53f668e315621e4f64f9e11a403f2ea80529eb29
|
[
"MIT"
] | 1
|
2020-09-26T00:49:25.000Z
|
2020-09-26T00:49:25.000Z
|
import model.util as model_util
import model.profile_models as profile_models
import model.binary_models as binary_models
import extract.data_loading as data_loading
import numpy as np
import torch
import tqdm
def _get_profile_model_predictions_batch(
    model, coords, num_tasks, input_func, controls=None,
    fourier_att_prior_freq_limit=200, fourier_att_prior_freq_limit_softness=0.2,
    att_prior_grad_smooth_sigma=3, return_losses=False, return_gradients=False
):
    """
    Fetches the necessary data from the given coordinates or bin indices and
    runs it through a profile or binary model. This will perform computation
    in a single batch.
    Arguments:
        `model`: a trained `ProfilePredictorWithMatchedControls`,
            `ProfilePredictorWithSharedControls`, or
            `ProfilePredictorWithoutControls`
        `coords`: a B x 3 array of coordinates to compute outputs for
        `num_tasks`: number of tasks for the model
        `input_func`: a function that takes in `coords` and returns the
            B x I x 4 array of one-hot sequences and the
            B x (T or T + 1 or 2T) x O x S array of profiles (perhaps with
            controls)
        `controls`: the type of control profiles (if any) used in model; can be
            "matched" (each task has a matched control), "shared" (all tasks
            share a control), or None (no controls); must match the model class
        `fourier_att_prior_freq_limit`: limit for frequencies in Fourier prior
            loss
        `fourier_att_prior_freq_limit_softness`: degree of softness for limit
        `att_prior_grad_smooth_sigma`: width of smoothing kernel for gradients
        `return_losses`: if True, compute/return the loss values
        `return_gradients`: if True, compute/return the input gradients and
            sequences
    Returns a dictionary of the following structure:
        true_profs: true profile raw counts (B x T x O x S)
        log_pred_profs: predicted profile log probabilities (B x T x O x S)
        true_counts: true total counts (B x T x S)
        log_pred_counts: predicted log counts (B x T x S)
        prof_losses: profile NLL losses (B-array), if `return_losses` is True
        count_losses: counts MSE losses (B-array) if `return_losses` is True
        att_losses: prior losses (B-array), if `return_losses` is True
        input_seqs: one-hot input sequences (B x I x 4), if `return_gradients`
            is true
        input_grads: "hypothetical" input gradients (B x I x 4), if
            `return_gradients` is true
    """
    result = {}
    input_seqs, profiles = input_func(coords)
    if return_gradients:
        # Keep the NumPy array aside so it can be returned before it is
        # replaced by a tensor below
        input_seqs_np = input_seqs
        model.zero_grad()  # Zero out weights because we are computing gradients
    input_seqs = model_util.place_tensor(torch.tensor(input_seqs)).float()
    profiles = model_util.place_tensor(torch.tensor(profiles)).float()
    if controls is not None:
        # First `num_tasks` entries are TF profiles; the rest are controls
        tf_profs = profiles[:, :num_tasks, :, :]
        cont_profs = profiles[:, num_tasks:, :, :]  # Last half or just one
    else:
        tf_profs, cont_profs = profiles, None
    if return_losses or return_gradients:
        input_seqs.requires_grad = True  # Set gradient required
        logit_pred_profs, log_pred_counts = model(input_seqs, cont_profs)
        # Subtract mean along output profile dimension; this wouldn't change
        # softmax probabilities, but normalizes the magnitude of gradients
        norm_logit_pred_profs = logit_pred_profs - \
            torch.mean(logit_pred_profs, dim=2, keepdim=True)
        # Weight by post-softmax probabilities, but do not take the
        # gradients of these probabilities; this upweights important regions
        # exponentially
        # NOTE(review): `profile_logits_to_log_probs` suggests these weights
        # are LOG probabilities despite the variable name — confirm intended
        pred_prof_probs = profile_models.profile_logits_to_log_probs(
            logit_pred_profs
        ).detach()
        weighted_norm_logits = norm_logit_pred_profs * pred_prof_probs
        input_grads, = torch.autograd.grad(
            weighted_norm_logits, input_seqs,
            grad_outputs=model_util.place_tensor(
                torch.ones(weighted_norm_logits.size())
            ),
            retain_graph=True, create_graph=True
            # We'll be operating on the gradient itself, so we need to
            # create the graph
            # Gradients are summed across strands and tasks
        )
        input_grads_np = input_grads.detach().cpu().numpy()
        input_seqs.requires_grad = False  # Reset gradient required
    else:
        # No gradients needed: plain forward pass
        logit_pred_profs, log_pred_counts = model(input_seqs, cont_profs)
    result["true_profs"] = tf_profs.detach().cpu().numpy()
    # Sum raw counts over the output-length axis to get total counts
    result["true_counts"] = np.sum(result["true_profs"], axis=2)
    logit_pred_profs_np = logit_pred_profs.detach().cpu().numpy()
    result["log_pred_profs"] = profile_models.profile_logits_to_log_probs(
        logit_pred_profs_np
    )
    result["log_pred_counts"] = log_pred_counts.detach().cpu().numpy()
    if return_losses:
        log_pred_profs = profile_models.profile_logits_to_log_probs(
            logit_pred_profs
        )
        num_samples = log_pred_profs.size(0)
        result["prof_losses"] = np.empty(num_samples)
        result["count_losses"] = np.empty(num_samples)
        result["att_losses"] = np.empty(num_samples)
        # Compute losses separately for each example
        for i in range(num_samples):
            _, prof_loss, count_loss = model.correctness_loss(
                tf_profs[i:i+1], log_pred_profs[i:i+1], log_pred_counts[i:i+1],
                1, return_separate_losses=True
            )
            att_loss = model.fourier_att_prior_loss(
                model_util.place_tensor(torch.ones(1)),
                input_grads[i:i+1], fourier_att_prior_freq_limit,
                fourier_att_prior_freq_limit_softness,
                att_prior_grad_smooth_sigma
            )
            result["prof_losses"][i] = prof_loss
            result["count_losses"][i] = count_loss
            result["att_losses"][i] = att_loss
    if return_gradients:
        result["input_seqs"] = input_seqs_np
        result["input_grads"] = input_grads_np
    return result
def _get_binary_model_predictions_batch(
    model, bins, input_func, fourier_att_prior_freq_limit=150,
    fourier_att_prior_freq_limit_softness=0.2, att_prior_grad_smooth_sigma=3,
    return_losses=False, return_gradients=False
):
    """
    Fetches the necessary data from the given bin indices and runs it through
    a binary model, performing the computation in a single batch.
    Arguments:
        `model`: a trained `BinaryPredictor`,
        `bins`: an N-array of bin indices to compute outputs for
        `input_func`: a function that takes in `bins` and returns the B x I x 4
            array of one-hot sequences, the B x T array of output values, and
            B x 3 array of underlying coordinates for the input sequence
        `fourier_att_prior_freq_limit`: limit for frequencies in Fourier prior
            loss
        `fourier_att_prior_freq_limit_softness`: degree of softness for limit
        `att_prior_grad_smooth_sigma`: width of smoothing kernel for gradients
        `return_losses`: if True, compute/return the loss values
        `return_gradients`: if True, compute/return the input gradients and
            sequences
    Returns a dictionary of the following structure:
        true_vals: true binary values (B x T)
        pred_vals: predicted probabilities (B x T)
        coords: coordinates used for prediction (B x 3 object array)
        corr_losses: correctness losses (B-array) if `return_losses` is True
        att_losses: prior losses (B-array), if `return_losses` is True
        input_seqs: one-hot input sequences (B x I x 4), if `return_gradients`
            is True
        input_grads: "hypothetical" input gradients (B x I x 4), if
            `return_gradients` is true
    """
    result = {}
    input_seqs, output_vals, coords = input_func(bins)
    # Keep the NumPy array before it is replaced by a tensor below
    output_vals_np = output_vals
    if return_gradients:
        input_seqs_np = input_seqs
        model.zero_grad()  # Zero out weights because we are computing gradients
    input_seqs = model_util.place_tensor(torch.tensor(input_seqs)).float()
    output_vals = model_util.place_tensor(torch.tensor(output_vals)).float()
    if return_losses or return_gradients:
        input_seqs.requires_grad = True  # Set gradient required
        logit_pred_vals = model(input_seqs)
        # Compute the gradients of the output with respect to the input
        input_grads, = torch.autograd.grad(
            logit_pred_vals, input_seqs,
            grad_outputs=model_util.place_tensor(
                torch.ones(logit_pred_vals.size())
            ),
            retain_graph=True, create_graph=True
            # We'll be operating on the gradient itself, so we need to
            # create the graph
            # Gradients are summed across tasks
        )
        input_grads_np = input_grads.detach().cpu().numpy()
        input_seqs.requires_grad = False  # Reset gradient required
    else:
        logit_pred_vals = model(input_seqs)
        # Bug fix: the original bound a stray, never-used `status` variable
        # here (`status, input_grads = None, None`); only `input_grads` is
        # meaningful (and even it is unused when no losses are requested)
        input_grads = None
    result["true_vals"] = output_vals_np
    logit_pred_vals_np = logit_pred_vals.detach().cpu().numpy()
    result["pred_vals"] = binary_models.binary_logits_to_probs(
        logit_pred_vals_np
    )
    result["coords"] = coords
    if return_losses:
        num_samples = logit_pred_vals.size(0)
        result["corr_losses"] = np.empty(num_samples)
        result["att_losses"] = np.empty(num_samples)
        # Compute losses separately for each example
        for i in range(num_samples):
            corr_loss = model.correctness_loss(
                output_vals[i:i+1], logit_pred_vals[i:i+1], True
            )
            att_loss = model.fourier_att_prior_loss(
                model_util.place_tensor(torch.ones(1)),
                input_grads[i:i+1], fourier_att_prior_freq_limit,
                fourier_att_prior_freq_limit_softness,
                att_prior_grad_smooth_sigma
            )
            result["corr_losses"][i] = corr_loss
            result["att_losses"][i] = att_loss
    if return_gradients:
        result["input_seqs"] = input_seqs_np
        result["input_grads"] = input_grads_np
    return result
def get_profile_model_predictions(
    model, coords, num_tasks, input_func, controls=None,
    fourier_att_prior_freq_limit=200, fourier_att_prior_freq_limit_softness=0.2,
    att_prior_grad_smooth_sigma=3, return_losses=False, return_gradients=False,
    batch_size=128, show_progress=False
):
    """
    Fetches the necessary data from the given coordinates and runs it through a
    profile model, batching the computation.
    Arguments:
        `model`: a trained `ProfilePredictorWithMatchedControls`,
            `ProfilePredictorWithSharedControls`, or
            `ProfilePredictorWithoutControls`
        `coords`: a N x 3 array of coordinates to compute outputs for
        `num_tasks`: number of tasks for the model
        `input_func`: a function that takes in `coords` and returns the
            N x I x 4 array of one-hot sequences and the
            N x (T or T + 1 or 2T) x O x S array of profiles (perhaps with
            controls)
        `controls`: the type of control profiles (if any) used in model; can be
            "matched", "shared", or None; must match the model class
        `fourier_att_prior_freq_limit`: limit for frequencies in Fourier prior
            loss
        `fourier_att_prior_freq_limit_softness`: degree of softness for limit
        `att_prior_grad_smooth_sigma`: width of smoothing kernel for gradients
        `return_losses`: if True, compute/return the loss values
        `return_gradients`: if True, compute/return the input gradients and
            sequences
        `batch_size`: batch size to use for prediction
        `show_progress`: whether or not to show progress bar over batches
    Returns a dictionary of the following structure:
        true_profs: true profile raw counts (N x T x O x S)
        log_pred_profs: predicted profile log probabilities (N x T x O x S)
        true_counts: true total counts (N x T x S)
        log_pred_counts: predicted log counts (N x T x S)
        prof_losses: profile NLL losses (N-array), if `return_losses` is True
        count_losses: counts MSE losses (N-array) if `return_losses` is True
        att_losses: prior losses (N-array), if `return_losses` is True
        input_seqs: one-hot input sequences (N x I x 4), if `return_gradients`
            is True
        input_grads: "hypothetical" input gradients (N x I x 4), if
            `return_gradients` is True
    """
    result = {}
    num_examples = len(coords)
    num_batches = int(np.ceil(num_examples / batch_size))
    batch_iter = tqdm.trange(num_batches) if show_progress else \
        range(num_batches)
    # Keys whose per-batch shapes (beyond the batch dim) are discovered from
    # the first batch's output
    shaped_keys = ["true_profs", "log_pred_profs", "true_counts",
                   "log_pred_counts"]
    loss_keys = ["prof_losses", "count_losses", "att_losses"]
    grad_keys = ["input_seqs", "input_grads"]
    for batch_index in batch_iter:
        piece = slice(batch_index * batch_size, (batch_index + 1) * batch_size)
        batch_result = _get_profile_model_predictions_batch(
            model, coords[piece], num_tasks, input_func, controls=controls,
            fourier_att_prior_freq_limit=fourier_att_prior_freq_limit,
            fourier_att_prior_freq_limit_softness=fourier_att_prior_freq_limit_softness,
            att_prior_grad_smooth_sigma=att_prior_grad_smooth_sigma,
            return_losses=return_losses, return_gradients=return_gradients
        )
        if not result:
            # First batch: allocate full-size arrays shaped like this batch's
            # outputs, but with room for all examples
            alloc_shaped = list(shaped_keys)
            if return_gradients:
                alloc_shaped += grad_keys
            for key in alloc_shaped:
                result[key] = np.empty(
                    (num_examples,) + batch_result[key].shape[1:]
                )
            if return_losses:
                for key in loss_keys:
                    result[key] = np.empty(num_examples)
        # Copy this batch's outputs into its slice of the full arrays
        copy_keys = list(shaped_keys)
        if return_losses:
            copy_keys += loss_keys
        if return_gradients:
            copy_keys += grad_keys
        for key in copy_keys:
            result[key][piece] = batch_result[key]
    return result
def get_binary_model_predictions(
    model, bins, input_func, fourier_att_prior_freq_limit=150,
    fourier_att_prior_freq_limit_softness=0.2, att_prior_grad_smooth_sigma=3,
    return_losses=False, return_gradients=False, batch_size=128,
    show_progress=False
):
    """
    Fetches the necessary data from the given bin indices and runs it through a
    binary model, batching the computation.
    Arguments:
        `model`: a trained `BinaryPredictor`,
        `bins`: an N-array of bin indices to compute outputs for
        `input_func`: a function that takes in `bins` and returns the B x I x 4
            array of one-hot sequences, the B x T array of output values, and
            B x 3 array of underlying coordinates for the input sequence
        `fourier_att_prior_freq_limit`: limit for frequencies in Fourier prior
            loss
        `fourier_att_prior_freq_limit_softness`: degree of softness for limit
        `att_prior_grad_smooth_sigma`: width of smoothing kernel for gradients
        `return_losses`: if True, compute/return the loss values
        `return_gradients`: if True, compute/return the input gradients and
            sequences
        `batch_size`: batch size to use for prediction
        `show_progress`: whether or not to show progress bar over batches
    Returns a dictionary of the following structure:
        true_vals: true binary values (N x T)
        pred_vals: predicted probabilities (N x T)
        coords: coordinates used for prediction (N x 3 object array)
        corr_losses: correctness losses (N-array) if `return_losses` is True
        att_losses: prior losses (N-array), if `return_losses` is True
        input_seqs: one-hot input sequences (N x I x 4), if `return_gradients`
            is True
        input_grads: "hypothetical" input gradients (N x I x 4), if
            `return_gradients` is True
    """
    result = {}
    num_examples = len(bins)
    num_batches = int(np.ceil(num_examples / batch_size))
    batch_iter = tqdm.trange(num_batches) if show_progress else \
        range(num_batches)
    shaped_keys = ["true_vals", "pred_vals"]
    loss_keys = ["corr_losses", "att_losses"]
    grad_keys = ["input_seqs", "input_grads"]
    for batch_index in batch_iter:
        piece = slice(batch_index * batch_size, (batch_index + 1) * batch_size)
        batch_result = _get_binary_model_predictions_batch(
            model, bins[piece], input_func,
            fourier_att_prior_freq_limit=fourier_att_prior_freq_limit,
            fourier_att_prior_freq_limit_softness=fourier_att_prior_freq_limit_softness,
            att_prior_grad_smooth_sigma=att_prior_grad_smooth_sigma,
            return_losses=return_losses, return_gradients=return_gradients
        )
        if not result:
            # First batch: allocate full-size arrays shaped like this batch's
            # outputs, but with room for all examples
            alloc_shaped = list(shaped_keys)
            if return_gradients:
                alloc_shaped += grad_keys
            for key in alloc_shaped:
                result[key] = np.empty(
                    (num_examples,) + batch_result[key].shape[1:]
                )
            # Coordinates are kept as an object array of (chrom, start, end)
            result["coords"] = np.empty((num_examples, 3), dtype=object)
            if return_losses:
                for key in loss_keys:
                    result[key] = np.empty(num_examples)
        # Copy this batch's outputs into its slice of the full arrays
        copy_keys = shaped_keys + ["coords"]
        if return_losses:
            copy_keys += loss_keys
        if return_gradients:
            copy_keys += grad_keys
        for key in copy_keys:
            result[key][piece] = batch_result[key]
    return result
if __name__ == "__main__":
    # Manual smoke test: runs both prediction pipelines against hard-coded
    # cluster paths (models, data, and genome under /users/amtseng) — only
    # runnable on that environment
    reference_fasta = "/users/amtseng/genomes/hg38.fasta"
    chrom_set = ["chr21"]  # restrict to one chromosome to keep the run small
    print("Testing profile model")
    input_length = 1346
    profile_length = 1000
    controls = "matched"
    num_tasks = 4
    files_spec_path = "/users/amtseng/att_priors/data/processed/ENCODE_TFChIP/profile/config/SPI1/SPI1_training_paths.json"
    model_class = profile_models.ProfilePredictorWithMatchedControls
    model_path = "/users/amtseng/att_priors/models/trained_models/profile/SPI1/1/model_ckpt_epoch_1.pt"
    input_func = data_loading.get_profile_input_func(
        files_spec_path, input_length, profile_length, reference_fasta,
    )
    pos_coords = data_loading.get_positive_profile_coords(
        files_spec_path, chrom_set=chrom_set
    )
    print("Loading model...")
    # Gradients must stay enabled: predictions also compute input gradients
    torch.set_grad_enabled(True)
    device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
    model = model_util.restore_model(model_class, model_path)
    model.eval()
    model = model.to(device)
    print("Running predictions...")
    x = get_profile_model_predictions(
        model, pos_coords, num_tasks, input_func, controls=controls,
        return_losses=True, return_gradients=True, show_progress=True
    )
    print("")
    print("Testing binary model")
    input_length = 1000
    files_spec_path = "/users/amtseng/att_priors/data/processed/ENCODE_TFChIP/binary/config/SPI1/SPI1_training_paths.json"
    model_class = binary_models.BinaryPredictor
    model_path = "/users/amtseng/att_priors/models/trained_models/binary/SPI1/1/model_ckpt_epoch_1.pt"
    input_func = data_loading.get_binary_input_func(
        files_spec_path, input_length, reference_fasta
    )
    pos_bins = data_loading.get_positive_binary_bins(
        files_spec_path, chrom_set=chrom_set
    )
    print("Loading model...")
    torch.set_grad_enabled(True)
    device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
    model = model_util.restore_model(model_class, model_path)
    model.eval()
    model = model.to(device)
    print("Running predictions...")
    x = get_binary_model_predictions(
        model, pos_bins, input_func, return_losses=True, return_gradients=True,
        show_progress=True
    )
| 45.774737
| 123
| 0.66495
| 2,898
| 21,743
| 4.701518
| 0.099724
| 0.025835
| 0.033028
| 0.039046
| 0.834422
| 0.793248
| 0.752734
| 0.713835
| 0.697395
| 0.671486
| 0
| 0.006522
| 0.252495
| 21,743
| 474
| 124
| 45.871308
| 0.831785
| 0.349032
| 0
| 0.47931
| 0
| 0.006897
| 0.103058
| 0.029183
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013793
| false
| 0
| 0.024138
| 0
| 0.051724
| 0.024138
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e53e5afbe7fe1556a0bd1e7522da7fbebd4148d8
| 4,480
|
py
|
Python
|
test/test_main.py
|
jenshnielsen/versioningit
|
b575e300ae2ea78e283254537cffd30135ae7fe6
|
[
"MIT"
] | 17
|
2021-07-05T23:41:36.000Z
|
2022-03-10T14:55:24.000Z
|
test/test_main.py
|
jenshnielsen/versioningit
|
b575e300ae2ea78e283254537cffd30135ae7fe6
|
[
"MIT"
] | 20
|
2021-07-05T23:56:09.000Z
|
2022-03-14T13:04:09.000Z
|
test/test_main.py
|
jenshnielsen/versioningit
|
b575e300ae2ea78e283254537cffd30135ae7fe6
|
[
"MIT"
] | 4
|
2021-09-04T13:24:49.000Z
|
2022-03-25T19:44:19.000Z
|
import logging
import os
from pathlib import Path
import subprocess
import sys
from _pytest.capture import CaptureFixture
import pytest
from pytest_mock import MockerFixture
from versioningit.__main__ import main
from versioningit.errors import Error
def test_command(
    capsys: CaptureFixture[str], mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Bare `versioningit` prints the version of the current directory."""
    monkeypatch.setattr(sys, "argv", ["versioningit"])
    get_version_mock = mocker.patch(
        "versioningit.__main__.get_version", return_value="THE VERSION"
    )
    log_config_spy = mocker.spy(logging, "basicConfig")
    main()
    get_version_mock.assert_called_once_with(os.curdir, write=False, fallback=True)
    log_config_spy.assert_called_once_with(
        format="[%(levelname)-8s] %(name)s: %(message)s",
        level=logging.WARNING,
    )
    captured = capsys.readouterr()
    assert captured.out == "THE VERSION\n"
    assert captured.err == ""
def test_command_arg(
    capsys: CaptureFixture[str], mocker: MockerFixture, tmp_path: Path
) -> None:
    """A directory argument is forwarded verbatim to get_version()."""
    get_version_mock = mocker.patch(
        "versioningit.__main__.get_version", return_value="THE VERSION"
    )
    main([str(tmp_path)])
    get_version_mock.assert_called_once_with(str(tmp_path), write=False, fallback=True)
    captured = capsys.readouterr()
    assert captured.out == "THE VERSION\n"
    assert captured.err == ""
def test_command_write(capsys: CaptureFixture[str], mocker: MockerFixture) -> None:
    """The --write flag is translated into write=True."""
    get_version_mock = mocker.patch(
        "versioningit.__main__.get_version", return_value="THE VERSION"
    )
    main(["--write"])
    get_version_mock.assert_called_once_with(os.curdir, write=True, fallback=True)
    captured = capsys.readouterr()
    assert captured.out == "THE VERSION\n"
    assert captured.err == ""
def test_command_next_version(
    capsys: CaptureFixture[str], mocker: MockerFixture
) -> None:
    """--next-version routes to get_next_version() for the current dir."""
    next_version_mock = mocker.patch(
        "versioningit.__main__.get_next_version", return_value="THE NEXT VERSION"
    )
    main(["--next-version"])
    next_version_mock.assert_called_once_with(os.curdir)
    captured = capsys.readouterr()
    assert captured.out == "THE NEXT VERSION\n"
    assert captured.err == ""
def test_command_next_version_arg(
    capsys: CaptureFixture[str], mocker: MockerFixture, tmp_path: Path
) -> None:
    """-n with a directory routes that path to get_next_version()."""
    next_version_mock = mocker.patch(
        "versioningit.__main__.get_next_version", return_value="THE NEXT VERSION"
    )
    main(["-n", str(tmp_path)])
    next_version_mock.assert_called_once_with(str(tmp_path))
    captured = capsys.readouterr()
    assert captured.out == "THE NEXT VERSION\n"
    assert captured.err == ""
@pytest.mark.parametrize(
    "arg,log_level",
    [
        ("-v", logging.INFO),
        ("-vv", logging.DEBUG),
        ("-vvv", logging.DEBUG),
    ],
)
def test_command_verbose(
    capsys: CaptureFixture[str], mocker: MockerFixture, arg: str, log_level: int
) -> None:
    """Each -v level configures logging at the expected severity (capped at DEBUG)."""
    get_version_mock = mocker.patch(
        "versioningit.__main__.get_version", return_value="THE VERSION"
    )
    log_config_spy = mocker.spy(logging, "basicConfig")
    main([arg])
    get_version_mock.assert_called_once_with(os.curdir, write=False, fallback=True)
    log_config_spy.assert_called_once_with(
        format="[%(levelname)-8s] %(name)s: %(message)s",
        level=log_level,
    )
    captured = capsys.readouterr()
    assert captured.out == "THE VERSION\n"
    assert captured.err == ""
def test_command_error(
    capsys: CaptureFixture[str], mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch
) -> None:
    """A versioningit Error exits with status 1 and reports on stderr."""
    monkeypatch.setattr(sys, "argv", ["versioningit"])
    get_version_mock = mocker.patch(
        "versioningit.__main__.get_version", side_effect=Error("Something broke")
    )
    with pytest.raises(SystemExit) as excinfo:
        main()
    assert excinfo.value.args == (1,)
    get_version_mock.assert_called_once_with(os.curdir, write=False, fallback=True)
    captured = capsys.readouterr()
    assert captured.out == ""
    assert captured.err == "versioningit: Error: Something broke\n"
def test_command_subprocess_error(
    caplog: pytest.LogCaptureFixture,
    capsys: CaptureFixture[str],
    mocker: MockerFixture,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A CalledProcessError propagates its return code and logs the command."""
    monkeypatch.setattr(sys, "argv", ["versioningit"])
    get_version_mock = mocker.patch(
        "versioningit.__main__.get_version",
        side_effect=subprocess.CalledProcessError(
            returncode=42, cmd=["git", "-C", ".", "get details"], output=b"", stderr=b""
        ),
    )
    with pytest.raises(SystemExit) as excinfo:
        main()
    # The process exit code mirrors the failed subprocess's return code
    assert excinfo.value.args == (42,)
    get_version_mock.assert_called_once_with(os.curdir, write=False, fallback=True)
    captured = capsys.readouterr()
    assert captured.out == ""
    assert captured.err == ""
    assert caplog.record_tuples == [
        ("versioningit", logging.ERROR, "git -C . 'get details': command returned 42")
    ]
| 32.230216
| 88
| 0.673661
| 540
| 4,480
| 5.375926
| 0.175926
| 0.041337
| 0.055115
| 0.068894
| 0.774371
| 0.759904
| 0.759904
| 0.749914
| 0.738202
| 0.725801
| 0
| 0.002479
| 0.189509
| 4,480
| 138
| 89
| 32.463768
| 0.797026
| 0
| 0
| 0.508197
| 0
| 0
| 0.16875
| 0.061161
| 0
| 0
| 0
| 0
| 0.237705
| 1
| 0.065574
| false
| 0
| 0.081967
| 0
| 0.147541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e5404c05d2371c5c4f196346c816c08400d2dfec
| 55
|
py
|
Python
|
src/ionotomo/tests/test_geometry.py
|
Joshuaalbert/IonoTomo
|
9f50fbac698d43a824dd098d76dce93504c7b879
|
[
"Apache-2.0"
] | 7
|
2017-06-22T08:47:07.000Z
|
2021-07-01T12:33:02.000Z
|
src/ionotomo/tests/test_geometry.py
|
Joshuaalbert/IonoTomo
|
9f50fbac698d43a824dd098d76dce93504c7b879
|
[
"Apache-2.0"
] | 1
|
2019-04-03T15:21:19.000Z
|
2019-04-03T15:48:31.000Z
|
src/ionotomo/tests/test_geometry.py
|
Joshuaalbert/IonoTomo
|
9f50fbac698d43a824dd098d76dce93504c7b879
|
[
"Apache-2.0"
] | 2
|
2020-03-01T16:20:00.000Z
|
2020-07-07T15:09:02.000Z
|
import numpy as np
from ionotomo import *
import os
| 7.857143
| 22
| 0.745455
| 9
| 55
| 4.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236364
| 55
| 6
| 23
| 9.166667
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
008b713d67efa7f6a689a868bbfcf7920ce7c668
| 28
|
py
|
Python
|
python/ql/test/library-tests/PointsTo/new/code/package/module.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/library-tests/PointsTo/new/code/package/module.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/library-tests/PointsTo/new/code/package/module.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
def module(args):
    """Do nothing; intentionally empty (presumably a points-to analysis test fixture — confirm against the test suite)."""
    pass
| 7
| 17
| 0.607143
| 4
| 28
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 28
| 3
| 18
| 9.333333
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
008e25d18360eef4eadfd656279caa05e2e43534
| 113
|
py
|
Python
|
tests/conftest.py
|
elaspic/elaspic2-rest-api
|
460315387c5a7ea5a96bece8b6888e0b97af0580
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
elaspic/elaspic2-rest-api
|
460315387c5a7ea5a96bece8b6888e0b97af0580
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
elaspic/elaspic2-rest-api
|
460315387c5a7ea5a96bece8b6888e0b97af0580
|
[
"MIT"
] | null | null | null |
import os
from dotenv import load_dotenv
# Load environment variables for the test session from the file named by the
# ENV_FILE variable (default ".env"), overriding values already in os.environ.
load_dotenv(dotenv_path=os.getenv("ENV_FILE", ".env"), override=True)
| 18.833333
| 69
| 0.778761
| 18
| 113
| 4.666667
| 0.611111
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097345
| 113
| 5
| 70
| 22.6
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0.106195
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
00e8116f1110d01caf75ccb9eeaf96ec7d7ddc0b
| 9,719
|
py
|
Python
|
tests/test_multipass_vm.py
|
jebeckford/cloudmesh-multipass
|
c8bc14c5093ab599c184b2bf5f934b8a4f2b0791
|
[
"Apache-2.0"
] | null | null | null |
tests/test_multipass_vm.py
|
jebeckford/cloudmesh-multipass
|
c8bc14c5093ab599c184b2bf5f934b8a4f2b0791
|
[
"Apache-2.0"
] | 5
|
2020-02-12T09:10:46.000Z
|
2020-03-04T22:16:35.000Z
|
tests/test_multipass_vm.py
|
jebeckford/cloudmesh-multipass
|
c8bc14c5093ab599c184b2bf5f934b8a4f2b0791
|
[
"Apache-2.0"
] | 14
|
2020-01-29T23:15:48.000Z
|
2020-03-23T03:04:38.000Z
|
###############################################################
# pytest -v --capture=no tests
# pytest -v --capture=no tests/test_multipass_general.py
# pytest -v tests/test_multipass_general.py
# pytest -v --capture=no tests/test_multipass_general.py::Test_Multipass::<METHODNAME>
###############################################################
import pytest
from cloudmesh.common.Shell import Shell
from cloudmesh.common.debug import VERBOSE
from cloudmesh.common.util import HEADING
from cloudmesh.common.Benchmark import Benchmark
from cloudmesh.multipass.Provider import Provider
# Module-level test configuration: enable benchmark debug output and define
# the cloud name and instance name shared by the shell tests below.
Benchmark.debug()
cloud= "local"
instance="cloudmesh-test"
@pytest.mark.incremental
class TestMultipass:
vm_name_prefix = "cloudmeshvm" #Note: multipass does not allow - or _ in vm name.
def test_cms_vm(self):
    """Run `cms multipass vm` via the CLI and expect "18.04" in the output."""
    HEADING()
    self.provider = Provider()
    Benchmark.Start()
    result = Shell.execute("cms multipass vm", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    result = str(result)
    assert "18.04" in result
    Benchmark.Status(True)
def test_provider_vm(self):
    """List VMs through the Provider API and expect "18.04" in the output."""
    HEADING()
    self.provider = Provider()
    Benchmark.Start()
    result = self.provider.vm()
    Benchmark.Stop()
    VERBOSE(result)
    result = str(result)
    assert "18.04" in result
    Benchmark.Status(True)
def test_cms_shell(self):
    """Launch, shell into, then delete and purge a test instance via the CLI."""
    HEADING()
    Benchmark.Start()
    Shell.execute(f"cms multipass launch --name={instance}", shell=True)
    result = Shell.execute(f"cms multipass shell {instance}", shell=True)
    Shell.execute(f"cms multipass delete {instance}",shell=True)
    Shell.execute(f"cms multipass purge",shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    # assertion missing
    Benchmark.Status(True)
def test_provider_shell(self):
    """Launch an instance, open a shell via the Provider API, then clean up."""
    HEADING()
    # Bug fix: pytest instantiates the test class freshly for each test, so
    # self.provider was never set here and self.provider.shell(...) would
    # raise AttributeError; create the Provider like the sibling tests do.
    self.provider = Provider()
    Benchmark.Start()
    Shell.execute(f"cms multipass launch --name={instance}", shell=True)
    result = self.provider.shell(name=instance)
    Shell.execute(f"cms multipass delete {instance}",shell=True)
    Shell.execute(f"cms multipass purge",shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    # assertion missing
    Benchmark.Status(True)
def test_info(self):
    """Run `cms multipass info` via the CLI and check it produced output."""
    HEADING()
    Benchmark.Start()
    result = Shell.execute("cms multipass info", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    # Idiom fix: None comparisons use `is not`, never `!=` (PEP 8)
    assert result is not None, "result cannot be null"
    Benchmark.Status(True)
def test_create(self):
    """Create VM #1 via the CLI and check the "Launched" confirmation."""
    HEADING()
    vm_name = f"{self.vm_name_prefix}1"
    Benchmark.Start()
    result = Shell.execute(f"cms multipass create {vm_name}", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert f'Launched: {vm_name}' in result, "Error creating instance"
    Benchmark.Status(True)
def test_provider_create(self):
    """Create VM #2 through the Provider API and check it is Running."""
    HEADING()
    self.provider = Provider()
    vm_name = f"{self.vm_name_prefix}2"
    provider = Provider(vm_name)
    Benchmark.Start()
    result = provider.create(vm_name)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Running' in result['status'], "Error creating instance"
    Benchmark.Status(True)
def test_create_with_options(self):
    """Create VM #3 via the CLI with explicit cpu/size/image/mem options."""
    HEADING()
    vm_name = f"{self.vm_name_prefix}3"
    Benchmark.Start()
    result = Shell.execute(f"cms multipass create {vm_name} --cpus=2 --size=3G --image=bionic --mem=1G", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert f'Launched: {vm_name}' in result, "Error creating instance"
    Benchmark.Status(True)
def test_stop(self):
    """Stop VMs #1 and #3 via the CLI and check the Stopped confirmation."""
    HEADING()
    # Using the 2 VMs created by the test_create* methods above.
    vm_names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    Benchmark.Start()
    result = Shell.execute(f"cms multipass stop {vm_names}", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Stopped' in result, "Error stopping instance"
    Benchmark.Status(True)
def test_provider_stop(self):
    """Stop VM #2 through the Provider API and check its status."""
    HEADING()
    self.provider = Provider()
    vm_name = f"{self.vm_name_prefix}2"
    provider = Provider(vm_name)
    Benchmark.Start()
    result = provider.stop(vm_name)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Stopped' in result['status'], "Error stopping instance"
    Benchmark.Status(True)
def test_start(self):
    """Start VMs #1 and #3 via the CLI and check they are Running."""
    HEADING()
    # Using the 2 VMs created by the test_create* methods above.
    vm_names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    Benchmark.Start()
    result = Shell.execute(f"cms multipass start {vm_names}", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Running' in result, "Error starting instance"
    Benchmark.Status(True)
def test_provider_start(self):
    """Start VM #2 through the Provider API and check its status."""
    HEADING()
    self.provider = Provider()
    vm_name = f"{self.vm_name_prefix}2"
    provider = Provider(vm_name)
    Benchmark.Start()
    result = provider.start(vm_name)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Running' in result['status'], "Error starting instance"
    Benchmark.Status(True)
def test_suspend(self):
    """Suspend VMs #1 and #3 via the CLI and check the Suspended confirmation."""
    HEADING()
    # Using the 2 VMs created by the test_create* methods above.
    vm_names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    Benchmark.Start()
    result = Shell.execute(f"cms multipass suspend {vm_names}", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Suspended' in result, "Error suspending instance"
    Benchmark.Status(True)
def test_provider_suspend(self):
    """Suspend a VM through the Provider API."""
    HEADING()
    name = f"{self.vm_name_prefix}2"
    named_provider = Provider(name)
    Benchmark.Start()
    output = named_provider.suspend(name)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'Suspend' in output['status'], "Error suspending instance"
    Benchmark.Status(True)
def test_resume(self):
    """Resume the two VMs (suspending them first) via the cms CLI."""
    HEADING()
    names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    # Ensure the VMs are actually suspended before timing the resume.
    Shell.execute(f"cms multipass suspend {names}", shell=True)
    Benchmark.Start()
    output = Shell.execute(f"cms multipass resume {names}", shell=True)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'Resumed' in output, "Error resuming instance"
    Benchmark.Status(True)
def test_provider_resume(self):
    """Resume a VM through the Provider API (suspending it first)."""
    HEADING()
    vm_name = f"{self.vm_name_prefix}2"
    provider = Provider(vm_name)
    # BUG FIX: the original called Provider.suspend(vm_name) on the class,
    # which passes the name string as `self`; call it on the instance, as
    # the sibling test_provider_suspend does.
    provider.suspend(vm_name)
    Benchmark.Start()
    result = provider.resume(vm_name)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Resume' in result['status'], "Error resuming instance"
    Benchmark.Status(True)
def test_reboot(self):
    """Reboot the two VMs made by the test_create* methods via the cms CLI."""
    HEADING()
    self.provider = Provider()
    vm_names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    Benchmark.Start()
    result = Shell.execute(f"cms multipass reboot {vm_names}", shell=True)
    Benchmark.Stop()
    VERBOSE(result)
    assert 'Running' in result, "Error rebooting instance"
    # Consistency fix: every sibling test records its benchmark status;
    # this one was missing the call.
    Benchmark.Status(True)
def test_provider_reboot(self):
    """Reboot a VM through the Provider API."""
    HEADING()
    self.provider = Provider()
    name = f"{self.vm_name_prefix}2"
    named_provider = Provider(name)
    Benchmark.Start()
    output = named_provider.reboot(name)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'Running' in output['status'], "Error rebooting instance"
    Benchmark.Status(True)
def test_delete(self):
    """Delete the two VMs made by the test_create* methods via the cms CLI."""
    HEADING()
    names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    Benchmark.Start()
    output = Shell.execute(f"cms multipass delete {names}", shell=True)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'deleted' in output, "Error deleting instance"
    Benchmark.Status(True)
def test_provider_delete(self):
    """Delete a VM through the Provider API."""
    HEADING()
    self.provider = Provider()
    name = f"{self.vm_name_prefix}2"
    named_provider = Provider(name)
    Benchmark.Start()
    output = named_provider.delete(name)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'deleted' in output['status'], "Error deleting instance"
    Benchmark.Status(True)
def test_destroy(self):
    """Destroy (purge) the two VMs via the cms CLI."""
    HEADING()
    names = f"{self.vm_name_prefix}1,{self.vm_name_prefix}3"
    Benchmark.Start()
    output = Shell.execute(f"cms multipass destroy {names}", shell=True)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'destroyed' in output, "Error destroying instance"
    Benchmark.Status(True)
def test_provider_destroy(self):
    """Destroy (purge) a VM through the Provider API."""
    HEADING()
    self.provider = Provider()
    name = f"{self.vm_name_prefix}2"
    named_provider = Provider(name)
    Benchmark.Start()
    output = named_provider.destroy(name)
    Benchmark.Stop()
    VERBOSE(output)
    assert 'destroyed' in output['status'], "Error destroying instance"
    Benchmark.Status(True)
#
# NOTHING BELOW THIS LINE
#
def test_benchmark(self):
    """Print the collected benchmark results as CSV (kept as the last test)."""
    HEADING()
    # `cloud` is a module-level tag defined earlier in this file.
    Benchmark.print(csv=True, tag=cloud)
| 27.454802
| 120
| 0.618479
| 1,135
| 9,719
| 5.159471
| 0.096035
| 0.058402
| 0.05123
| 0.065574
| 0.851093
| 0.84375
| 0.798839
| 0.742999
| 0.615096
| 0.536714
| 0
| 0.005872
| 0.264122
| 9,719
| 353
| 121
| 27.532578
| 0.812919
| 0.073361
| 0
| 0.663793
| 0
| 0.00431
| 0.201309
| 0.06037
| 0
| 0
| 0
| 0
| 0.086207
| 1
| 0.099138
| false
| 0.090517
| 0.025862
| 0
| 0.133621
| 0.00431
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
da9a2b19cb2fbe247f93d12fbf85877a0ee27508
| 171
|
py
|
Python
|
python/djoro_regulation_server.py
|
damienlaine/djoro-bcvtb
|
84e5a4b46d554c20ac2ffa22383f386b19af8bc5
|
[
"MIT"
] | 2
|
2019-12-15T13:45:54.000Z
|
2021-12-26T00:26:26.000Z
|
python/djoro_regulation_server.py
|
damienlaine/djoro-bcvtb
|
84e5a4b46d554c20ac2ffa22383f386b19af8bc5
|
[
"MIT"
] | null | null | null |
python/djoro_regulation_server.py
|
damienlaine/djoro-bcvtb
|
84e5a4b46d554c20ac2ffa22383f386b19af8bc5
|
[
"MIT"
] | 1
|
2021-12-26T00:26:41.000Z
|
2021-12-26T00:26:41.000Z
|
# -*- coding: utf-8 -*-
from djoro_regulation_server.djoro_regulation_server import DjoroRegulationServer

# Instantiate and start the regulation server on localhost:9100.
# NOTE(review): presumably a blocking call — confirm against
# DjoroRegulationServer.start before adding code after it.
djoro = DjoroRegulationServer()
djoro.start("localhost", 9100)
| 24.428571
| 81
| 0.795322
| 18
| 171
| 7.333333
| 0.666667
| 0.227273
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0.093567
| 171
| 6
| 82
| 28.5
| 0.819355
| 0.122807
| 0
| 0
| 0
| 0
| 0.060811
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
daa9314caa642a03d9397c8a05b4f6456c29d69f
| 1,472
|
py
|
Python
|
generateGUID/tests/test_GUID_Bash.py
|
INT-NIT-calcul/generateGUID
|
446aa5e6d4d37a6c41732b63995c9e39e3ce9b1b
|
[
"MIT"
] | null | null | null |
generateGUID/tests/test_GUID_Bash.py
|
INT-NIT-calcul/generateGUID
|
446aa5e6d4d37a6c41732b63995c9e39e3ce9b1b
|
[
"MIT"
] | 1
|
2020-01-07T13:51:59.000Z
|
2020-01-07T15:58:22.000Z
|
generateGUID/tests/test_GUID_Bash.py
|
INT-NIT-calcul/generateGUID
|
446aa5e6d4d37a6c41732b63995c9e39e3ce9b1b
|
[
"MIT"
] | 3
|
2019-10-04T09:03:34.000Z
|
2019-10-28T15:22:45.000Z
|
# coding: utf-8
from guid_core.generate_GUID import generate_GUID
import unittest

# Reference GUID: every test below asserts that case/accent/separator
# variants of the same identity normalize to this exact value.
GUID = generate_GUID("Jean-Michel"+"Frégnac"+"22/03/1949"+"M")
class TestUM(unittest.TestCase):
    """generate_GUID must be insensitive to case, accents and date separators."""

    def setUp(self):
        pass

    def _check(self, first, last, dob, sex):
        # Every variant must reduce to the reference GUID computed at import.
        self.assertEqual(generate_GUID(first + last + dob + sex), GUID)

    def test_guid(self):
        self._check("Jean-Michel", "Frégnac", "22/03/1949", "M")

    def test_guid1(self):
        self._check("Jean-michel", "Frégnac", "22/03/1949", "M")

    def test_guid2(self):
        self._check("Jean-michel", "Fregnac", "22/03/1949", "M")

    def test_guid3(self):
        self._check("Jean michel", "Frégnac", "22/03/1949", "M")

    def test_guid4(self):
        self._check("Jean-Michel", "Frégnac", r"22\03\1949", "M")

    def test_guid5(self):
        self._check("Jean-michel", "Fregnac", "22-03-1949", "M")

    def test_guid6(self):
        self._check("Jean michel", "Frégnac", "22 03 1949", "M")

    def test_guid7(self):
        self._check("jean-michel", "frégnac", "22/03/1949", "M")

    def test_guid8(self):
        self._check("Jean michel", "Frégnac", "22031949", "M")
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 23
| 62
| 0.620924
| 197
| 1,472
| 4.48731
| 0.203046
| 0.162896
| 0.081448
| 0.091629
| 0.765837
| 0.711538
| 0.711538
| 0.642534
| 0.642534
| 0.642534
| 0
| 0.07679
| 0.212636
| 1,472
| 63
| 63
| 23.365079
| 0.685936
| 0.008832
| 0
| 0.314286
| 1
| 0
| 0.203157
| 0
| 0.257143
| 0
| 0
| 0
| 0.257143
| 1
| 0.285714
| false
| 0.028571
| 0.057143
| 0
| 0.371429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dad0ed184dabd9b1844b79fe42abb7b917c7763b
| 3,814
|
py
|
Python
|
code/main.py
|
jiaxx/temporal_learning_paper
|
abffd5bfb36aaad7139485a9b8bd29f3858389e8
|
[
"MIT"
] | null | null | null |
code/main.py
|
jiaxx/temporal_learning_paper
|
abffd5bfb36aaad7139485a9b8bd29f3858389e8
|
[
"MIT"
] | null | null | null |
code/main.py
|
jiaxx/temporal_learning_paper
|
abffd5bfb36aaad7139485a9b8bd29f3858389e8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 4 17:55:20 2020
@author: Xiaoxuan Jia
"""
import numpy as np
import scipy
from scipy.stats import norm
import numpy.random as npr
import random
import utils as ut
import learningutil as lt
def d_prime(CF):
    """Compute a per-class d-prime from a square confusion matrix *CF*.

    For each class i: H (hit rate) is the diagonal entry over its column
    sum, and F (false-alarm rate) is computed from CF with column i removed.
    Returns a list with one entry per class.

    NOTE(review): with the builtin sum(), the denominator of F is a
    per-column array, so each returned entry is an array rather than a
    scalar. Preserved as-is — confirm whether a total (tempCF.sum()) was
    intended.
    """
    d = []
    for i in range(len(CF[1])):
        H = CF[i, i]/sum(CF[:,i]) # H = target diagonal / target column
        # Fix: scipy.delete was a NumPy alias removed from modern SciPy;
        # np.delete is the supported equivalent.
        tempCF = np.delete(CF, i, 1)  # delete the target column
        F = sum(tempCF[i,:])/sum(tempCF)
        d.append(norm.ppf(H)-norm.ppf(F))
    return d
def sample_with_replacement(list):
    """Return a bootstrap sample: len(list) items drawn uniformly with replacement.

    NOTE: the parameter name shadows the builtin `list`; kept unchanged for
    backward compatibility with existing callers.
    """
    l = len(list)  # the sample needs to be as long as the input
    _random = random.random  # hoist the attribute lookup out of the comprehension
    # Py3 fix: xrange no longer exists; range is the lazy iterator here.
    return [list[int(_random()*l)] for i in range(l)]
def compute_CM(neuron, meta, obj, s, train, test):
    """Train a multi-class classifier on all sizes, test on size *s*.

    Returns the list of per-split normalized confusion matrices and the
    d-prime of their mean.
    """
    metric_kwargs = {'model_type': 'MCC2'}  # multi-class classifier
    eval_config = {
        'train_q': {'obj': [obj[0], obj[1]]},  # train on all sizes
        'test_q': {'obj': [obj[0], obj[1]], 's': [s]},  # 'size_range': [1.3],
        'npc_train': train,  # smaller than the per-object sample count
        'npc_test': test,
        'npc_validate': 0,
        'num_splits': 100,
        'split_by': 'obj',
        'metric_screen': 'classifier',  # correlation matrix as classifier
        'labelfunc': 'obj',
        'metric_kwargs': metric_kwargs,
    }
    result = ut.compute_metric_base(neuron, meta, eval_config)
    # The sum of the CMs equals npc_test * number of objects.
    raw = np.array(result['result_summary']['cms'])
    CMs = [lt.normalize_CM(raw[:, :, split])
           for split in range(eval_config['num_splits'])]
    d = ut.dprime(np.mean(CMs, axis=0))[0]
    return CMs, d
def compute_CM_samesize(neuron, meta, obj, s, train, test):
    """Train and test the multi-class classifier on the same size *s*.

    Returns the list of per-split normalized confusion matrices and the
    d-prime of their mean.
    """
    metric_kwargs = {'model_type': 'MCC2'}  # multi-class classifier
    eval_config = {
        'train_q': {'obj': [obj[0], obj[1]], 's': [s]},  # train on this size
        'test_q': {'obj': [obj[0], obj[1]], 's': [s]},  # test on this size
        'npc_train': train,  # smaller than the per-object sample count
        'npc_test': test,
        'npc_validate': 0,
        'num_splits': 100,
        'split_by': 'obj',
        'metric_screen': 'classifier',  # correlation matrix as classifier
        'labelfunc': 'obj',
        'metric_kwargs': metric_kwargs,
    }
    result = ut.compute_metric_base(neuron, meta, eval_config)
    # The sum of the CMs equals npc_test * number of objects.
    raw = np.array(result['result_summary']['cms'])
    CMs = [lt.normalize_CM(raw[:, :, split])
           for split in range(eval_config['num_splits'])]
    d = ut.dprime(np.mean(CMs, axis=0))[0]
    return CMs, d
def compute_CM_fixed_classifier(neuron, meta, obj, s, train, test):
    """Train on the 'Pre' phase, test on the 'Post' phase at size *s*.

    Returns the list of per-split normalized confusion matrices and the
    d-prime of their mean.
    """
    metric_kwargs = {'model_type': 'MCC2'}  # multi-class classifier
    eval_config = {
        'train_q': {'obj': [obj[0], obj[1]], 'test_phase':['Pre']},  # train on all sizes
        'test_q': {'obj': [obj[0], obj[1]], 's': [s], 'test_phase':['Post']},  # 'size_range': [1.3],
        'npc_train': train,  # smaller than the per-object sample count
        'npc_test': test,
        'npc_validate': 0,
        'num_splits': 100,
        'split_by': 'obj',
        'metric_screen': 'classifier',  # correlation matrix as classifier
        'labelfunc': 'obj',
        'metric_kwargs': metric_kwargs,
    }
    result = ut.compute_metric_base(neuron, meta, eval_config)
    # The sum of the CMs equals npc_test * number of objects.
    raw = np.array(result['result_summary']['cms'])
    CMs = [lt.normalize_CM(raw[:, :, split])
           for split in range(eval_config['num_splits'])]
    d = ut.dprime(np.mean(CMs, axis=0))[0]
    return CMs, d
| 34.990826
| 103
| 0.598322
| 552
| 3,814
| 3.987319
| 0.230072
| 0.049069
| 0.019082
| 0.021808
| 0.750568
| 0.750568
| 0.750568
| 0.750568
| 0.749659
| 0.749659
| 0
| 0.017708
| 0.244887
| 3,814
| 109
| 104
| 34.990826
| 0.746528
| 0.217619
| 0
| 0.638554
| 0
| 0
| 0.170897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060241
| false
| 0
| 0.084337
| 0
| 0.204819
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9742ca228af8af48e54daa5ae0d6558ce4105225
| 1,489
|
py
|
Python
|
src/mode_manager.py
|
brulzki/pianopad
|
3e67d9edaf6786876c22917926ef4736691d1653
|
[
"MIT"
] | 3
|
2018-01-04T10:43:49.000Z
|
2021-02-26T23:23:51.000Z
|
src/mode_manager.py
|
brulzki/pianopad
|
3e67d9edaf6786876c22917926ef4736691d1653
|
[
"MIT"
] | 8
|
2017-08-25T02:24:39.000Z
|
2021-10-10T03:49:42.000Z
|
src/mode_manager.py
|
brulzki/pianopad
|
3e67d9edaf6786876c22917926ef4736691d1653
|
[
"MIT"
] | null | null | null |
import os
from mode import Mode
def load_modes():
    """Scan the ./modes directory and build a Mode for each subdirectory."""
    discovered = []
    for entry in os.listdir(r'modes'):
        candidate = 'modes' + os.sep + entry
        if os.path.isdir(candidate):
            discovered.append(Mode(candidate))
    return discovered
# Module state: the available modes and a cursor into them.
modes = load_modes()
current_mode_position = 0
current_mode = modes[current_mode_position]
# Two favourite-mode slots, unset until assigned.
favorites = [None, None]
def cycle_mode(midiout):
    """Advance to the next mode, wrapping to the first after the last."""
    global current_mode_position
    global current_mode
    nxt = current_mode_position + 1
    if nxt >= len(modes):
        nxt = 0
    current_mode_position = nxt
    modes[nxt].refresh_background(midiout)
    current_mode = modes[nxt]
def next_mode(midiout):
    """Move to the next mode unless already at the last one."""
    global current_mode_position
    global current_mode
    if current_mode_position < len(modes)-1:
        current_mode_position += 1
    # NOTE(review): the repaint runs unconditionally here (matching
    # cycle_mode's shape); confirm the original indentation did not put
    # these two lines inside the `if`.
    modes[current_mode_position].refresh_background(midiout)
    current_mode = modes[current_mode_position]
def previous_mode(midiout):
    """Move to the previous mode unless already at the first one."""
    global current_mode_position
    global current_mode
    if current_mode_position > 0:
        current_mode_position -= 1
    # NOTE(review): the repaint runs unconditionally here (matching
    # cycle_mode's shape); confirm the original indentation did not put
    # these two lines inside the `if`.
    modes[current_mode_position].refresh_background(midiout)
    current_mode = modes[current_mode_position]
def set_mode(midiout, mode):
    """Jump directly to *mode* (an index into `modes`) and repaint it."""
    global current_mode_position
    global current_mode
    current_mode_position = mode
    selected = modes[mode]
    selected.refresh_background(midiout)
    current_mode = selected
| 24.016129
| 64
| 0.724647
| 190
| 1,489
| 5.315789
| 0.189474
| 0.337624
| 0.413861
| 0.261386
| 0.776238
| 0.747525
| 0.655446
| 0.655446
| 0.655446
| 0.644554
| 0
| 0.005887
| 0.201478
| 1,489
| 62
| 65
| 24.016129
| 0.843566
| 0
| 0
| 0.512195
| 0
| 0
| 0.006766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121951
| false
| 0
| 0.04878
| 0
| 0.195122
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9765a88b8c8109d937ec637ddab9f892858f75ae
| 159
|
py
|
Python
|
socialml/__init__.py
|
erees1/socialml
|
76ec276395e7819c6c834f4819ccdf8989aa6cab
|
[
"MIT"
] | null | null | null |
socialml/__init__.py
|
erees1/socialml
|
76ec276395e7819c6c834f4819ccdf8989aa6cab
|
[
"MIT"
] | null | null | null |
socialml/__init__.py
|
erees1/socialml
|
76ec276395e7819c6c834f4819ccdf8989aa6cab
|
[
"MIT"
] | null | null | null |
from socialml.extractors import FbMessenger, IMessage
from socialml.make_dataset import make_training_examples
from socialml.filter_array import filter_array
| 31.8
| 56
| 0.886792
| 21
| 159
| 6.47619
| 0.571429
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08805
| 159
| 4
| 57
| 39.75
| 0.937931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
979c981a445f7850b183db2350040d8d6d5ca0b6
| 2,903
|
py
|
Python
|
tests/test_components_eta_letters.py
|
Robpol86/etaprogress
|
224e8a248c2bf820bad218763281914ad3983fff
|
[
"MIT"
] | 13
|
2015-08-25T05:54:21.000Z
|
2021-03-23T15:56:58.000Z
|
tests/test_components_eta_letters.py
|
Robpol86/etaprogress
|
224e8a248c2bf820bad218763281914ad3983fff
|
[
"MIT"
] | 5
|
2015-03-14T16:31:38.000Z
|
2019-01-13T20:46:25.000Z
|
tests/test_components_eta_letters.py
|
Robpol86/etaprogress
|
224e8a248c2bf820bad218763281914ad3983fff
|
[
"MIT"
] | 5
|
2015-05-31T14:16:50.000Z
|
2021-02-06T11:23:43.000Z
|
from etaprogress.components.eta_conversions import eta_letters
def test():
    """Default formatting: seconds up to weeks, no leading zeros."""
    cases = [
        (0, '0s'),
        (9, '9s'),
        (59, '59s'),
        (60, '1m 0s'),
        (61, '1m 1s'),
        (3599, '59m 59s'),
        (3600, '1h 0m 0s'),
        (3601, '1h 0m 1s'),
        (3661, '1h 1m 1s'),
        (604799, '6d 23h 59m 59s'),
        (604800, '1w 0d 0h 0m 0s'),
        (604801, '1w 0d 0h 0m 1s'),
    ]
    for seconds, expected in cases:
        assert expected == eta_letters(seconds)
def test_leading_zero():
    """leading_zero=True pads every unit except the most significant."""
    cases = [
        (0, '00s'),
        (9, '09s'),
        (59, '59s'),
        (60, '01m 00s'),
        (61, '01m 01s'),
        (3599, '59m 59s'),
        (3600, '1h 00m 00s'),
        (3601, '1h 00m 01s'),
        (3661, '1h 01m 01s'),
        (604799, '6d 23h 59m 59s'),
        (604800, '1w 0d 0h 00m 00s'),
        (604801, '1w 0d 0h 00m 01s'),
    ]
    for seconds, expected in cases:
        assert expected == eta_letters(seconds, leading_zero=True)
def test_shortest():
    """shortest=True keeps only the most significant unit."""
    cases = [
        (0, '0s'),
        (9, '9s'),
        (59, '59s'),
        (60, '1m'),
        (61, '1m'),
        (3599, '59m'),
        (3600, '1h'),
        (3601, '1h'),
        (3661, '1h'),
        (604799, '6d'),
        (604800, '1w'),
        (604801, '1w'),
    ]
    for seconds, expected in cases:
        assert expected == eta_letters(seconds, shortest=True)
def test_shortest_and_leading_zero():
    """Both flags: a single, zero-padded unit (only minutes/seconds pad)."""
    cases = [
        (0, '00s'),
        (9, '09s'),
        (59, '59s'),
        (60, '01m'),
        (61, '01m'),
        (3599, '59m'),
        (3600, '1h'),
        (3601, '1h'),
        (3661, '1h'),
        (604799, '6d'),
        (604800, '1w'),
        (604801, '1w'),
    ]
    for seconds, expected in cases:
        assert expected == eta_letters(seconds, shortest=True, leading_zero=True)
| 39.22973
| 72
| 0.689287
| 423
| 2,903
| 4.539007
| 0.106383
| 0.255208
| 0.1875
| 0.240625
| 0.793229
| 0.648958
| 0.539583
| 0.507292
| 0.165104
| 0
| 0
| 0.121717
| 0.173614
| 2,903
| 73
| 73
| 39.767123
| 0.678616
| 0
| 0
| 0
| 0
| 0
| 0.086807
| 0
| 0
| 0
| 0
| 0
| 0.90566
| 1
| 0.075472
| true
| 0
| 0.018868
| 0
| 0.09434
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
97cae70ef775586d562727daf919a47339bd2ec9
| 79
|
py
|
Python
|
project/settings/common.py
|
teracyhq-incubator/django-boilerplate
|
827ace7d3a89caab9c3bba4da7c31f3daef58e2f
|
[
"BSD-3-Clause"
] | 1
|
2018-01-11T14:20:56.000Z
|
2018-01-11T14:20:56.000Z
|
project/settings/common.py
|
teracyhq-incubator/django-boilerplate
|
827ace7d3a89caab9c3bba4da7c31f3daef58e2f
|
[
"BSD-3-Clause"
] | null | null | null |
project/settings/common.py
|
teracyhq-incubator/django-boilerplate
|
827ace7d3a89caab9c3bba4da7c31f3daef58e2f
|
[
"BSD-3-Clause"
] | 2
|
2018-09-29T05:28:20.000Z
|
2019-07-10T17:47:45.000Z
|
"""
common specific project settings
"""
from settings.common import * # noqa
| 15.8
| 37
| 0.721519
| 9
| 79
| 6.333333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 79
| 4
| 38
| 19.75
| 0.863636
| 0.481013
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8ae2b34490cd980519864c061431753907a158bc
| 43
|
py
|
Python
|
nautobot_chatops_ipfabric/api/__init__.py
|
justinjeffery-ipf/nautobot-plugin-chatops-ipfabric
|
67e58e3d251b41227808cabd6120d78411193863
|
[
"Apache-2.0"
] | 6
|
2021-11-26T15:50:21.000Z
|
2022-01-25T18:36:44.000Z
|
nautobot_chatops_ipfabric/api/__init__.py
|
justinjeffery-ipf/nautobot-plugin-chatops-ipfabric
|
67e58e3d251b41227808cabd6120d78411193863
|
[
"Apache-2.0"
] | 21
|
2021-11-30T02:31:40.000Z
|
2022-02-17T04:17:36.000Z
|
nautobot_chatops_ipfabric/api/__init__.py
|
justinjeffery-ipf/nautobot-plugin-chatops-ipfabric
|
67e58e3d251b41227808cabd6120d78411193863
|
[
"Apache-2.0"
] | 2
|
2022-01-18T17:53:29.000Z
|
2022-02-16T16:06:15.000Z
|
"""REST API module for ipfabric plugin."""
| 21.5
| 42
| 0.697674
| 6
| 43
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 43
| 1
| 43
| 43
| 0.810811
| 0.837209
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c11cd35941eddb0fe50f15c9a5f57ed3519a55de
| 118
|
bzl
|
Python
|
pesto/nil/sycl/platform.bzl
|
quantapix/semtools
|
dce8840adc86e6a9672447aace969d37e236f922
|
[
"MIT"
] | null | null | null |
pesto/nil/sycl/platform.bzl
|
quantapix/semtools
|
dce8840adc86e6a9672447aace969d37e236f922
|
[
"MIT"
] | null | null | null |
pesto/nil/sycl/platform.bzl
|
quantapix/semtools
|
dce8840adc86e6a9672447aace969d37e236f922
|
[
"MIT"
] | null | null | null |
def sycl_library_path(name):
    """Return the conventional shared-library path (lib/lib<name>.so)."""
    return "lib/lib" + name + ".so"
def readlink_command():
    """Name of the binary used to resolve symlinks on this platform."""
    return "readlink"
| 13.111111
| 38
| 0.686441
| 16
| 118
| 4.875
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 118
| 8
| 39
| 14.75
| 0.795918
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c12477fb1b7e78f0ce243f4efb53297ce9171583
| 56
|
py
|
Python
|
api/__init__.py
|
rsoorajs/stream-cloud
|
e285f9d4cb3f12dc8e22584fc8948a02f5f035dd
|
[
"MIT"
] | 43
|
2021-10-30T08:18:11.000Z
|
2022-03-22T07:33:04.000Z
|
api/__init__.py
|
Artinfee/stream-cloud
|
c3469d43542bb97261e4884297cffe87c4d68e7a
|
[
"MIT"
] | 4
|
2021-11-15T14:24:48.000Z
|
2022-03-19T21:24:03.000Z
|
api/__init__.py
|
Artinfee/stream-cloud
|
c3469d43542bb97261e4884297cffe87c4d68e7a
|
[
"MIT"
] | 113
|
2021-10-30T06:45:59.000Z
|
2022-03-31T15:52:53.000Z
|
from .router import Router
from .telegram import Client
| 18.666667
| 28
| 0.821429
| 8
| 56
| 5.75
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 3
| 28
| 18.666667
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c13289691b24ba81cea60dd38f57c281bc585030
| 41
|
py
|
Python
|
tests/__init__.py
|
ojengwa/interledger
|
ccc24970a2ec7bd075d99efe0a18cf9922556605
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
ojengwa/interledger
|
ccc24970a2ec7bd075d99efe0a18cf9922556605
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
ojengwa/interledger
|
ccc24970a2ec7bd075d99efe0a18cf9922556605
|
[
"MIT"
] | null | null | null |
"""Unit test package for interledger."""
| 20.5
| 40
| 0.707317
| 5
| 41
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 1
| 41
| 41
| 0.805556
| 0.829268
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c17a4b8f7ae47f60b652c7b8406ebb869e8e9de8
| 127
|
py
|
Python
|
python/testData/intentions/googleNoReturnSectionForInit_after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/intentions/googleNoReturnSectionForInit_after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/intentions/googleNoReturnSectionForInit_after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# NOTE(review): this is an IDE intention-test golden file
# ("...ForInit_after.py"); presumably compared verbatim by the test
# harness — verify before changing anything here.
class C:
    def __init__(self, x, y):
        """
        Args:
            x:
            y:
        """
        return None
| 14.111111
| 29
| 0.307087
| 12
| 127
| 2.916667
| 0.833333
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.574803
| 127
| 9
| 30
| 14.111111
| 0.648148
| 0.149606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
c1ad744702da335ac7f00cac8f066b714404ae29
| 160
|
py
|
Python
|
src/cookbook/ingredients/admin.py
|
miguelzetina/python-graphene-initial
|
e6823ee4a7b2f72ebf592478966cd25339861019
|
[
"MIT"
] | null | null | null |
src/cookbook/ingredients/admin.py
|
miguelzetina/python-graphene-initial
|
e6823ee4a7b2f72ebf592478966cd25339861019
|
[
"MIT"
] | 2
|
2020-06-05T19:17:52.000Z
|
2021-06-10T20:55:11.000Z
|
src/cookbook/ingredients/admin.py
|
miguelzetina/python-graphene-initial
|
e6823ee4a7b2f72ebf592478966cd25339861019
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from cookbook.ingredients.models import Category, Ingredient
# Expose the ingredient models in the Django admin site.
for model in (Category, Ingredient):
    admin.site.register(model)
| 17.777778
| 60
| 0.83125
| 20
| 160
| 6.65
| 0.6
| 0.135338
| 0.255639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 160
| 8
| 61
| 20
| 0.917241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c1c8130de3f0e9007575af26bb261e2e6b5a4473
| 98
|
py
|
Python
|
EduData/__init__.py
|
BAOOOOOM/EduData
|
affa465779cb94db00ed19291f8411229d342c0f
|
[
"Apache-2.0"
] | 98
|
2019-07-05T03:27:36.000Z
|
2022-03-30T08:38:09.000Z
|
EduData/__init__.py
|
BAOOOOOM/EduData
|
affa465779cb94db00ed19291f8411229d342c0f
|
[
"Apache-2.0"
] | 45
|
2020-12-25T03:49:43.000Z
|
2021-11-26T09:45:42.000Z
|
EduData/__init__.py
|
BAOOOOOM/EduData
|
affa465779cb94db00ed19291f8411229d342c0f
|
[
"Apache-2.0"
] | 50
|
2019-08-17T05:11:15.000Z
|
2022-03-29T07:54:13.000Z
|
# coding: utf-8
# create by tongshiwei on 2019/7/2
from .DataSet import get_data, list_resources
| 19.6
| 45
| 0.765306
| 17
| 98
| 4.294118
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 0.153061
| 98
| 4
| 46
| 24.5
| 0.795181
| 0.469388
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c1c838280a76d5cdcedce820d891666a71f259f6
| 49
|
py
|
Python
|
tests/__init__.py
|
mlf-core/system-intelligence
|
f1241dbb9783f6277c7f22327a532486b9876263
|
[
"Apache-2.0"
] | 6
|
2020-06-23T10:41:17.000Z
|
2021-08-09T07:02:50.000Z
|
tests/__init__.py
|
mlf-core/system-intelligence
|
f1241dbb9783f6277c7f22327a532486b9876263
|
[
"Apache-2.0"
] | 161
|
2020-06-12T14:53:37.000Z
|
2022-03-31T21:02:07.000Z
|
tests/__init__.py
|
mlf-core/system-intelligence
|
f1241dbb9783f6277c7f22327a532486b9876263
|
[
"Apache-2.0"
] | null | null | null |
"""Unit test package for system_intelligence."""
| 24.5
| 48
| 0.755102
| 6
| 49
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 49
| 1
| 49
| 49
| 0.818182
| 0.857143
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a9b40805073bb7b510cea719d103112923f88822
| 548
|
py
|
Python
|
main.py
|
kewlamogh/theamogh
|
e1caaa808a6a2fbc4eb2be9dd0fc1433a7f59691
|
[
"MIT"
] | null | null | null |
main.py
|
kewlamogh/theamogh
|
e1caaa808a6a2fbc4eb2be9dd0fc1433a7f59691
|
[
"MIT"
] | null | null | null |
main.py
|
kewlamogh/theamogh
|
e1caaa808a6a2fbc4eb2be9dd0fc1433a7f59691
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template
from os import system as sys
# Flask application serving article pages rendered from templates in ./pages.
app = Flask('app', template_folder = "pages", static_folder = 'static')

@app.route('/')
def home():
    """Render the landing page."""
    return render_template("not-article/home.html")

@app.route('/halo5tips')
def htips():
    """Render the Halo 5 tips article."""
    return render_template("articles/halo5tips.html")

@app.route('/humpty')
def humpty():
    """Render the Humpty Dumpty article."""
    return render_template("articles/humptydumpty.html")

@app.route('/pytutorial')
def pytut():
    """Render the Python tutorial article."""
    return render_template("articles/python-tutorial.html")

# NOTE(review): `sys` here is `os.system` (aliased at import, shadowing the
# stdlib name); app.run() blocks, so the shell `clear` only runs after the
# server stops.
app.run(host='0.0.0.0', port=8080)
sys('clear')
| 27.4
| 71
| 0.729927
| 77
| 548
| 5.103896
| 0.454545
| 0.178117
| 0.203562
| 0.21374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020202
| 0.096715
| 548
| 20
| 72
| 27.4
| 0.773737
| 0
| 0
| 0
| 0
| 0
| 0.28051
| 0.180328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235294
| false
| 0
| 0.117647
| 0.235294
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
a9b560cacaa3451173cc2b804c8098f2521c9235
| 14
|
py
|
Python
|
test1.py
|
Arik5050/PyDjangoApi
|
b2e5781ecdfab18c7da520020734f1f9da5ad0b1
|
[
"MIT"
] | null | null | null |
test1.py
|
Arik5050/PyDjangoApi
|
b2e5781ecdfab18c7da520020734f1f9da5ad0b1
|
[
"MIT"
] | null | null | null |
test1.py
|
Arik5050/PyDjangoApi
|
b2e5781ecdfab18c7da520020734f1f9da5ad0b1
|
[
"MIT"
] | null | null | null |
# Placeholder smoke test: just prints a fixed string.
print("ffff")
| 7
| 13
| 0.642857
| 2
| 14
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 14
| 1
| 14
| 14
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e71affbfecf71180074bcb762037b7513dacf9f9
| 118
|
py
|
Python
|
testing/test_gcd.py
|
pcordemans/algorithms_examples
|
ce49fc1c1fc9ad02c8bd169051a22dd5b98a7ab4
|
[
"Apache-2.0"
] | null | null | null |
testing/test_gcd.py
|
pcordemans/algorithms_examples
|
ce49fc1c1fc9ad02c8bd169051a22dd5b98a7ab4
|
[
"Apache-2.0"
] | null | null | null |
testing/test_gcd.py
|
pcordemans/algorithms_examples
|
ce49fc1c1fc9ad02c8bd169051a22dd5b98a7ab4
|
[
"Apache-2.0"
] | null | null | null |
from gcd import gcd
def test_gcd():
    """gcd returns the greatest common divisor for known pairs."""
    for a, b, expected in ((3, 6, 3), (1, 2, 1), (12, 18, 6)):
        assert gcd(a, b) == expected
| 19.666667
| 31
| 0.550847
| 22
| 118
| 2.909091
| 0.545455
| 0.421875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 0.29661
| 118
| 6
| 31
| 19.666667
| 0.638554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e721ab4aa94c9d49792a8e9ca2de72e193c24b56
| 338
|
py
|
Python
|
Treasuregram/main_app/models.py
|
Khalil71/Treasurers-
|
a22ee48480b18789e78e69cbc61eea386c7cea85
|
[
"MIT"
] | null | null | null |
Treasuregram/main_app/models.py
|
Khalil71/Treasurers-
|
a22ee48480b18789e78e69cbc61eea386c7cea85
|
[
"MIT"
] | null | null | null |
Treasuregram/main_app/models.py
|
Khalil71/Treasurers-
|
a22ee48480b18789e78e69cbc61eea386c7cea85
|
[
"MIT"
] | null | null | null |
from django.db import models
class Treasure(models.Model):
    # Display name; also the string representation used in the admin.
    name = models.CharField(max_length=100)
    # Monetary value, up to 99,999,999.99 (10 digits, 2 decimal places).
    value = models.DecimalField(max_digits=10, decimal_places=2)
    material = models.CharField(max_length=100)
    location = models.CharField(max_length=100)
    # Image URL stored as plain text (not validated as a URLField).
    img_url = models.CharField(max_length=100)

    def __str__(self):
        """Show the treasure's name in admin lists and shell output."""
        return self.name
| 28.166667
| 61
| 0.784024
| 49
| 338
| 5.183673
| 0.571429
| 0.23622
| 0.283465
| 0.377953
| 0.425197
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049834
| 0.109467
| 338
| 11
| 62
| 30.727273
| 0.79402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0.111111
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e73f383404b47e76efcb466c9786fbe85f67697e
| 195
|
py
|
Python
|
pcd_loader.py
|
ruc98/3D_PointClouds2
|
bf1ed31d895be6616992b9a35697ce762d102aec
|
[
"MIT"
] | 4
|
2019-08-20T09:38:50.000Z
|
2021-02-24T14:54:11.000Z
|
pcd_loader.py
|
ruc98/3D_PointClouds2
|
bf1ed31d895be6616992b9a35697ce762d102aec
|
[
"MIT"
] | 1
|
2021-02-08T10:17:25.000Z
|
2021-03-29T03:10:36.000Z
|
pcd_loader.py
|
ruc98/3D_PointClouds2
|
bf1ed31d895be6616992b9a35697ce762d102aec
|
[
"MIT"
] | 7
|
2019-03-29T21:05:05.000Z
|
2021-03-12T01:48:59.000Z
|
from pypcd import pypcd
pc = pypcd.PointCloud.from_path('/home/rahulchakwate/My_tensorflow/3D_Object_Segmentation/PointNet_Implementation/Edge_Extraction-master/ArtificialPointClouds/bunny.pcd')
| 65
| 170
| 0.876923
| 24
| 195
| 6.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005319
| 0.035897
| 195
| 2
| 171
| 97.5
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0.692308
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e740e20a26c5e948135fef39c05049d05cc7ff8e
| 211
|
py
|
Python
|
tests/test_asingleton.py
|
guallo/asingleton
|
03421633e2f17586584dbdf2a010449ffed7f675
|
[
"MIT"
] | null | null | null |
tests/test_asingleton.py
|
guallo/asingleton
|
03421633e2f17586584dbdf2a010449ffed7f675
|
[
"MIT"
] | null | null | null |
tests/test_asingleton.py
|
guallo/asingleton
|
03421633e2f17586584dbdf2a010449ffed7f675
|
[
"MIT"
] | null | null | null |
import unittest
import doctest
import asingleton.asingleton
def load_tests(loader, standard_tests, pattern):
standard_tests.addTests(doctest.DocTestSuite(asingleton.asingleton))
return standard_tests
| 21.1
| 72
| 0.824645
| 24
| 211
| 7.083333
| 0.541667
| 0.229412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113744
| 211
| 9
| 73
| 23.444444
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e79f471e20d608a2f436afca9cf9c16c22f636c0
| 113
|
py
|
Python
|
Chess/controller.py
|
surajsinghbisht054/GameOfChess
|
47edfa3fd4cc33fce9f2b9bc62e04e9ff93e5489
|
[
"Apache-2.0"
] | null | null | null |
Chess/controller.py
|
surajsinghbisht054/GameOfChess
|
47edfa3fd4cc33fce9f2b9bc62e04e9ff93e5489
|
[
"Apache-2.0"
] | null | null | null |
Chess/controller.py
|
surajsinghbisht054/GameOfChess
|
47edfa3fd4cc33fce9f2b9bc62e04e9ff93e5489
|
[
"Apache-2.0"
] | 1
|
2019-08-30T13:51:18.000Z
|
2019-08-30T13:51:18.000Z
|
import model
class Controller():
def __init__(self):
pass
def init_model(self):
self.model=model.Model()
| 12.555556
| 26
| 0.716814
| 16
| 113
| 4.75
| 0.5
| 0.184211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159292
| 113
| 8
| 27
| 14.125
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
e7c9eb1c3dd2ee80d516670e2775e2dccb5e03de
| 10,747
|
py
|
Python
|
tests/geo/test_quat.py
|
bigblindbais/pytk
|
7e21604ba9b4fc869949d3d7da845d98c480d872
|
[
"MIT"
] | null | null | null |
tests/geo/test_quat.py
|
bigblindbais/pytk
|
7e21604ba9b4fc869949d3d7da845d98c480d872
|
[
"MIT"
] | null | null | null |
tests/geo/test_quat.py
|
bigblindbais/pytk
|
7e21604ba9b4fc869949d3d7da845d98c480d872
|
[
"MIT"
] | null | null | null |
import unittest
import pytk.geo as geo
class GeoQuatTest(unittest.TestCase):
def test_shape(self):
self.assertRaises(geo.GeoException, geo.quat, [])
self.assertRaises(geo.GeoException, geo.quat, [0])
self.assertRaises(geo.GeoException, geo.quat, [0, 1])
self.assertRaises(geo.GeoException, geo.quat, [0, 1, 2])
self.assertRaises(geo.GeoException, geo.quat, [0, 1, 2, 3, 4])
self.assertRaises(geo.GeoException, geo.quat, [0, 1, 2, 3, 4, 5])
self.assertRaises(geo.GeoException, geo.quat, [0, 1, 2, 3, 4, 5, 6])
self.assertRaises(geo.GeoException, geo.quat, [0, 1, 2, 3, 4, 5, 6, 7])
def test_equality(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual(w, w)
self.assertEqual(x, x)
self.assertEqual(y, y)
self.assertEqual(z, z)
self.assertNotEqual(w, x)
self.assertNotEqual(w, y)
self.assertNotEqual(w, z)
self.assertNotEqual(x, y)
self.assertNotEqual(x, z)
self.assertNotEqual(y, z)
def test_add_sub(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual( w + x + y , geo.quat([1, 1, 1, 0]))
self.assertEqual( w + x + z, geo.quat([1, 1, 0, 1]))
self.assertEqual( w + y + z, geo.quat([1, 0, 1, 1]))
self.assertEqual( x + y + z, geo.quat([0, 1, 1, 1]))
self.assertEqual( w - x + y , geo.quat([ 1, -1, 1, 0]))
self.assertEqual( w - x + z, geo.quat([ 1, -1, 0, 1]))
self.assertEqual( w - y + z, geo.quat([ 1, 0, -1, 1]))
self.assertEqual( x - y + z, geo.quat([ 0, 1, -1, 1]))
self.assertEqual(- w + x - y , geo.quat([-1, 1, -1, 0]))
self.assertEqual(- w + x - z, geo.quat([-1, 1, 0, -1]))
self.assertEqual(- w + y - z, geo.quat([-1, 0, 1, -1]))
self.assertEqual( - x + y - z, geo.quat([ 0, -1, 1, -1]))
def test_neg(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual(w - x, w + (-x))
self.assertEqual(w - y, w + (-y))
self.assertEqual(w - z, w + (-z))
self.assertEqual(x - y, x + (-y))
self.assertEqual(x - z, x + (-z))
self.assertEqual(y - z, y + (-z))
def test_pow(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertAlmostEqual(w ** 2, 1)
self.assertAlmostEqual(x ** 2, 1)
self.assertAlmostEqual(y ** 2, 1)
self.assertAlmostEqual(z ** 2, 1)
self.assertAlmostEqual(( w - x + y ) ** 2, 3)
self.assertAlmostEqual(( w - x + z) ** 2, 3)
self.assertAlmostEqual(( w - y + z) ** 2, 3)
self.assertAlmostEqual(( x - y + z) ** 2, 3)
self.assertAlmostEqual((- w + x - y ) ** 2, 3)
self.assertAlmostEqual((- w + x - z) ** 2, 3)
self.assertAlmostEqual((- w + y - z) ** 2, 3)
self.assertAlmostEqual(( - x + y - z) ** 2, 3)
def test_conj(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual(w + w.conj, geo.quat([2, 0, 0, 0]))
self.assertEqual(x + x.conj, geo.quat([0, 0, 0, 0]))
self.assertEqual(y + y.conj, geo.quat([0, 0, 0, 0]))
self.assertEqual(z + z.conj, geo.quat([0, 0, 0, 0]))
def test_inv(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual(w * w.inv, geo.quat([1, 0, 0, 0]))
self.assertEqual(w.inv * w, geo.quat([1, 0, 0, 0]))
self.assertEqual(x * x.inv, geo.quat([1, 0, 0, 0]))
self.assertEqual(x.inv * x, geo.quat([1, 0, 0, 0]))
self.assertEqual(y * y.inv, geo.quat([1, 0, 0, 0]))
self.assertEqual(y.inv * y, geo.quat([1, 0, 0, 0]))
self.assertEqual(z * z.inv, geo.quat([1, 0, 0, 0]))
self.assertEqual(z.inv * z, geo.quat([1, 0, 0, 0]))
def test_normal(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertAlmostEqual((w ).normal ** 2, 1)
self.assertAlmostEqual(( x ).normal ** 2, 1)
self.assertAlmostEqual(( y ).normal ** 2, 1)
self.assertAlmostEqual(( z).normal ** 2, 1)
self.assertAlmostEqual((w + x ).normal ** 2, 1)
self.assertAlmostEqual((w + y ).normal ** 2, 1)
self.assertAlmostEqual((w + z).normal ** 2, 1)
self.assertAlmostEqual(( x + y ).normal ** 2, 1)
self.assertAlmostEqual(( x + z).normal ** 2, 1)
self.assertAlmostEqual(( y + z).normal ** 2, 1)
self.assertAlmostEqual((w + x + y ).normal ** 2, 1)
self.assertAlmostEqual((w + x + z).normal ** 2, 1)
self.assertAlmostEqual((w + y + z).normal ** 2, 1)
self.assertAlmostEqual(( x + y + z).normal ** 2, 1)
self.assertAlmostEqual((w + x + y + z).normal ** 2, 1)
def test_as_rquat(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual((w ).as_rquat, rquat([1, 0, 0, 0]))
self.assertEqual(( x ).as_rquat, rquat([0, 1, 0, 0]))
self.assertEqual(( y ).as_rquat, rquat([0, 0, 1, 0]))
self.assertEqual(( z).as_rquat, rquat([0, 0, 0, 1]))
self.assertEqual((w + x ).as_rquat, rquat([1, 1, 0, 0]))
self.assertEqual((w + y ).as_rquat, rquat([1, 0, 1, 0]))
self.assertEqual((w + z).as_rquat, rquat([1, 0, 0, 1]))
self.assertEqual(( x + y ).as_rquat, rquat([0, 1, 1, 0]))
self.assertEqual(( x + z).as_rquat, rquat([0, 1, 0, 1]))
self.assertEqual(( y + z).as_rquat, rquat([0, 0, 1, 1]))
self.assertEqual((w + x + y ).as_rquat, rquat([1, 1, 1, 0]))
self.assertEqual((w + x + z).as_rquat, rquat([1, 1, 0, 1]))
self.assertEqual((w + y + z).as_rquat, rquat([1, 0, 1, 1]))
self.assertEqual(( x + y + z).as_rquat, rquat([0, 1, 1, 1]))
self.assertEqual((w + x + y + z).as_rquat, rquat([1, 1, 1, 1]))
def test_as_vect(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual((w ).as_vect, vect([0, 0, 0]))
self.assertEqual(( x ).as_vect, vect([1, 0, 0]))
self.assertEqual(( y ).as_vect, vect([0, 1, 0]))
self.assertEqual(( z).as_vect, vect([0, 0, 1]))
self.assertEqual((w + x ).as_vect, vect([1, 0, 0]))
self.assertEqual((w + y ).as_vect, vect([0, 1, 0]))
self.assertEqual((w + z).as_vect, vect([0, 0, 1]))
self.assertEqual(( x + y ).as_vect, vect([1, 1, 0]))
self.assertEqual(( x + z).as_vect, vect([1, 0, 1]))
self.assertEqual(( y + z).as_vect, vect([0, 1, 1]))
self.assertEqual((w + x + y ).as_vect, vect([1, 1, 0]))
self.assertEqual((w + x + z).as_vect, vect([1, 0, 1]))
self.assertEqual((w + y + z).as_vect, vect([0, 1, 1]))
self.assertEqual(( x + y + z).as_vect, vect([1, 1, 1]))
self.assertEqual((w + x + y + z).as_vect, vect([1, 1, 1]))
def test_mul(self):
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual( w * x , geo.quat([ 0, 1, 0, 0]))
self.assertEqual( w * y , geo.quat([ 0, 0, 1, 0]))
self.assertEqual( w * z , geo.quat([ 0, 0, 0, 1]))
self.assertEqual( x * y , geo.quat([ 0, 0, 0, 1]))
self.assertEqual( x * z , geo.quat([ 0, 0, -1, 0]))
self.assertEqual( y * z , geo.quat([ 0, 1, 0, 0]))
self.assertEqual( w * x * y , geo.quat([ 0, 0, 0, 1]))
self.assertEqual( w * x * z , geo.quat([ 0, 0, -1, 0]))
self.assertEqual( w * y * z , geo.quat([ 0, 1, 0, 0]))
self.assertEqual( x * y * z , geo.quat([-1, 0, 0, 0]))
self.assertEqual( w * x * y * z , geo.quat([-1, 0, 0, 0]))
self.assertEqual( (w * x) * y * z , geo.quat([-1, 0, 0, 0]))
self.assertEqual( w * (x * y) * z , geo.quat([-1, 0, 0, 0]))
self.assertEqual( w * x * (y * z) , geo.quat([-1, 0, 0, 0]))
self.assertEqual((w * x * y) * (z), geo.quat([-1, 0, 0, 0]))
self.assertEqual((w * x) * (y * z), geo.quat([-1, 0, 0, 0]))
self.assertEqual((w) * (x * y * z), geo.quat([-1, 0, 0, 0]))
self.assertNotEqual(x * y, y * x)
self.assertNotEqual(x * z, z * x)
self.assertNotEqual(y * z, z * y)
def test_rotate(self):
v = geo.vect([0, 1, 2])
w = geo.quat([1, 0, 0, 0])
x = geo.quat([0, 1, 0, 0])
y = geo.quat([0, 0, 1, 0])
z = geo.quat([0, 0, 0, 1])
self.assertEqual( w * v , geo.vect([0, 1, 2]))
self.assertEqual( x * v , geo.vect([0, -1, -2]))
self.assertEqual( y * v , geo.vect([0, 1, -2]))
self.assertEqual( z * v , geo.vect([0, -1, 2]))
self.assertEqual( w * x * v , geo.vect([0, -1, -2]))
self.assertEqual( w * y * v , geo.vect([0, 1, -2]))
self.assertEqual( w * z * v , geo.vect([0, -1, 2]))
self.assertEqual( x * y * v , geo.vect([0, -1, 2]))
self.assertEqual( x * z * v , geo.vect([0, 1, -2]))
self.assertEqual( y * z * v , geo.vect([0, -1, -2]))
self.assertEqual( w * x * y * v , geo.vect([0, -1, 2]))
self.assertEqual( w * x * z * v , geo.vect([0, 1, -2]))
self.assertEqual( w * y * z * v , geo.vect([0, -1, -2]))
self.assertEqual( x * y * z * v , geo.vect([0, 1, 2]))
self.assertEqual( w * x * y * z * v , geo.vect([0, 1, 2]))
| 46.124464
| 79
| 0.456313
| 1,656
| 10,747
| 2.934179
| 0.025966
| 0.055979
| 0.090554
| 0.057419
| 0.889278
| 0.865404
| 0.802017
| 0.72731
| 0.671537
| 0.512245
| 0
| 0.081676
| 0.340374
| 10,747
| 232
| 80
| 46.323276
| 0.603752
| 0
| 0
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.7
| 1
| 0.06
| false
| 0
| 0.01
| 0
| 0.075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
99ccaa99fbe6412e99b1532758fc9507644a067b
| 191
|
py
|
Python
|
src/yellowdog_client/model/double_range.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/double_range.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/double_range.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from typing import Optional
from .range import Range
@dataclass
class DoubleRange(Range):
min: Optional[float] = None
max: Optional[float] = None
| 17.363636
| 33
| 0.753927
| 24
| 191
| 6
| 0.541667
| 0.180556
| 0.236111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17801
| 191
| 10
| 34
| 19.1
| 0.917197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.428571
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
99d99aa30023ecc85d6d727ce08aaecdfb54f6e4
| 187
|
py
|
Python
|
indonesian_dot/agents/dfs_agent.py
|
Ra-Ni/Indonesian-Dot-Solver
|
2baf507d23816b686f046f89d4c833728b25f2dc
|
[
"MIT"
] | null | null | null |
indonesian_dot/agents/dfs_agent.py
|
Ra-Ni/Indonesian-Dot-Solver
|
2baf507d23816b686f046f89d4c833728b25f2dc
|
[
"MIT"
] | null | null | null |
indonesian_dot/agents/dfs_agent.py
|
Ra-Ni/Indonesian-Dot-Solver
|
2baf507d23816b686f046f89d4c833728b25f2dc
|
[
"MIT"
] | 1
|
2020-03-18T15:23:24.000Z
|
2020-03-18T15:23:24.000Z
|
from . import Agent
class DFSAgent(Agent):
def g(self, n) -> int:
return 0
def h(self, n) -> int:
return 0
def __str__(self) -> str:
return 'dfs'
| 13.357143
| 29
| 0.518717
| 26
| 187
| 3.576923
| 0.576923
| 0.107527
| 0.172043
| 0.301075
| 0.387097
| 0.387097
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.358289
| 187
| 13
| 30
| 14.384615
| 0.758333
| 0
| 0
| 0.25
| 0
| 0
| 0.016043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.375
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
99dde7b3f5fcb0483ace26f84365fe2ed11d4cb8
| 24
|
py
|
Python
|
src/clusto/drivers/locations/racks/__init__.py
|
thekad/clusto
|
c141ea3ef4931c6a21fdf42845c6e9de5ee08caa
|
[
"BSD-3-Clause"
] | 216
|
2015-01-10T17:03:25.000Z
|
2022-03-24T07:23:41.000Z
|
src/clusto/drivers/locations/racks/__init__.py
|
thekad/clusto
|
c141ea3ef4931c6a21fdf42845c6e9de5ee08caa
|
[
"BSD-3-Clause"
] | 23
|
2015-01-08T16:51:22.000Z
|
2021-03-13T12:56:04.000Z
|
src/clusto/drivers/locations/racks/__init__.py
|
thekad/clusto
|
c141ea3ef4931c6a21fdf42845c6e9de5ee08caa
|
[
"BSD-3-Clause"
] | 49
|
2015-01-08T00:13:17.000Z
|
2021-09-22T02:01:20.000Z
|
from basicrack import *
| 12
| 23
| 0.791667
| 3
| 24
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
99ff041d1b8830dbc1ce3b632b548fc922a8713f
| 44
|
py
|
Python
|
qiwi_handler/qiwi_handler/samples/__init__.py
|
bezumnui/qiwi_handler
|
9562b1a8c8fcc1910dbc722278cb6f5af313fa02
|
[
"MIT"
] | null | null | null |
qiwi_handler/qiwi_handler/samples/__init__.py
|
bezumnui/qiwi_handler
|
9562b1a8c8fcc1910dbc722278cb6f5af313fa02
|
[
"MIT"
] | null | null | null |
qiwi_handler/qiwi_handler/samples/__init__.py
|
bezumnui/qiwi_handler
|
9562b1a8c8fcc1910dbc722278cb6f5af313fa02
|
[
"MIT"
] | null | null | null |
from qiwi_handler.samples.checkPay import *
| 22
| 43
| 0.840909
| 6
| 44
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
82061fd674b4b4f92d6437e0a3b7b4fdc08d22e9
| 1,031
|
py
|
Python
|
tests/test_propensity.py
|
yuxinchenNU/causalml
|
b0c280d99c0d5ab70e3b3d1a86c6f9d4170d53b1
|
[
"Apache-2.0"
] | 1
|
2020-08-20T13:58:25.000Z
|
2020-08-20T13:58:25.000Z
|
tests/test_propensity.py
|
yuxinchenNU/causalml
|
b0c280d99c0d5ab70e3b3d1a86c6f9d4170d53b1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_propensity.py
|
yuxinchenNU/causalml
|
b0c280d99c0d5ab70e3b3d1a86c6f9d4170d53b1
|
[
"Apache-2.0"
] | null | null | null |
from causalml.propensity import ElasticNetPropensityModel, GradientBoostedPropensityModel
from causalml.metrics import roc_auc_score
from .const import RANDOM_SEED
def test_elasticnet_propensity_model(generate_regression_data):
y, X, treatment, tau, b, e = generate_regression_data()
pm = ElasticNetPropensityModel(random_state=RANDOM_SEED)
ps = pm.fit_predict(X, treatment)
assert roc_auc_score(treatment, ps) > .5
def test_gradientboosted_propensity_model(generate_regression_data):
y, X, treatment, tau, b, e = generate_regression_data()
pm = GradientBoostedPropensityModel(random_state=RANDOM_SEED)
ps = pm.fit_predict(X, treatment)
assert roc_auc_score(treatment, ps) > .5
def test_gradientboosted_propensity_model_earlystopping(generate_regression_data):
y, X, treatment, tau, b, e = generate_regression_data()
pm = GradientBoostedPropensityModel(random_state=RANDOM_SEED, early_stop=True)
ps = pm.fit_predict(X, treatment)
assert roc_auc_score(treatment, ps) > .5
| 33.258065
| 89
| 0.785645
| 130
| 1,031
| 5.915385
| 0.292308
| 0.140442
| 0.171652
| 0.089727
| 0.724317
| 0.724317
| 0.724317
| 0.724317
| 0.724317
| 0.724317
| 0
| 0.003367
| 0.13579
| 1,031
| 30
| 90
| 34.366667
| 0.859708
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
82211888cd67f8fad5037e098394c479e87b5ab3
| 6,796
|
py
|
Python
|
source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py
|
j-erickson/aws-security-hub-automated-response-and-remediation
|
f1722c00817e1358a1d80272b67fc226f1105965
|
[
"Apache-2.0"
] | 129
|
2020-08-11T18:18:50.000Z
|
2021-10-04T20:00:35.000Z
|
source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py
|
j-erickson/aws-security-hub-automated-response-and-remediation
|
f1722c00817e1358a1d80272b67fc226f1105965
|
[
"Apache-2.0"
] | 39
|
2020-08-11T18:07:58.000Z
|
2021-10-15T16:26:24.000Z
|
source/remediation_runbooks/scripts/test/test_enableautoscalinggroupelbhealthcheck.py
|
j-erickson/aws-security-hub-automated-response-and-remediation
|
f1722c00817e1358a1d80272b67fc226f1105965
|
[
"Apache-2.0"
] | 35
|
2020-08-15T04:57:27.000Z
|
2021-09-21T06:23:17.000Z
|
#!/usr/bin/python
###############################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not #
# use this file except in compliance with the License. A copy of the License #
# is located at #
# #
# http://www.apache.org/licenses/LICENSE-2.0/ #
# #
# or in the "license" file accompanying this file. This file is distributed #
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express #
# or implied. See the License for the specific language governing permis- #
# sions and limitations under the License. #
###############################################################################
import boto3
import json
import botocore.session
from botocore.stub import Stubber
from botocore.config import Config
import pytest
from pytest_mock import mocker
import EnableAutoScalingGroupELBHealthCheck_validate as validate
my_session = boto3.session.Session()
my_region = my_session.region_name
#=====================================================================================
# EnableAutoScalingGroupELBHealthCheck_remediation SUCCESS
#=====================================================================================
def test_validation_success(mocker):
event = {
'SolutionId': 'SO0000',
'SolutionVersion': '1.2.3',
'AsgName': 'my_asg',
'region': my_region
}
good_response = {
"AutoScalingGroups": [
{
"AutoScalingGroupName": "sharr-test-autoscaling-1",
"AutoScalingGroupARN": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785d81e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1",
"LaunchTemplate": {
"LaunchTemplateId": "lt-05ad2fca4f4ea7d2f",
"LaunchTemplateName": "sharrtest",
"Version": "$Default"
},
"MinSize": 0,
"MaxSize": 1,
"DesiredCapacity": 0,
"DefaultCooldown": 300,
"AvailabilityZones": [
"us-east-1b"
],
"LoadBalancerNames": [],
"TargetGroupARNs": [
"arn:aws:elasticloadbalancing:us-east-1:111111111111:targetgroup/WebDemoTarget/fc9a82512b92af62"
],
"HealthCheckType": "ELB",
"HealthCheckGracePeriod": 300,
"Instances": [],
"CreatedTime": "2021-01-27T14:08:16.949000+00:00",
"SuspendedProcesses": [],
"VPCZoneIdentifier": "subnet-86a594ab",
"EnabledMetrics": [],
"Tags": [],
"TerminationPolicies": [
"Default"
],
"NewInstancesProtectedFromScaleIn": False,
"ServiceLinkedRoleARN": "arn:aws:iam::111111111111:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling"
}
]
}
BOTO_CONFIG = Config(
retries ={
'mode': 'standard'
},
region_name=my_region
)
asg_client = botocore.session.get_session().create_client('autoscaling', config=BOTO_CONFIG)
asg_stubber = Stubber(asg_client)
asg_stubber.add_response(
'describe_auto_scaling_groups',
good_response
)
asg_stubber.activate()
mocker.patch('EnableAutoScalingGroupELBHealthCheck_validate.connect_to_autoscaling', return_value=asg_client)
assert validate.verify(event, {}) == {
"response": {
"message": "Autoscaling Group health check type updated to ELB",
"status": "Success"
}
}
asg_stubber.deactivate()
def test_validation_failed(mocker):
event = {
'SolutionId': 'SO0000',
'SolutionVersion': '1.2.3',
'AsgName': 'my_asg',
'region': my_region
}
bad_response = {
"AutoScalingGroups": [
{
"AutoScalingGroupName": "sharr-test-autoscaling-1",
"AutoScalingGroupARN": "arn:aws:autoscaling:us-east-1:111111111111:autoScalingGroup:785d81e1-cd66-435d-96de-d6ed5416defd:autoScalingGroupName/sharr-test-autoscaling-1",
"LaunchTemplate": {
"LaunchTemplateId": "lt-05ad2fca4f4ea7d2f",
"LaunchTemplateName": "sharrtest",
"Version": "$Default"
},
"MinSize": 0,
"MaxSize": 1,
"DesiredCapacity": 0,
"DefaultCooldown": 300,
"AvailabilityZones": [
"us-east-1b"
],
"LoadBalancerNames": [],
"TargetGroupARNs": [
"arn:aws:elasticloadbalancing:us-east-1:111111111111:targetgroup/WebDemoTarget/fc9a82512b92af62"
],
"HealthCheckType": "EC2",
"HealthCheckGracePeriod": 300,
"Instances": [],
"CreatedTime": "2021-01-27T14:08:16.949000+00:00",
"SuspendedProcesses": [],
"VPCZoneIdentifier": "subnet-86a594ab",
"EnabledMetrics": [],
"Tags": [],
"TerminationPolicies": [
"Default"
],
"NewInstancesProtectedFromScaleIn": False,
"ServiceLinkedRoleARN": "arn:aws:iam::111111111111:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling"
}
]
}
BOTO_CONFIG = Config(
retries ={
'mode': 'standard'
},
region_name=my_region
)
asg_client = botocore.session.get_session().create_client('autoscaling', config=BOTO_CONFIG)
asg_stubber = Stubber(asg_client)
asg_stubber.add_response(
'describe_auto_scaling_groups',
bad_response
)
asg_stubber.activate()
mocker.patch('EnableAutoScalingGroupELBHealthCheck_validate.connect_to_autoscaling', return_value=asg_client)
assert validate.verify(event, {}) == {
"response": {
"message": "Autoscaling Group health check type is not ELB",
"status": "Failed"
}
}
asg_stubber.deactivate()
| 39.283237
| 184
| 0.516922
| 504
| 6,796
| 6.849206
| 0.375
| 0.023175
| 0.033604
| 0.04635
| 0.74044
| 0.74044
| 0.74044
| 0.74044
| 0.74044
| 0.74044
| 0
| 0.055101
| 0.33505
| 6,796
| 172
| 185
| 39.511628
| 0.708785
| 0.172749
| 0
| 0.685714
| 0
| 0.028571
| 0.404893
| 0.200147
| 0
| 0
| 0
| 0
| 0.014286
| 1
| 0.014286
| false
| 0
| 0.057143
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
822d0bbd584abe23edb59c54b12576de7bb8ef74
| 58,734
|
py
|
Python
|
crawler.py
|
bernardhu/gzlianjia
|
a8fa3c237123079c12b8890cbece099b813bfc4c
|
[
"MIT"
] | 2
|
2017-05-11T09:41:13.000Z
|
2017-07-24T11:46:59.000Z
|
crawler.py
|
bernardhu/gzlianjia
|
a8fa3c237123079c12b8890cbece099b813bfc4c
|
[
"MIT"
] | null | null | null |
crawler.py
|
bernardhu/gzlianjia
|
a8fa3c237123079c12b8890cbece099b813bfc4c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import pickle
import math
import os.path
import shutil
import datetime
import time
import random
import json
import re
import chardet
import string
import base64
import requests
from bs4 import BeautifulSoup
from model import TradedHouse, DistricHouse, BidHouse, RentHouse, create_table, clear_table
# Pool of already-crawled detail URLs (the reads/writes are currently
# commented out in the crawl functions below) — TODO confirm before reuse.
grabedPool = {}
# URL slugs for Guangzhou districts, used to build gz.lianjia.com paths.
gz_district = ['tianhe', 'yuexiu', 'liwan', 'haizhu', 'panyu', 'baiyun', 'huangpugz', 'conghua', 'zengcheng', 'huadou', 'luogang', 'nansha']
# Slug -> human-readable Chinese district name.
gz_district_name = {"tianhe":u"天河", "yuexiu":u"越秀", "liwan":u"荔湾", "haizhu":u"海珠",
"panyu":u"番禺", "baiyun":u"白云", "huangpugz":u"黄埔", "conghua": u"从化", "zengcheng": u"增城",
"huadou":u"花都", "luogang": u"萝岗","nansha":u"南沙"}
# Page offset at which crawling starts.  The original `global start_offset`
# statement was a no-op at module scope and has been removed.
start_offset = 1
user_agent_list = [
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10",
"Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/533.17.8 (KHTML, like Gecko) Version/5.0.1 Safari/533.17.8",
"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5",
"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.1.17) Gecko/20110123 (like Firefox/3.x) SeaMonkey/2.0.12",
"Mozilla/5.0 (Windows NT 5.2; rv:10.0.1) Gecko/20100101 Firefox/10.0.1 SeaMonkey/2.7.1",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-US) AppleWebKit/532.8 (KHTML, like Gecko) Chrome/4.0.302.2 Safari/532.8",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.464.0 Safari/534.3",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.186 Safari/535.1",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/15.0.874.54 Safari/535.2",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.36 Safari/535.7",
"Mozilla/5.0 (Macintosh; U; Mac OS X Mach-O; en-US; rv:2.0a) Gecko/20040614 Firefox/3.0.0 ",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.0.3) Gecko/2008092414 Firefox/3.0.3",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1) Gecko/20090624 Firefox/3.5",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.14) Gecko/20110218 AlexaToolbar/alxf-2.0 Firefox/3.6.14",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.17 (KHTML, like Gecko) Version/8.0 Mobile/13A175 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.39 (KHTML, like Gecko) Version/9.0 Mobile/13A4305g Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A344 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A344 Safari/600.1.4 (000205)",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/13A344 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A404 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/631.1.17 (KHTML, like Gecko) Version/8.0 Mobile/13A171 Safari/637.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/13A404 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/13B5110e Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A404 Safari/600.1.4 (000994)",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A404 Safari/600.1.4 (000862)",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A404 Safari/600.1.4 (000065)",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/5.2.43972 Mobile/13A452 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A452 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B5130b Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A404 Safari/600.1.4 (000539)",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A452 Safari/600.1.4 (000549)",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A452 Safari/600.1.4 (000570)",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/13A452 Safari/600.1.4 (000693)",
"Mozilla/5.0 (iPad; CPU OS 9_0_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/9.0.60246 Mobile/13A404 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A452 Safari/600.1.4 (000292)",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/9.0.60246 Mobile/13A452 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B137 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13A452 Safari/600.1.4 (000996)",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/46.0.2490.73 Mobile/13B143 Safari/600.1.4 (000648)",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/46.0.2490.73 Mobile/13B143 Safari/600.1.4 (000119)",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/9.0.60246 Mobile/13B143 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/46.0.2490.73 Mobile/13B143 Safari/600.1.4 (000923)",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/1.2 Mobile/13B143 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A340 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13B143",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/10.0.63022 Mobile/13B143 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.17 (KHTML, like Gecko) Version/8.0 Mobile/13A175 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1.56 (KHTML, like Gecko) Version/9.0 Mobile/13c75 Safari/601.1.56",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B144 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.70 Mobile/13C75 Safari/601.1.46 (000144)",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.70 Mobile/13C75 Safari/601.1.46 (000042)",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13C75 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) CriOS/38.0.2125.59 Mobile/11D201 Safari/9537.53",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/11.0.65374 Mobile/13B143 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.70 Mobile/13C75 Safari/601.1.46 (000468)",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/11.0.65374 Mobile/13C75 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.16 (KHTML, like Gecko) Version/8.0 Mobile/13A171a Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/11.1.66360 Mobile/13C75 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.83 Mobile/13C75 Safari/601.1.46 (000468)",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.107 Mobile/13C75 Safari/601.1.46 (000702)",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/10A403 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B14 3 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13D15 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.107 Mobile/13A452 Safari/601.1.46 (000412)",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.107 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/12.0.68608 Mobile/13D15 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/48.0.2564.87 Mobile/13A452 Safari/601.1.46 (000715)",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/48.0.2564.87 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.89 Mobile/13B143 Safari/600.1.4 (000381)",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E5200d Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E5200d Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/11.1.66360 Mobile/13D15 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/48.0.2564.104 Mobile/13B143 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/48.0.2564.104 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E5200d Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/48.0.2564.104 Mobile/13E5200d Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.83 Mobile/13C75 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/47.0.2526.83 Mobile/13C75 Safari/601.1.46 (000381)",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13A344 Shelter/1.0.0 (YmqLQeAh3Z-nBdz2i87Rf) ",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/46.0.2490.73 Mobile/13C143 Safari/600.1.4 (000718)",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A143 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/1.4 Mobile/13E5181f Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/49.0.2623.73 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/49.0.2623.73 Mobile/13A15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E233 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/13.1.72140 Mobile/13E233 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/49.0.2623.73 Mobile/13E233 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E238 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/49.0.2623.109 Mobile/13E238 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/1.4 Mobile/13A452 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/13B143 Safari/600.1.4 (000073)",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/3.0 Mobile/13E238 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/14.1.119979954 Mobile/13E238 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/50.0.2661.95 Mobile/13E238 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E234 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F69 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E237 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/50.0.2661.95 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/15.1.122860578 Mobile/13F69 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/51.0.2704.64 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F72 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/51.0.2704.104 Mobile/13E238 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/50.0.2661.77 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/4.0 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/51.0.2704.104 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/16.0.124986583 Mobile/13F69 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/2.0 Mobile/13E5200d Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13G34 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/52.0.2743.84 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E188a Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/17.0.128207670 Mobile/13G35 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_3 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/50.0.2661.95 Mobile/13G34 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13G35 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13G35",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/52.0.2743.84 Mobile/13G35 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/5.0 Mobile/13G35 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69 iPadApp",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13G35 Safari/601.1 MXiOS/4.9.0.60",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69",
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/18.0.130791545 Mobile/13G35 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13G36 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/18.0.130791545 Mobile/13G36 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 7_1 like Mac OS X) AppleWebKit/537.51.3 (KHTML, like Gecko) Version/7.0 Mobile/11A4149 Safari/9537.72",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.17 (KHTML, like Gecko) Version/8.0 Mobile/13A175 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/18.1.132077863 Mobile/13G36 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/53.0.2785.86 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/53.0.2785.109 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/53.0.2785.109 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OSX) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A452 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13D11",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13G36 Safari/601.1.46 Sleipnir/4.3.0m",
"Mozilla/5.0 (iPad; CPU OS 9_0_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/53.0.2785.86 Mobile/13A452 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46.140 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/54.0.2840.66 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/54.0.2840.91 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69 Safari/601.1.46 Sleipnir/4.3.2m",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13G36",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/5.3.48993 Mobile/13D15 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/54.0.2840.66 Mobile/13E238 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/50.0.2661.77 Mobile/13E238 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/55.0.2883.79 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/55.0.2883.79 Mobile/13F69 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/5.3 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/22.0.141836113 Mobile/13G36 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/56.0.2924.79 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/56.0.2924.79 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/56.0.2924.79 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/57.0.2987.100 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) FxiOS/6.1 Mobile/13D15 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13BC75 Safari/601.1",
"Mozilla/5.0 (iPad; CPU OS 9_3_3 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/56.0.2924.79 Mobile/13G34 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/57.0.2987.137 Mobile/13G36 Safari/601.1.46",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46(KHTML, like Gecko) FxiOS/6.1 Mobile/13G36 Safari/601.1.46",
"Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25",
"Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/9.0 Mobile/13A340 Safari/600.1.4",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) CriOS/36.0.1985.49 Mobile/13G36 Safari/9537.53",
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1 (KHTML, like Gecko) CriOS/59.0.3071.102 Mobile/13G36 Safari/601.1.46"
]
def get_header():
    """Build HTTP request headers with a randomly chosen User-Agent.

    Returns:
        dict: headers carrying one User-Agent picked at random from
        ``user_agent_list`` plus a spoofed client-IP header, so repeated
        requests look less uniform to the target server.
    """
    i = random.randint(0, len(user_agent_list) - 1)
    headers = {
        'User-Agent': user_agent_list[i],
        # Fixed: the header name was misspelled 'x-forearded-for', so the
        # spoofed client IP was silently ignored by upstream servers.
        'X-Forwarded-For': "1.2.3.4"
    }
    return headers
def get_multipart_formdata(data, bondary):
    """Serialize *data* as a multipart/form-data request body.

    Args:
        data (dict): field name -> value.  Entries whose value is None are
            skipped; int values are converted to str automatically.
        bondary (str): multipart boundary token (without the leading '--').

    Returns:
        bytes: UTF-8 encoded body, lines separated by CRLF and closed with
        the final '--boundary--' marker per RFC 2046.
    """
    post_data = []
    # .items() is equivalent on Python 2 and 3; the original used the
    # Python-2-only dict.iteritems().
    for key, value in data.items():
        if value is None:
            continue
        post_data.append('--' + bondary)
        post_data.append('Content-Disposition: form-data; name="{0}"'.format(key))
        post_data.append('')
        if isinstance(value, int):
            value = str(value)
        post_data.append(value)
    post_data.append('--' + bondary + '--')
    post_data.append('')
    body = '\r\n'.join(post_data)
    return body.encode('utf-8')
def verify_captcha():
    """Interactively solve lianjia.com's image captcha to unblock the crawler.

    Fetches the captcha form for its CSRF token, downloads the puzzle images
    to 0.jpg..3.jpg, asks the operator on the console which images are
    reversed, encodes the four answers as a bitmask, and POSTs the answer
    back as multipart/form-data with the session cookie.
    Python 2 only (print statement, raw_input, xrange).
    """
    url = "http://captcha.lianjia.com"
    r = requests.get(url, headers= get_header(), timeout= 30)
    soup = BeautifulSoup(r.content, "lxml")
    # The hidden CSRF token is carried by the third <input> of the "human" form.
    pages = soup.find("form", class_="human").find_all("input")
    print pages[2]['value'], pages[2]['name']
    csrf = pages[2]['value']
    time.sleep(1)
    url = "http://captcha.lianjia.com/human"
    r = requests.get(url, headers= get_header(), timeout= 30)
    # The session cookie must be echoed back with the answer POST below.
    cookie = r.headers['Set-Cookie']
    soup = BeautifulSoup(r.content, "lxml")
    # /human returns JSON: base64 data-URI images keyed "0".."3" plus a uuid.
    images = json.loads(r.content)['images']
    uuid = json.loads(r.content)['uuid']
    #print images
    # Dump each image to disk so the operator can inspect it.
    for idx in xrange(0, len(images)):
        fh = open("%d.jpg"%idx, "wb")
        # Split off the "data:image/...;base64," prefix before decoding.
        data = images['%d'%idx].split(',', 1)
        fh.write(base64.b64decode(data[1]))
        fh.close()
    step = 0
    # Bit i of mask is set when the operator says image i is reversed.
    mask = 0
    while 1:
        if step == 0:
            val = raw_input("check 0.jpg reverse,(y/n):\t")
            if val == 'y' or val == 'Y':
                mask = mask + 1
            step = 1
        elif step == 1:
            val = raw_input("check 1.jpg reverse,(y/n):\t")
            if val == 'y' or val == 'Y':
                mask = mask + 2
            step = 2
        elif step == 2:
            val = raw_input("check 2.jpg reverse,(y/n):\t")
            if val == 'y' or val == 'Y':
                mask = mask + 4
            step = 3
        elif step == 3:
            val = raw_input("check 3.jpg reverse,(y/n):\t")
            if val == 'y' or val == 'Y':
                mask = mask + 8
            break
    print mask
    boundary='----WebKitFormBoundary7MA4YWxkTrZu0gW'
    headers = get_header()
    headers['content-type'] = "multipart/form-data; boundary={0}".format(boundary)
    headers['Cookie'] = cookie
    print get_multipart_formdata({'uuid':uuid, 'bitvalue': mask, '_csrf': csrf}, boundary)
    print headers
    r = requests.post(url, headers=headers, data=get_multipart_formdata({'uuid':uuid, 'bitvalue': mask, '_csrf': csrf}, boundary))
    print r.request
    print r.content
def get_distric_rent_cnt(distric):
print "try to grab %s community rent cnt "%distric
url = "http://gz.lianjia.com/zufang/%s/"%distric
r = requests.get(url, headers= get_header(), timeout= 30)
#print r.text.encode("utf-8")
soup = BeautifulSoup(r.content, "lxml")
pages = soup.find("div", class_="page-box house-lst-page-box")
time.sleep(random.randint(5,10))
try:
pageStr = pages["page-data"]
except Exception, e:
print e,r.content
os._exit(0)
jo = json.loads(pageStr)
return jo['totalPage']
def get_distric_community_cnt(distric):
print "try to grab %s community cnt "%distric
url = "http://gz.lianjia.com/xiaoqu/%s/"%distric
r = requests.get(url, headers= get_header(), timeout= 30)
#print r.text.encode("utf-8")
soup = BeautifulSoup(r.content, "lxml")
pages = soup.find("div", class_="page-box house-lst-page-box")
time.sleep(random.randint(5,10))
try:
pageStr = pages["page-data"]
except Exception, e:
print e,r.content,r.text
os._exit(0)
jo = json.loads(pageStr)
return jo['totalPage']
def grab_distric(url):
    """Crawl one page of community listings at *url* and persist each entry
    as a DistricHouse row through the ORM.

    Terminates the process when the listing container is missing (typically
    the site served a captcha page).  Python 2 only (print statements).
    """
    print "try to grab distric page ", url
    r = requests.get(url, headers= get_header(), timeout= 30)
    soup = BeautifulSoup(r.content, "lxml")
    try:
        districList = soup.find("ul", class_="listContent").find_all('li')
    except Exception, e:
        # find() returned None -> AttributeError; usually a captcha page.
        print e,r.content
        os._exit(0)
    if not districList:
        return
    for item in districList:
        # Detail-page link — unique identifier for the community.
        distUrl = item.a["href"] or ''
        #if distUrl in grabedPool["data"]:
        #    print distUrl, "already exits,skip"
        #    continue
        print "start to crawl" , distUrl
        # Historical transactions, e.g. "90天成交12套" = "12 deals in 90 days".
        title = item.find("div", class_="title").a.string.encode("utf-8").rstrip()
        historyList = item.find("div", class_="houseInfo").find_all('a')
        history = historyList[0].string.encode("utf-8")
        m = re.match(r"(\d+)天成交(\d+)套", history)
        print m, history
        historyRange = 0
        historySell = 0
        if m:
            historyRange = m.group(1)
            historySell = m.group(2)
        print title, history, historyRange, historySell
        # District and business circle.
        pos = item.find("div", class_="positionInfo").find_all('a')
        dis = pos[0].string.encode("utf-8")
        bizcircle = pos[1].string.encode("utf-8")
        print dis, bizcircle
        # Average transaction price.
        avgStr = item.find("div", class_="totalPrice").span.string.encode("utf-8")
        m = re.match(r"(\d+)", avgStr)
        if m:
            # NOTE(review): int(avgStr) raises if avgStr has a non-digit
            # suffix; int(m.group(1)) was probably intended — confirm.
            avg = int(avgStr)
        else:
            avg = 0
        print avg
        # Number of on-sale listings in this community.
        onSell = int(item.find("div", class_="xiaoquListItemSellCount").a.span.string)
        print onSell
        # Persist via the ORM (sqlite, per the model module).
        distItem = DistricHouse(
            name = title,
            district = dis,
            bizcircle = bizcircle,
            historyRange = historyRange,
            historySell = historySell,
            ref = distUrl,
            avgpx = avg,
            onsell = onSell,
        )
        distItem.save()
        # Add to the already-crawled pool (currently disabled).
        #grabedPool["data"].add(distUrl)
        # Sleep a few seconds between items to go easy on the server.
        time.sleep(random.randint(1,3))
def get_distric_chengjiao_cnt(distric, proxy):
print "try to grab %s chengjiao cnt "%distric
url = "http://gz.lianjia.com/chengjiao/%s/"%distric
r = requests.get(url, headers= get_header(), timeout= 30)
#print r.text.encode("utf-8")
soup = BeautifulSoup(r.content, "lxml")
try:
pages = soup.find("div", class_="page-box house-lst-page-box")
time.sleep(random.randint(5,10))
pageStr = pages["page-data"]
jo = json.loads(pageStr)
return jo['totalPage']
except Exception, e:
print e,r.content
os._exit(0)
def get_distric_bid_cnt(distric, proxy):
print "try to grab %s bid cnt "%distric
url = "http://gz.lianjia.com/ershoufang/%s/"%distric
r = requests.get(url, headers= get_header(), timeout= 30)
#print r.text.encode("utf-8")
soup = BeautifulSoup(r.content, "lxml")
try:
pages = soup.find("div", class_="page-box house-lst-page-box")
time.sleep(random.randint(5,10))
pageStr = pages["page-data"]
jo = json.loads(pageStr)
return jo['totalPage']
except Exception, e:
print e,r.content
os._exit(0)
#i = random.randint(0,len(proxy)-1)
#proxies = {
# "http": proxy[i]
# }
#print "try proxy", proxy[i]
#r = requests.get(url, headers= get_header(), proxies=proxies, timeout= 30)
#soup = BeautifulSoup(r.content, "lxml")
#pages = soup.find("div", class_="page-box house-lst-page-box")
#time.sleep(random.randint(5,10))
#pageStr = pages["page-data"]
#jo = json.loads(pageStr)
#return jo['totalPage']
def get_xici_proxy(url, proxys):
print "get proxy", url
r = requests.get(url, headers= get_header(), timeout= 10)
soup = BeautifulSoup(r.content, "lxml")
pages = soup.find_all("tr", class_="odd")
for page in pages:
items = page.find_all("td")
proxy ="http://%s:%s"%(items[1].string, items[2].string)
url = "http://gz.lianjia.com/chengjiao/tianhe/"
proxies = {
"http": proxy
}
try:
r = requests.get(url, headers= get_header(), proxies=proxies, timeout= 3)
soup = BeautifulSoup(r.content, "lxml")
tradedHoustList = soup.find("ul", class_="listContent")
if not tradedHoustList:
continue
proxys.append(proxy)
print proxy, proxys
except Exception, e:
#print Exception,":",e
continue
def get_kuaidaili_proxy(url, proxys):
print "get proxy", url
r = requests.get(url, headers= get_header(), timeout= 10)
soup = BeautifulSoup(r.content, "lxml")
pages = soup.find("tbody").find_all("tr")
for page in pages:
items = page.find_all("td")
proxy ="http://%s:%s"%(items[0].string, items[1].string)
print proxy
url = "http://gz.lianjia.com/chengjiao/tianhe/"
proxies = {
"http": proxy
}
try:
r = requests.get(url, headers= get_header(), proxies=proxies, timeout= 3)
soup = BeautifulSoup(r.content, "lxml")
tradedHoustList = soup.find("ul", class_="listContent")
if not tradedHoustList:
continue
proxys.append(proxy)
print proxy, proxys
except Exception, e:
#print Exception,":",e
continue
def get_youdaili_proxy(url, proxys):
print "get proxy", url
r = requests.get(url, headers= get_header(), timeout= 10)
soup = BeautifulSoup(r.content, "lxml")
pages = soup.find("div", class_="chunlist").find_all("a")
page = pages[0]
u = page["href"]
html = requests.get(u, headers= get_header(), timeout= 3).content
proxy_list = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}', html)
for proxy in proxy_list:
url = "http://gz.lianjia.com/chengjiao/tianhe/"
proxies = {
"http": proxy
}
try:
r = requests.get(url, headers= get_header(), proxies=proxies, timeout= 3)
soup = BeautifulSoup(r.content, "lxml")
tradedHoustList = soup.find("ul", class_="listContent")
if not tradedHoustList:
continue
proxys.append(proxy)
print proxy, proxys
except Exception, e:
#print Exception,":",e
continue
def build_proxy():
proxys = []
#get_xici_proxy("http://www.xicidaili.com/nn/1", proxys)
#get_xici_proxy("http://www.xicidaili.com/nn/2", proxys)
#get_kuaidaili_proxy("http://www.kuaidaili.com/proxylist/1", proxys)
#get_kuaidaili_proxy("http://www.kuaidaili.com/proxylist/2", proxys)
#get_kuaidaili_proxy("http://www.kuaidaili.com/proxylist/3", proxys)
#get_kuaidaili_proxy("http://www.kuaidaili.com/proxylist/4", proxys)
#get_youdaili_proxy("http://www.youdaili.net/Daili/http", proxys)
r = requests.get("http://127.0.0.1:5000/get_all/", headers= get_header(), timeout= 10)
print r.content
proxys= json.loads(r.content)
print proxys
return proxys
def grabRent(url, proxy, disName, priceDic, bizDic):
print "try to grab page ", url
r = requests.get(url, headers= get_header(), timeout= 30)
soup = BeautifulSoup(r.content, "lxml")
try:
bidHoustList = soup.find("ul", class_="house-lst").find_all('li')
except Exception, e:
print e,r.content
os._exit(0)
if not bidHoustList:
return
storge = []
for item in bidHoustList:
# 房屋详情链接,唯一标识符
houseUrl = item.a["href"] or ''
#if houseUrl in grabedPool["data"]:
# print houseUrl, "already exit, skip"
# continue
print 'start to crawl' , houseUrl
# 抓取 小区,户型,面积 朝向,装修,电梯
xiaoqu = item.find("div", class_="where").a.string.rstrip().encode("utf-8")
houseType = item.find("span", class_="zone").span.string.rstrip().encode("utf-8")
squareStr = item.find("span", class_="meters").string.rstrip().encode("utf-8")
orientation = item.find("div", class_="where").findAll("span")[4].string.encode("utf-8").rstrip()
print xiaoqu, houseType, squareStr, orientation
m = re.match(r"\b[0-9]+(\.[0-9]+)?", squareStr)
square = 0
if m:
square = string.atof(m.group(0))
print squareStr, square
#楼层,楼龄
posInfo = item.find("div", class_="con").contents[2]
m = re.match(ur"(.*)楼层\(共(\d+)层\)", posInfo)
floorLevel = 'Nav'
floorTotal = -1
if m:
floorLevel = m.group(1)
floorTotal = m.group(2)
print m.group(1).encode("utf-8"), m.group(2)
print floorLevel.encode("utf-8"), floorTotal
#挂牌价
priceInfo = item.find("div", class_="price").span
if priceInfo:
price = string.atof(priceInfo.string)
else :
price = 0
print price
pricePre = item.find("div", class_="price-pre").string
priceUpdate, misc = ([x.strip() for x in pricePre.split(" ")])
print priceUpdate
#关注,带看, 放盘
seenStr = item.find("div", class_="square").find("span", class_="num").string
seen = 0
if m:
seen = string.atoi(seenStr)
print seen
try:
avg = priceDic[xiaoqu]
except Exception, e:
print e
avg = 0
print "avg", avg
try:
biz = bizDic[xiaoqu]
except Exception, e:
print e
biz = ""
print "biz", biz
loan = 0
loan = square*avg -1500000
loanRet = 0
yearRate = 0.049
monthRate = 0.049/12
loanYear = 30
loanMonth = loanYear*12
if loan < 0 :
loan = 0
loanRet = 0
else:
loanRet = loan*monthRate*((1+monthRate)**loanMonth)/(((1+monthRate)**loanMonth)-1)
loan = round(loan/10000)
print loan, loanRet
# 通过 ORM 存储到 sqlite
BidItem = RentHouse(
xiaoqu = xiaoqu,
houseType = houseType,
square = square,
houseUrl = houseUrl,
orientation = orientation,
floorLevel = floorLevel,
floorTotal = floorTotal,
price = price,
avg = avg,
loan = loan,
loanRet = loanRet,
seen = seen,
bizcircle = biz,
district = disName,
)
storge.append(BidItem)
for s in storge:
s.save()
# 添加到已经抓取的池
#grabedPool["data"].add(s.houseUrl)
# 抓取完成后,休息几秒钟,避免给对方服务器造成大负担
time.sleep(random.randint(1,3))
def grabBid(url, proxy, disName, priceDic):
print "try to grabbid page ", url
r = requests.get(url, headers= get_header(), timeout= 30)
soup = BeautifulSoup(r.content, "lxml")
try:
bidHoustList = soup.find("ul", class_="sellListContent").find_all('li')
except Exception, e:
print e,r.content
os._exit(0)
i = random.randint(0,len(proxy)-1)
proxies = {
"http": proxy[i]
}
print "try proxy", proxy[i]
r = requests.get(url, headers= get_header(), proxies=proxies, timeout= 30)
soup = BeautifulSoup(r.content, "lxml")
bidHoustList = soup.find("ul", class_="sellListContent").find_all('li')
if not bidHoustList:
return
storge = []
for item in bidHoustList:
# 房屋详情链接,唯一标识符
houseUrl = item.a["href"] or ''
#if houseUrl in grabedPool["data"]:
# print houseUrl, "already exit, skip"
# continue
print 'start to crawl' , houseUrl
# 抓取 小区,户型,面积 朝向,装修,电梯
houseInfo = item.find("div", class_="houseInfo").contents[2]
xiaoqu = item.find("div", class_="houseInfo").a.string.encode("utf-8").rstrip()
if houseInfo:
if len(houseInfo.split("|")) == 5:
null, houseType, squareStr, orientation, decoration = ([x.strip() for x in houseInfo.split("|")])
elevator = 'Nav'
if len(houseInfo.split("|")) == 6:
null, houseType, squareStr, orientation, decoration, elevator = ([x.strip() for x in houseInfo.split("|")])
print xiaoqu, houseType.encode("utf-8"), orientation.encode("utf-8"), decoration.encode("utf-8"), elevator.encode("utf-8")
m = re.match(ur"\b[0-9]+(\.[0-9]+)?", squareStr)
square = 0
if m:
square = string.atof(m.group(0))
print squareStr.encode("utf-8"), square
#楼层,楼龄
posInfo = item.find("div", class_="positionInfo").contents[1]
print posInfo.encode("utf-8")
m = re.match(ur"(.*)楼层\(共(\d+)层\)(\d+)年建", posInfo)
floorLevel = 'Nav'
floorTotal = -1
build = -1
if m:
floorLevel = m.group(1)
floorTotal = m.group(2)
build = int(m.group(3))
print m.group(1).encode("utf-8"), m.group(2), m.group(3)
print floorLevel.encode("utf-8"), floorTotal, build
biz = item.find("div", class_="positionInfo").a.string
print biz
#挂牌价
priceInfo = item.find("div", class_="totalPrice").span
if priceInfo:
bid = string.atof(priceInfo.string)
else :
bid = 0
print bid
#均价
priceInfo = item.find("div", class_="unitPrice").span
priceStr = ""
if priceInfo:
priceStr = priceInfo.string
m = re.match(ur"单价(\d+)元", priceStr)
price = 0
if m:
price = m.group(1)
print price, priceStr.encode("utf-8")
#关注,带看, 放盘
followInfo = item.find("div", class_="followInfo").contents[1]
if followInfo:
watchStr, seenStr, releaseStr = ([x.strip() for x in followInfo.split("/")])
print watchStr.encode("utf-8"), seenStr.encode("utf-8"), releaseStr.encode("utf-8")
m = re.match(ur"(\d+)人", watchStr)
watch = 0
if m:
watch = m.group(1)
m = re.match(ur"共(\d+)次", seenStr)
seen = 0
if m:
seen = m.group(1)
m = re.match(ur"(\d+)天", releaseStr)
release = 0
if m:
release = int(m.group(1))
else:
m = re.match(ur"(\d+)个月", releaseStr)
if m:
release = int(m.group(1))*30
else:
m = re.match(ur"(.*)年", releaseStr)
if m:
release = m.group(1)
if release == u"一":
release = 365
try:
avg = priceDic[xiaoqu]
except Exception, e:
avg = 0
print watch, seen, release, avg
# 通过 ORM 存储到 sqlite
BidItem = BidHouse(
xiaoqu = xiaoqu,
houseType = houseType,
square = square,
houseUrl = houseUrl,
orientation = orientation,
decoration = decoration,
elevator = elevator,
floorLevel = floorLevel,
floorTotal = floorTotal,
build = build,
price = price,
avg = avg,
bid = bid,
watch = watch,
seen = seen,
release = release,
bizcircle = biz,
district = disName,
)
storge.append(BidItem)
for s in storge:
s.save()
# 添加到已经抓取的池
#grabedPool["data"].add(s.houseUrl)
# 抓取完成后,休息几秒钟,避免给对方服务器造成大负担
time.sleep(random.randint(1,3))
def grab(url, proxy, disName, bizDic, lastMarkTrade):
print "try to grab page ", url
r = requests.get(url, headers= get_header(), timeout= 30)
soup = BeautifulSoup(r.content, "lxml")
try:
tradedHoustList = soup.find("ul", class_="listContent").find_all('li')
except Exception, e:
print e,r.content
#os._exit(0)
tradedHoustList = soup.find("li", class_="pictext")
if not tradedHoustList:
tradedHoustList = soup.find("ul", class_="listContent").find_all('li')
else:
i = random.randint(0,len(proxy)-1)
proxies = {
"http": proxy[i]
}
print "try proxy", proxy[i]
r = requests.get(url, headers= get_header(), proxies=proxies, timeout= 30)
soup = BeautifulSoup(r.content, "lxml")
tradedHoustList = soup.find("ul", class_="listContent").find_all('li')
if not tradedHoustList:
return
storge = []
stop = False
for item in tradedHoustList:
# 房屋详情链接,唯一标识符
houseUrl = item.a["href"] or ''
#if houseUrl in grabedPool["data"]:
# print houseUrl, "already exit, skip"
# continue
print 'start to crawl' , houseUrl
# 抓取 小区,户型,面积
title = item.find("div", class_="title")
if title:
print title
xiaoqu, houseType, square = (title.string.replace(" ", " ").split(" "))
m = re.match(ur"\b[0-9]+(\.[0-9]+)?", square)
if m:
square = string.atof(m.group(0))
else:
xiaoqu, houseType, square = ('Nav', 'Nav', 0)
xiaoqu = xiaoqu.encode("utf-8").rstrip()
houseType = houseType.encode("utf-8")
print xiaoqu, houseType, square
dealInfo = item.find("div", class_="totalPrice").span
try:
deal = string.atof(dealInfo.string.encode("utf-8"))
except Exception, e:
deal = -1
print deal
# 朝向,装修,电梯
houseInfo = item.find("div", class_="houseInfo").contents[1]
if houseInfo:
if len(houseInfo.split("|")) == 2:
orientation, decoration = ([x.strip() for x in houseInfo.split("|")])
elevator = 'Nav'
if len(houseInfo.split("|")) == 3:
orientation, decoration, elevator = ([x.strip() for x in houseInfo.split("|")])
print orientation.encode("utf-8"), decoration.encode("utf-8"), elevator.encode("utf-8")
#成交日期
dealDate = item.find("div", class_="dealDate")
if dealDate:
tradeDate = datetime.datetime.strptime(dealDate.string, '%Y.%m.%d') or datetime.datetime(1990, 1, 1)
print tradeDate
if lastMarkTrade >= tradeDate:
print 'break for time'
stop = True
break
#楼层,楼龄
posInfo = item.find("div", class_="positionInfo").contents[1]
if posInfo:
floor, buildStr = ([x.strip() for x in posInfo.split(" ")])
print floor.encode("utf-8"), buildStr.encode("utf-8")
m = re.match(ur"(.*)楼层\(共(\d+)层\)", floor)
floorLevel = 'Nav'
floorTotal = -1
if m:
floorLevel = m.group(1)
floorTotal = m.group(2)
print m.group(1).encode("utf-8"), m.group(2)
m = re.match(ur"(\d+)年建", buildStr)
build = -1
if m:
build = m.group(1)
print floorLevel.encode("utf-8"), floorTotal, build
#均价
priceInfo = item.find("div", class_="unitPrice").span
if priceInfo:
price = int(priceInfo.string)
else :
price = 0
print price
#挂牌价,成交周期
dealCycle = item.find("span", class_="dealCycleTxt").find_all('span')
bid = -1
cycle = -1
if dealCycle:
if len(dealCycle) == 1:
bidStr = dealCycle[0].string
cycleStr = ""
if len(dealCycle) == 2:
bidStr = dealCycle[0].string
cycleStr = dealCycle[1].string
print bidStr.encode("utf-8"), cycleStr.encode("utf-8")
m = re.match(ur"挂牌(\d+)万", bidStr)
if m:
bid = m.group(1)
m = re.match(ur"成交周期(\d+)天", cycleStr)
if m:
cycle = m.group(1)
try:
biz = bizDic[xiaoqu]
except Exception, e:
biz = "unknown"
#print bid, cycle, disName, biz
# 通过 ORM 存储到 sqlite
tradeItem = TradedHouse(
xiaoqu = xiaoqu,
houseType = houseType,
square = square,
houseUrl = houseUrl,
orientation = orientation,
decoration = decoration,
elevator = elevator,
floorLevel = floorLevel,
floorTotal = floorTotal,
build = build,
price = price,
tradeDate = tradeDate,
bid = bid,
deal = deal,
cycle = cycle,
district = disName,
bizcircle = biz,
)
storge.append(tradeItem)
for s in storge:
s.save()
# 添加到已经抓取的池
#grabedPool["data"].add(s.houseUrl)
# 抓取完成后,休息几秒钟,避免给对方服务器造成大负担
time.sleep(random.randint(1,3))
return stop
step_context = {"phase":0, "cnt":0, "offset":0, "pgoffset":1, "date":"20170705"}
def save_context():
global step_context
print "save", step_context, type(step_context)
json.dump(step_context, open('context','w'))
def load_context():
global step_context
step_context = json.load(open('context','r'))
print "load", step_context, type(step_context)
def crawl_district():
global step_context
for dis_offset in xrange(step_context['offset'], len(gz_district)):
dis = gz_district[dis_offset]
step_context['offset'] = dis_offset
save_context()
cnt = step_context['cnt']
if cnt == 0:
cnt = get_distric_community_cnt(dis)
print "get_distric_info", dis, cnt
step_context['cnt'] = cnt
save_context()
for i in xrange(step_context['pgoffset'], cnt+1):
step_context['pgoffset'] = i
save_context()
url = "http://gz.lianjia.com/xiaoqu/%s/pg%s/"%(dis, format(str(i)))
grab_distric(url)
step_context['pgoffset'] = 1
step_context['cnt'] = 0
save_context()
def crawl_district_chengjiao():
global step_context
for dis_offset in xrange(step_context['offset'], len(gz_district)):
dis = gz_district[dis_offset]
step_context['offset'] = dis_offset
save_context()
distric = DistricHouse.select(DistricHouse.name, DistricHouse.bizcircle, DistricHouse.avgpx).where(DistricHouse.district == gz_district_name[dis])
print distric
bizDic = {}
priceDic = {}
for item in distric:
name = item.name.rstrip().encode("utf-8")
biz = item.bizcircle.encode("utf-8")
bizDic[name] = biz
price = item.avgpx
priceDic[name] = price
#print name
cnt = step_context['cnt']
if cnt == 0:
cnt = get_distric_chengjiao_cnt(dis, [])
step_context['cnt'] = cnt
save_context()
ts = TradedHouse.select(TradedHouse.tradeDate).where(TradedHouse.district == gz_district_name[dis]).order_by(TradedHouse.tradeDate.desc()).limit(1)
print ts
for item in ts:
print item.tradeDate, type(item.tradeDate)
lastMarkTrade = item.tradeDate
for i in xrange(step_context['pgoffset'], cnt+1):
step_context['pgoffset'] = i
save_context()
page = "http://gz.lianjia.com/chengjiao/%s/pg%s/"%(dis, format(str(i)))
stop = grab(page, [], gz_district_name[dis], bizDic, lastMarkTrade)
if stop == True:
break
step_context['pgoffset'] = 1
step_context['cnt'] = 0
save_context()
def crawl_district_bid():
global step_context
#proxy = build_proxy()
for dis_offset in xrange(step_context['offset'], len(gz_district)):
dis = gz_district[dis_offset]
distric = DistricHouse.select(DistricHouse.name, DistricHouse.bizcircle, DistricHouse.avgpx).where(DistricHouse.district == gz_district_name[dis])
print distric
bizDic = {}
priceDic = {}
for item in distric:
name = item.name.rstrip().encode("utf-8")
biz = item.bizcircle.encode("utf-8")
bizDic[name] = biz
price = item.avgpx
priceDic[name] = price
#print name
step_context['offset'] = dis_offset
save_context()
cnt = step_context['cnt']
if cnt == 0:
cnt = get_distric_bid_cnt(dis, [])
step_context['cnt'] = cnt
save_context()
for i in xrange(step_context['pgoffset'], cnt+1):
step_context['pgoffset'] = i
save_context()
page = "http://gz.lianjia.com/ershoufang/%s/pg%s/"%(dis, format(str(i)))
grabBid(page, [], gz_district_name[dis], priceDic)
step_context['pgoffset'] = 1
step_context['cnt'] = 0
save_context()
def crawl_district_rent():
global step_context
for dis_offset in xrange(step_context['offset'], len(gz_district)):
dis = gz_district[dis_offset]
distric = DistricHouse.select(DistricHouse.name, DistricHouse.bizcircle, DistricHouse.avgpx).where(DistricHouse.district == gz_district_name[dis])
print distric
bizDic = {}
priceDic = {}
for item in distric:
name = item.name.rstrip().encode("utf-8")
biz = item.bizcircle.encode("utf-8")
bizDic[name] = biz
price = item.avgpx
priceDic[name] = price
#print name
step_context['offset'] = dis_offset
save_context()
cnt = step_context['cnt']
if cnt == 0:
cnt = get_distric_rent_cnt(dis)
step_context['cnt'] = cnt
save_context()
for i in xrange(step_context['pgoffset'], cnt+1):
step_context['pgoffset'] = i
save_context()
page = "http://gz.lianjia.com/zufang/%s/pg%s/"%(dis, format(str(i)))
grabRent(page, [], gz_district_name[dis], priceDic, bizDic)
step_context['pgoffset'] = 1
step_context['cnt'] = 0
save_context()
def process_context():
#global step_context
print step_context['phase']
if step_context['phase'] == 0:
crawl_district()
step_context['phase'] = 1
step_context['cnt'] = 0
step_context['offset'] = 0
step_context['pgoffset'] = 1
step_context['date'] = time.strftime("%Y%m%d", time.localtime())
save_context()
elif step_context['phase'] == 1:
crawl_district_chengjiao()
step_context['phase'] = 2
step_context['cnt'] = 0
step_context['offset'] = 0
step_context['pgoffset'] = 1
save_context()
elif step_context['phase'] == 2:
crawl_district_bid()
step_context['phase'] = 3
step_context['cnt'] = 0
step_context['offset'] = 0
step_context['pgoffset'] = 1
save_context()
elif step_context['phase'] == 3:
crawl_district_rent()
step_context['phase'] = -1
step_context['cnt'] = 0
step_context['offset'] = 0
step_context['pgoffset'] = 1
save_context()
elif step_context['phase'] == -1:
#shutil.copy('houseprice.db', time.strftime("houseprice_%Y%m%d.db", time.localtime()))
clear_table()
step_context['phase'] = 1
if __name__== "__main__":
    #save_context()
    load_context()
    #verify_captcha()
    # A checkpoint left at phase -1 (finished run) is kicked once, which
    # clears the tables and rewinds to phase 1; then phases run until the
    # pipeline reaches -1 again.
    if step_context['phase'] == -1:
        process_context()
    while step_context['phase'] != -1:
        process_context()
| 48.142623
| 155
| 0.594885
| 8,898
| 58,734
| 3.860868
| 0.071926
| 0.021773
| 0.047156
| 0.054113
| 0.788787
| 0.75671
| 0.732578
| 0.702946
| 0.687868
| 0.666385
| 0
| 0.115736
| 0.263272
| 58,734
| 1,219
| 156
| 48.182116
| 0.678191
| 0.036606
| 0
| 0.45605
| 0
| 0.188211
| 0.45642
| 0.003912
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.015512
| null | null | 0.086867
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
412db9c7cef044c2683619df8e0ed4cea588f793
| 147
|
py
|
Python
|
polls/admin.py
|
harshit-j/VotePlex-Django-Project
|
3ceae098ae29be79f27f15f0ef50bb3dca6a2f31
|
[
"Unlicense",
"MIT"
] | null | null | null |
polls/admin.py
|
harshit-j/VotePlex-Django-Project
|
3ceae098ae29be79f27f15f0ef50bb3dca6a2f31
|
[
"Unlicense",
"MIT"
] | null | null | null |
polls/admin.py
|
harshit-j/VotePlex-Django-Project
|
3ceae098ae29be79f27f15f0ef50bb3dca6a2f31
|
[
"Unlicense",
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Poll,Choice
# Register your models here.
admin.site.register(Poll)
admin.site.register(Choice)
| 29.4
| 32
| 0.816327
| 22
| 147
| 5.454545
| 0.545455
| 0.15
| 0.283333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 147
| 5
| 33
| 29.4
| 0.902256
| 0.176871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4137ff96de7ba46bd83b1d001d68f27777635219
| 187
|
py
|
Python
|
geometry/__init__.py
|
FRidh/python-geometry
|
62cb6210bcad3b1e4c1a7e0516ca17138793c1b3
|
[
"BSD-3-Clause"
] | 8
|
2015-12-16T04:39:12.000Z
|
2021-04-08T15:49:23.000Z
|
geometry/__init__.py
|
FRidh/python-geometry
|
62cb6210bcad3b1e4c1a7e0516ca17138793c1b3
|
[
"BSD-3-Clause"
] | 1
|
2015-08-07T15:03:02.000Z
|
2015-08-07T15:03:02.000Z
|
geometry/__init__.py
|
FRidh/python-geometry
|
62cb6210bcad3b1e4c1a7e0516ca17138793c1b3
|
[
"BSD-3-Clause"
] | 2
|
2015-03-23T02:03:04.000Z
|
2020-01-09T05:01:50.000Z
|
from .quat import Quat
from .point import Point
from .vector import Vector
from .plane import Plane
from .polygon import Polygon
from .edge import Edge
from .pointlist import PointList
| 18.7
| 32
| 0.802139
| 28
| 187
| 5.357143
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160428
| 187
| 9
| 33
| 20.777778
| 0.955414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
41547329fa8a963ce10488a8647273da8ecf5058
| 184
|
py
|
Python
|
vedaseg/utils/__init__.py
|
E18301194/vedaseg
|
c62c8ea46dbba12f03262452dd7bed22969cfe4e
|
[
"Apache-2.0"
] | 2
|
2020-07-15T02:36:46.000Z
|
2021-03-08T03:18:26.000Z
|
vedaseg/utils/__init__.py
|
E18301194/vedaseg
|
c62c8ea46dbba12f03262452dd7bed22969cfe4e
|
[
"Apache-2.0"
] | null | null | null |
vedaseg/utils/__init__.py
|
E18301194/vedaseg
|
c62c8ea46dbba12f03262452dd7bed22969cfe4e
|
[
"Apache-2.0"
] | 1
|
2021-09-16T09:40:12.000Z
|
2021-09-16T09:40:12.000Z
|
from .config import ConfigDict, Config
from .common import build_from_cfg, get_root_logger, set_random_seed
from .registry import Registry
from .metrics import MetricMeter, dice_score
| 36.8
| 68
| 0.847826
| 27
| 184
| 5.518519
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 184
| 4
| 69
| 46
| 0.908537
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
417ac447cea1375c78db7c41bdda9318c895f13c
| 50
|
py
|
Python
|
projects/DistillReID/kdreid/modeling/__init__.py
|
asvk/fast-reid
|
cf246e9bee5b5e5d154de98ba0395b7a5d0d0ab7
|
[
"Apache-2.0"
] | 71
|
2021-03-12T07:43:43.000Z
|
2022-03-30T03:28:16.000Z
|
projects/DistillReID/kdreid/modeling/__init__.py
|
asvk/fast-reid
|
cf246e9bee5b5e5d154de98ba0395b7a5d0d0ab7
|
[
"Apache-2.0"
] | 8
|
2021-04-06T03:02:58.000Z
|
2022-02-16T14:05:47.000Z
|
projects/DistillReID/kdreid/modeling/__init__.py
|
asvk/fast-reid
|
cf246e9bee5b5e5d154de98ba0395b7a5d0d0ab7
|
[
"Apache-2.0"
] | 7
|
2021-04-19T02:55:58.000Z
|
2021-11-11T12:39:09.000Z
|
from .backbones import build_shufflenetv2_backbone
| 50
| 50
| 0.92
| 6
| 50
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.06
| 50
| 1
| 50
| 50
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
68d81d1d60b2e941cd671a96decb29d75ce45d87
| 159
|
py
|
Python
|
web_services/admin.py
|
berv-uni-project/audio-watermarik-web-services
|
0eb445b4fbd35ee564b910f90419c67cc8380604
|
[
"MIT"
] | 1
|
2021-12-13T01:32:02.000Z
|
2021-12-13T01:32:02.000Z
|
web_services/admin.py
|
berv-uni-project/audio-watermarik-web-services
|
0eb445b4fbd35ee564b910f90419c67cc8380604
|
[
"MIT"
] | 4
|
2021-12-13T23:14:27.000Z
|
2022-01-11T11:40:04.000Z
|
web_services/admin.py
|
berv-uni-project/audio-watermark-web-services
|
997fac664e1838210eaad64fe8951bb458fdfb63
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Embed,Extract
# Register your models here.
admin.site.register(Embed)
admin.site.register(Extract)
| 22.714286
| 34
| 0.779874
| 22
| 159
| 5.636364
| 0.545455
| 0.145161
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138365
| 159
| 7
| 35
| 22.714286
| 0.905109
| 0.163522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ec115da1b0f8b7f3f9bbd10c81b0458af3536f04
| 200
|
py
|
Python
|
micromagneticmodel/dynamics/__init__.py
|
ubermag/micromagneticmodel
|
91ad92d26cdbec369a5a41f7b90a17ca5328cd07
|
[
"BSD-3-Clause"
] | 5
|
2019-10-21T01:12:16.000Z
|
2021-09-24T03:52:30.000Z
|
micromagneticmodel/dynamics/__init__.py
|
ubermag/micromagneticmodel
|
91ad92d26cdbec369a5a41f7b90a17ca5328cd07
|
[
"BSD-3-Clause"
] | 11
|
2019-08-12T22:38:17.000Z
|
2022-03-15T00:08:47.000Z
|
micromagneticmodel/dynamics/__init__.py
|
ubermag/micromagneticmodel
|
91ad92d26cdbec369a5a41f7b90a17ca5328cd07
|
[
"BSD-3-Clause"
] | 4
|
2020-06-27T15:36:28.000Z
|
2021-12-06T15:08:04.000Z
|
from .dynamicsterm import DynamicsTerm
from .precession import Precession
from .damping import Damping
from .zhangli import ZhangLi
from .slonczewski import Slonczewski
from .dynamics import Dynamics
| 28.571429
| 38
| 0.85
| 24
| 200
| 7.083333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 200
| 6
| 39
| 33.333333
| 0.965909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6b74209eb5c6de464ddb2c029dcfaf0f83a380eb
| 5,692
|
py
|
Python
|
src/quantrt/models/order.py
|
ardieb/quantrt
|
4a65b97218711ce8ebc7ae90588546cbee72e646
|
[
"MIT"
] | null | null | null |
src/quantrt/models/order.py
|
ardieb/quantrt
|
4a65b97218711ce8ebc7ae90588546cbee72e646
|
[
"MIT"
] | null | null | null |
src/quantrt/models/order.py
|
ardieb/quantrt
|
4a65b97218711ce8ebc7ae90588546cbee72e646
|
[
"MIT"
] | null | null | null |
import asyncio
import asyncpg
import quantrt.common.config
import quantrt.common.log
import quantrt.util.database
import quantrt.util.time
from dataclasses import dataclass
from datetime import datetime
from decimal import Decimal
from enum import Enum
from typing import Optional, Iterable
__all__ = ["OrderStatus", "Order", "save", "save_batch", "fetch", "fetch_batch", "fetch_open"]
class OrderStatus(Enum):
Open = "open"
Maker = "maker"
Taker = "taker"
Canceled = "canceled"
@dataclass
class Order:
# order id from coinbase
order_id: str
# Product ticker.
product: str
# Timestamp of the trade.
tstamp: datetime
# Order status
status: OrderStatus
# Which side?
side: str
# What was the trade size?
amount: Decimal
# Price.
price: Decimal
async def save(order: Order, pool: Optional[asyncpg.Pool] = None):
if not pool:
pool = quantrt.common.config.db_conn_pool
if not pool:
quantrt.common.log.QuantrtLog.exception("No connection pool has been configured.")
raise EnvironmentError("No connection pool has been configured.")
async with pool.acquire() as conn:
sql = """
INSERT INTO order
(id, product, tstamp, status, side, amount, price)
VALUES
($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT
(product, order_id)
DO UPDATE
SET
tstamp = EXCLUDED.tstamp,
status = EXCLUDED.status,
side = EXCLUDED.side,
amount = EXCLUDED.amount,
price = EXCLUDED.price
"""
statement = await quantrt.util.database.prepare_sql(sql, conn)
await statement.executemany((
order.order_id,
order.product,
order.tstamp,
order.status.name,
order.side,
order.amount,
order.price))
async def save_batch(orders: Iterable[Order], pool: Optional[asyncpg.Pool] = None):
if not pool:
pool = quantrt.common.config.db_conn_pool
if not pool:
quantrt.common.log.QuantrtLog.exception("No connection pool has been configured.")
raise EnvironmentError("No connection pool has been configured.")
async with pool.acquire() as conn:
sql = """
INSERT INTO order
(id, product, tstamp, status, side, amount, price)
VALUES
($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT
(product, order_id)
DO UPDATE
SET
tstamp = EXCLUDED.tstamp,
status = EXCLUDED.status,
side = EXCLUDED.side,
amount = EXCLUDED.amount,
price = EXCLUDED.price
"""
statement = await quantrt.util.database.prepare_sql(sql, conn)
await statement.executemany([(
order.order_id,
order.product,
order.tstamp,
order.status.name,
order.side,
order.amount,
order.price) for order in orders])
async def fetch(id: str, pool: Optional[asyncpg.Pool] = None) -> Order:
if not pool:
pool = quantrt.common.config.db_conn_pool
if not pool:
quantrt.common.log.QuantrtLog.exception("No connection pool has been configured.")
raise EnvironmentError("No connection pool has been configured.")
async with pool.acquire() as conn:
sql = """
SELECT * FROM order WHERE order_id = $1
"""
statement = await quantrt.util.database.prepare_sql(sql, conn)
row = await statement.fetch(id)
return Order(
order_id=row[0]["order_id"],
product=row[0]["product"],
tstamp=row[0]["tstamp"],
status=row[0]["status"],
side=row[0]["side"],
amount=row[0]["amount"],
price=row[0]["amount"]
)
async def fetch_batch(ids: Iterable[str], pool: Optional[asyncpg.Pool] = None) -> Iterable[Order]:
    """Fetch the orders with the given order ids.

    Falls back to the globally configured connection pool when *pool* is
    not supplied; raises EnvironmentError if neither is available.
    """
    if not pool:
        pool = quantrt.common.config.db_conn_pool
    if not pool:
        quantrt.common.log.QuantrtLog.exception("No connection pool has been configured.")
        raise EnvironmentError("No connection pool has been configured.")
    async with pool.acquire() as conn:
        sql = """
        SELECT * FROM order WHERE order_id = $1
        """
        statement = await quantrt.util.database.prepare_sql(sql, conn)
        # BUGFIX: the comprehension iterates `order_id` but previously
        # passed the builtin `id` to fetchrow for every element.
        rows = [await statement.fetchrow(order_id) for order_id in ids]
        return [Order(
            order_id=row["order_id"],
            product=row["product"],
            tstamp=row["tstamp"],
            status=row["status"],
            side=row["side"],
            amount=row["amount"],
            # BUGFIX: price was populated from the "amount" column.
            price=row["price"]
        ) for row in rows]
async def fetch_open(product_id: str, pool: Optional[asyncpg.Pool] = None) -> Iterable[Order]:
if not pool:
pool = quantrt.common.config.db_conn_pool
if not pool:
quantrt.common.log.QuantrtLog.exception("No connection pool has been configured.")
raise EnvironmentError("No connection pool has been configured.")
async with pool.acquire() as conn:
sql = """
SELECT * FROM order WHERE product = $1 AND status = "open"
"""
statement = await quantrt.util.database.prepare_sql(sql, conn)
rows = await statement.fetch(product_id)
return [Order(
order_id=row["order_id"],
product=row["product"],
tstamp=row["tstamp"],
status=row["status"],
side=row["side"],
amount=row["amount"],
price=row["amount"]
) for row in rows]
| 31.622222
| 98
| 0.589248
| 660
| 5,692
| 5.019697
| 0.154545
| 0.035919
| 0.027166
| 0.05735
| 0.735285
| 0.735285
| 0.727739
| 0.717477
| 0.717477
| 0.717477
| 0
| 0.006047
| 0.302706
| 5,692
| 179
| 99
| 31.798883
| 0.828672
| 0.020907
| 0
| 0.682119
| 0
| 0.013245
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.072848
| 0
| 0.178808
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6bf9811632f4fe69277437cdd9d4d69bba02f031
| 1,296
|
py
|
Python
|
Conteudo das Aulas/009/Balada2-vR.py
|
cerberus707/lab-python
|
ebba3c9cde873d70d4bb61084f79ce30b7f9e047
|
[
"Apache-2.0"
] | null | null | null |
Conteudo das Aulas/009/Balada2-vR.py
|
cerberus707/lab-python
|
ebba3c9cde873d70d4bb61084f79ce30b7f9e047
|
[
"Apache-2.0"
] | null | null | null |
Conteudo das Aulas/009/Balada2-vR.py
|
cerberus707/lab-python
|
ebba3c9cde873d70d4bb61084f79ce30b7f9e047
|
[
"Apache-2.0"
] | null | null | null |
"""
Estruturas de decisao: IF e Else (se nao ou caso contrario)
':' no python, substitui o then
a condicao if so depende de resposta True ou Fale, exeplo case 2
"""
#Case 1
idade = int(input('Quantos Anos voce tem?'))
resp = idade >= 18
if resp == True:
print('Voce pode beber a vontade')
if resp == False:
print ('Voce so pode beber refrigerante')
print (resp)
#Case 2
idade = int(input('Quantos Anos voce tem?'))
resp = idade >= 18
if resp:
print('Voce pode beber a vontade')
if resp != True:
print ('Voce so pode beber refrigerante')
print (resp)
#Case 3
idade = int(input('Quantos Anos voce tem?'))
if idade >= 18:
print('Voce pode beber a vontade')
if idade < 18:
print ('Voce so pode beber refrigerante')
#Case 4
idade = int(input('Quantos Anos voce tem?'))
if idade >= 18:
print('Voce pode beber a vontade')
if idade >= 21:
print ('Voce é cliente VIP')
if idade < 18:
print ('Voce so pode beber refrigerante')
#Adocao do ELSE
#Case 1
idade = int(input('Quantos Anos voce tem?'))
if idade >= 18:
print('Voce pode beber a vontade')
if idade >= 21:
print ('Voce é cliente VIP')
else:
print ('Voce so pode beber refrigerante')
#Case 2
if 1 > 1:
Print('Sim')
else:
print('nao')
| 18.514286
| 64
| 0.628086
| 200
| 1,296
| 4.07
| 0.255
| 0.132678
| 0.079853
| 0.12285
| 0.793612
| 0.783784
| 0.783784
| 0.734644
| 0.65602
| 0.441032
| 0
| 0.027721
| 0.248457
| 1,296
| 70
| 65
| 18.514286
| 0.808008
| 0.162809
| 0
| 0.810811
| 0
| 0
| 0.402985
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.405405
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d407fa67f9b4da2e9a43ce405caf45b9fba0e258
| 203
|
py
|
Python
|
app/models/__init__.py
|
ZanMax/wrs
|
b62bcb50f305a83b7fe08f83f5e2d9f1c2cf1ec5
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
ZanMax/wrs
|
b62bcb50f305a83b7fe08f83f5e2d9f1c2cf1ec5
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
ZanMax/wrs
|
b62bcb50f305a83b7fe08f83f5e2d9f1c2cf1ec5
|
[
"MIT"
] | null | null | null |
from .users import Users
from .tokens import Tokens
from .groups import Groups
from .openedlogs import OpenedLogs
from .reports import Reports
from .worktime import WorkTime
from .version import Version
| 25.375
| 34
| 0.827586
| 28
| 203
| 6
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 203
| 7
| 35
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d40a7a607ea0772b28f5e4494ede8d2c08c8f234
| 317
|
py
|
Python
|
thonny/plugins/microbit/api_stubs/radio.py
|
shreyas202/thonny
|
ef894c359200b0591cf98451907243395b817c63
|
[
"MIT"
] | 2
|
2020-02-13T06:41:07.000Z
|
2022-02-14T09:28:02.000Z
|
Thonny/Lib/site-packages/thonny/plugins/microbit/api_stubs/radio.py
|
Pydiderot/pydiderotIDE
|
a42fcde3ea837ae40c957469f5d87427e8ce46d3
|
[
"MIT"
] | 30
|
2019-01-04T10:14:56.000Z
|
2020-10-12T14:00:31.000Z
|
Thonny/Lib/site-packages/thonny/plugins/microbit/api_stubs/radio.py
|
Pydiderot/pydiderotIDE
|
a42fcde3ea837ae40c957469f5d87427e8ce46d3
|
[
"MIT"
] | 3
|
2018-11-24T14:00:30.000Z
|
2019-07-02T02:32:26.000Z
|
# API stubs for the micro:bit ``radio`` module: constants plus no-op
# placeholders so editors can resolve names. None of these functions does
# any work; each simply returns None.

# Data-rate constants accepted by config(); values mirror the real module.
RATE_1MBIT = 0
RATE_250KBIT = 2
RATE_2MBIT = 1


def config():
    """Stub — configure the radio (no-op here)."""
    pass


def off():
    """Stub — power the radio down (no-op here)."""
    pass


def on():
    """Stub — power the radio up (no-op here)."""
    pass


def receive():
    """Stub — receive a message (no-op here)."""
    pass


def receive_bytes():
    """Stub — receive raw bytes (no-op here)."""
    pass


def receive_bytes_into():
    """Stub — receive bytes into a buffer (no-op here)."""
    pass


def receive_full():
    """Stub — receive message plus metadata (no-op here)."""
    pass


def reset():
    """Stub — reset radio settings (no-op here)."""
    pass


def send():
    """Stub — send a message (no-op here)."""
    pass


def send_bytes():
    """Stub — send raw bytes (no-op here)."""
    pass
| 7.204545
| 25
| 0.583596
| 44
| 317
| 4.022727
| 0.409091
| 0.355932
| 0.316384
| 0.214689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036697
| 0.312303
| 317
| 43
| 26
| 7.372093
| 0.775229
| 0
| 0
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.434783
| false
| 0.434783
| 0
| 0
| 0.434783
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d42f63a9dc78db8f405b571060d5d20d5fe2ac9e
| 123
|
py
|
Python
|
__init__.py
|
maiconandsilva/receitas
|
924aa3acbd5b24286fb2a0527c2a2e133f904937
|
[
"MIT"
] | null | null | null |
__init__.py
|
maiconandsilva/receitas
|
924aa3acbd5b24286fb2a0527c2a2e133f904937
|
[
"MIT"
] | null | null | null |
__init__.py
|
maiconandsilva/receitas
|
924aa3acbd5b24286fb2a0527c2a2e133f904937
|
[
"MIT"
] | null | null | null |
from sqlalchemy_utils.listeners import force_auto_coercion
# Register sqlalchemy_utils' type-coercion listeners. Must be called before
# the entity (model) classes are defined, per the original author's note.
force_auto_coercion()
| 24.6
| 58
| 0.861789
| 17
| 123
| 5.941176
| 0.823529
| 0.178218
| 0.336634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105691
| 123
| 4
| 59
| 30.75
| 0.918182
| 0.308943
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d4331dde745665d41097acdf474f921c6a622045
| 73
|
py
|
Python
|
8_kyu/Reversed_Words.py
|
JoaoVitorLeite/CodeWars
|
156feda7273b37fdc90d007e1f638cf0dc73959f
|
[
"MIT"
] | null | null | null |
8_kyu/Reversed_Words.py
|
JoaoVitorLeite/CodeWars
|
156feda7273b37fdc90d007e1f638cf0dc73959f
|
[
"MIT"
] | null | null | null |
8_kyu/Reversed_Words.py
|
JoaoVitorLeite/CodeWars
|
156feda7273b37fdc90d007e1f638cf0dc73959f
|
[
"MIT"
] | null | null | null |
# 8 kyu
def reverse_words(s):
    """Return *s* with its whitespace-separated words in reverse order.

    Consecutive whitespace collapses to a single space in the result,
    because the words are re-joined with one space.
    """
    words = s.split()
    words.reverse()
    return " ".join(words)
| 12.166667
| 40
| 0.616438
| 11
| 73
| 4
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0.191781
| 73
| 5
| 41
| 14.6
| 0.728814
| 0.068493
| 0
| 0
| 0
| 0
| 0.015385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
2e04be4c31c4273fd8a956bbb4b7b56890f8872b
| 233
|
py
|
Python
|
src/postprocess/base_processor.py
|
jwpttcg66/ExcelToTransfer
|
3afc0cf088f4c991bbf4dc2d6d1f395a71cbc3c7
|
[
"Apache-2.0"
] | 47
|
2017-06-23T07:47:50.000Z
|
2022-03-07T22:36:19.000Z
|
xl2code/postprocess/base_processor.py
|
twjitm/ExcelToCode
|
d160c75b9b7a305f4b3367d85ee0550572869d3e
|
[
"MIT"
] | 1
|
2019-03-12T06:12:50.000Z
|
2019-04-03T00:50:01.000Z
|
xl2code/postprocess/base_processor.py
|
twjitm/ExcelToCode
|
d160c75b9b7a305f4b3367d85ee0550572869d3e
|
[
"MIT"
] | 23
|
2017-05-12T07:46:07.000Z
|
2022-01-22T03:19:50.000Z
|
# -*- coding: utf-8 -*-
class BaseProcessor(object):
    """Base class for post-processing steps.

    Stores the exporter and its generator info for subclasses, which are
    expected to override :meth:`run` with the actual work.
    """

    def __init__(self, exporter, generator_info):
        """Keep references to *exporter* and *generator_info* for run()."""
        super(BaseProcessor, self).__init__()
        self.generator_info = generator_info
        self.exporter = exporter

    def run(self):
        """Subclass hook; the base implementation does nothing."""
        pass
| 17.923077
| 46
| 0.716738
| 28
| 233
| 5.571429
| 0.535714
| 0.25
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005076
| 0.154506
| 233
| 12
| 47
| 19.416667
| 0.786802
| 0.090129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.142857
| 0
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
2e77d1a9d6aa8a2b551fbd613f310f1de5e10ebf
| 215
|
py
|
Python
|
stitch/__init__.py
|
pystitch/stitch
|
09a16da2f2af2be6a960e2338de488c8de2c2271
|
[
"MIT"
] | 468
|
2016-08-31T19:17:17.000Z
|
2022-03-07T13:51:53.000Z
|
stitch/__init__.py
|
hschuett/stitch
|
09a16da2f2af2be6a960e2338de488c8de2c2271
|
[
"MIT"
] | 40
|
2016-08-29T20:34:47.000Z
|
2020-09-21T03:25:49.000Z
|
stitch/__init__.py
|
TomAugspurger/stitch
|
09a16da2f2af2be6a960e2338de488c8de2c2271
|
[
"MIT"
] | 29
|
2016-08-31T19:44:19.000Z
|
2019-05-16T14:37:44.000Z
|
# Re-export the package's public API at the top level.
from .stitch import ( # noqa
    convert, convert_file, kernel_factory, run_code, Stitch
)
from .cli import cli # noqa
# Version lookup — presumably versioneer-generated (_version.get_versions);
# verify against the build tooling.
from ._version import get_versions
__version__ = get_versions()['version']
# Remove the helper so it does not leak into the public namespace.
del get_versions
| 21.5
| 59
| 0.753488
| 29
| 215
| 5.206897
| 0.517241
| 0.218543
| 0.238411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 215
| 9
| 60
| 23.888889
| 0.838889
| 0.04186
| 0
| 0
| 0
| 0
| 0.034483
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cf126e7af080062504718d41e07a5ea043576365
| 21,441
|
py
|
Python
|
survos/lib/io.py
|
paskino/SuRVoS
|
e01e784442e2e9f724826cdb70f3a50c034c6455
|
[
"Apache-2.0"
] | 22
|
2016-09-30T08:04:42.000Z
|
2022-03-05T07:24:18.000Z
|
survos/lib/io.py
|
paskino/SuRVoS
|
e01e784442e2e9f724826cdb70f3a50c034c6455
|
[
"Apache-2.0"
] | 81
|
2016-11-21T15:32:14.000Z
|
2022-02-20T00:22:27.000Z
|
survos/lib/io.py
|
paskino/SuRVoS
|
e01e784442e2e9f724826cdb70f3a50c034c6455
|
[
"Apache-2.0"
] | 6
|
2018-11-22T10:19:59.000Z
|
2022-02-04T06:15:48.000Z
|
import os
import numpy as np
import six
import mrcfile as mrc
# numpy structured-dtype field list describing the 1024-byte MRC/IMOD file
# header. Each entry is (field_name, numpy_typecode[, count]); the inline
# comments document the IMOD meaning of every field. Used by MRC_old to
# read and build headers via np.dtype(rhd).
rhd = [
    ("nx", "i4"),  # Number of columns
    ("ny", "i4"),  # Number of rows
    ("nz", "i4"),
    ("mode", "i4"),  # Types of pixels in the image. Values used by IMOD:
    # 0 = unsigned or signed bytes depending on flag in imodFlags
    # 1 = signed short integers (16 bits)
    # 2 = float (32 bits)
    # 3 = short * 2, (used for complex data)
    # 4 = float * 2, (used for complex data)
    # 6 = unsigned 16-bit integers (non-standard)
    # 16 = unsigned char * 3 (for rgb data, non-standard)
    ("nxstart", "i4"),  # Starting point of sub-image (not used in IMOD)
    ("nystart", "i4"),
    ("nzstart", "i4"),
    ("mx", "i4"),  # Grid size in X, Y and Z
    ("my", "i4"),
    ("mz", "i4"),
    ("xlen", "f4"),  # Cell size; pixel spacing = xlen/mx, ylen/my, zlen/mz
    ("ylen", "f4"),
    ("zlen", "f4"),
    ("alpha", "f4"),  # Cell angles - ignored by IMOD
    ("beta", "f4"),
    ("gamma", "f4"),
    # These need to be set to 1, 2, and 3 for pixel spacing to be interpreted correctly
    ("mapc", "i4"),  # map column 1=x,2=y,3=z.
    ("mapr", "i4"),  # map row 1=x,2=y,3=z.
    ("maps", "i4"),  # map section 1=x,2=y,3=z.
    # These need to be set for proper scaling of data
    ("amin", "f4"),  # Minimum pixel value
    ("amax", "f4"),  # Maximum pixel value
    ("amean", "f4"),  # Mean pixel value
    ("ispg", "i4"),  # space group number (ignored by IMOD)
    ("next", "i4"),  # number of bytes in extended header (called nsymbt in MRC standard)
    ("creatid", "i2"),  # used to be an ID number, is 0 as of IMOD 4.2.23
    ("extra_data", "V30"),  # (not used, first two bytes should be 0)
    # These two values specify the structure of data in the extended header; their meaning depend on whether the
    # extended header has the Agard format, a series of 4-byte integers then real numbers, or has data
    # produced by SerialEM, a series of short integers. SerialEM stores a float as two shorts, s1 and s2, by:
    # value = (sign of s1)*(|s1|*256 + (|s2| modulo 256)) * 2**((sign of s2) * (|s2|/256))
    ("nint", "i2"),  # Number of integers per section (Agard format) or number of bytes per section (SerialEM format)
    ("nreal", "i2"),  # Number of reals per section (Agard format) or bit
    # Number of reals per section (Agard format) or bit
    # flags for which types of short data (SerialEM format):
    # 1 = tilt angle * 100 (2 bytes)
    # 2 = piece coordinates for montage (6 bytes)
    # 4 = Stage position * 25 (4 bytes)
    # 8 = Magnification / 100 (2 bytes)
    # 16 = Intensity * 25000 (2 bytes)
    # 32 = Exposure dose in e-/A2, a float in 4 bytes
    # 128, 512: Reserved for 4-byte items
    # 64, 256, 1024: Reserved for 2-byte items
    # If the number of bytes implied by these flags does
    # not add up to the value in nint, then nint and nreal
    # are interpreted as ints and reals per section
    ("extra_data2", "V20"),  # extra data (not used)
    ("imodStamp", "i4"),  # 1146047817 indicates that file was created by IMOD
    ("imodFlags", "i4"),  # Bit flags: 1 = bytes are stored as signed
    # Explanation of type of data
    ("idtype", "i2"),  # ( 0 = mono, 1 = tilt, 2 = tilts, 3 = lina, 4 = lins)
    ("lens", "i2"),
    ("nd1", "i2"),  # for idtype = 1, nd1 = axis (1, 2, or 3)
    ("nd2", "i2"),
    ("vd1", "i2"),  # vd1 = 100. * tilt increment
    ("vd2", "i2"),  # vd2 = 100. * starting angle
    # Current angles are used to rotate a model to match a new rotated image. The three values in each set are
    # rotations about X, Y, and Z axes, applied in the order Z, Y, X.
    ("triangles", "f4", 6),  # 0,1,2 = original: 3,4,5 = current
    ("xorg", "f4"),  # Origin of image
    ("yorg", "f4"),
    ("zorg", "f4"),
    ("cmap", "S4"),  # Contains "MAP "
    #("stamp", "U4", 4), # First two bytes have 17 and 17 for big-endian or 68 and 65 for little-endian
    ("stamp", 'f4', 4),
    ("rms", "f4"),  # RMS deviation of densities from mean density
    ("nlabl", "i4"),  # Number of labels with useful data
]
class MRC(object):
    """In-memory MRC volume backed by the ``mrcfile`` package.

    Construct either from a filename (read immediately) or from a numpy
    array. Header fields, once a file has been read, are exposed through a
    dict-like interface: ``obj['nx']``, ``keys()``, ``values()``, ``items()``.
    """

    def __init__(self, X, stats=None):
        """Initialize from a filename (str) or a numpy array *X*.

        *stats* is passed through to parse() for API parity with MRC_old.
        """
        self.header = self.header_dict = self.data = None
        self.yz_swapped = False
        # six.string_types is exactly (str,) on Python 3; test str directly.
        if isinstance(X, str):
            self.read(X)
        else:
            # assuming X to be a numpy array
            self.parse(X, stats=stats)

    def __getitem__(self, item):
        """Return header field *item*, or None when unknown / no file read."""
        if self.header_dict is not None and \
                item in self.header_dict:
            return self.header_dict[item]
        return None

    def keys(self):
        """Header field names, or None when no file has been read."""
        if self.header_dict is not None:
            return self.header_dict.keys()

    def values(self):
        """Header field values, or None when no file has been read."""
        if self.header_dict is not None:
            return self.header_dict.values()

    def items(self):
        """Header (name, value) pairs, or None when no file has been read."""
        if self.header_dict is not None:
            return self.header_dict.items()

    def parse(self, X, stats=None):
        """Wrap numpy array *X* as the volume data; returns self.

        *stats* is accepted for interface parity but unused here.
        """
        self.data = np.ascontiguousarray(X)
        return self

    def read(self, filename):
        """Read *filename* with mrcfile; fills data, header and header_dict."""
        print("Reading MRC file {}".format(filename))
        # FIX: open read-only — nothing is written back here, and the old
        # mode='r+' failed on read-only files.
        with mrc.open(filename, mode='r') as mrc_file:
            print("Read data of len: {}".format(len(mrc_file.data)))
            self.header = mrc_file.header
            self.data = mrc_file.data
        # FIX: previously left as an empty dict (author-marked "FIX"), which
        # made __getitem__/keys/values/items useless after read(). Populate
        # it from the header record's fields.
        self.header_dict = {name: self.header[name]
                            for name in self.header.dtype.names}

    def save(self, filename):
        """Write the current data to *filename* as a new MRC file."""
        print("Saving MRC file: {}".format(filename))
        new_mrcfile = mrc.new(filename)
        new_mrcfile.set_data(self.data)
        new_mrcfile.update_header_from_data()
        new_mrcfile.update_header_stats()
        new_mrcfile.close()
class MRC_old(object):
    """Legacy hand-rolled MRC reader/writer built on the ``rhd`` header dtype.

    NOTE: the previous MRC export class broke with python/numpy version
    changes; current known bug: the numpy structured array gives an
    "underlying view is not C-contiguous" error when writing.

    Fixes applied in this revision:
      * removed a large dead ``values`` tuple in parse() that was built and
        then immediately overwritten by a second assignment;
      * read(): the byte-order expression had ``"<"`` in *both* branches of
        the conditional, so big-endian files (stamp bytes 17, 17) were
        decoded as little-endian — the else branch now yields ``">"``;
      * read(): the extended header is skipped with a seek *relative* to the
        current position; the old absolute ``fd.seek(header['next'])``
        jumped back inside the 1024-byte main header.
    """

    def __init__(self, X, stats=None):
        """Initialize from a filename (str) or a numpy array *X*."""
        self.header = self.header_dict = self.data = None
        self.yz_swapped = False
        # six.string_types is exactly (str,) on Python 3; test str directly.
        if isinstance(X, str):
            self.read(X)
        else:
            # assuming X to be a numpy array
            self.parse(X, stats=stats)

    def __getitem__(self, item):
        """Return header field *item*, or None when unknown / header unset."""
        if self.header_dict is not None and \
                item in self.header_dict:
            return self.header_dict[item]
        return None

    def keys(self):
        """Header field names, or None when the header is unset."""
        if self.header_dict is not None:
            return self.header_dict.keys()

    def values(self):
        """Header field values, or None when the header is unset."""
        if self.header_dict is not None:
            return self.header_dict.values()

    def items(self):
        """Header (name, value) pairs, or None when the header is unset."""
        if self.header_dict is not None:
            return self.header_dict.items()

    def parse(self, X, stats=None):
        """Build an MRC header for numpy volume *X* and store both.

        *stats* may supply (amin, amax, amean); otherwise they are computed
        from *X*. Returns self.
        """
        if stats is not None:
            amin, amax, amean = stats
        else:
            amin = X.min()
            amax = X.max()
            amean = X.mean()
        dt = np.dtype(rhd)
        imodFlags = 0
        # Map numpy dtype -> MRC "mode" field (see rhd comments).
        if X.dtype in [np.uint8, np.int8]:
            mode = 0
            imodFlags = (X.dtype == np.int8)
        elif X.dtype == np.int16:
            mode = 1
        elif X.dtype == np.float32:
            mode = 2
        elif X.dtype == np.complex64:
            mode = 4
        elif X.dtype == np.uint16:
            mode = 6
        else:
            mode = 16
        # NOTE: a first, string-based `values` tuple used to be built here
        # and immediately discarded; it has been removed as dead code.
        values = (
            X.shape[2],  # Number of columns
            X.shape[1],  # Number of rows
            X.shape[0],
            mode,  # Types of pixels in the image. Values used by IMOD:
            # 0 = unsigned or signed bytes depending on flag in imodFlags
            # 1 = signed short integers (16 bits)
            # 2 = float (32 bits)
            # 3 = short * 2, (used for complex data)
            # 4 = float * 2, (used for complex data)
            # 6 = unsigned 16-bit integers (non-standard)
            # 16 = unsigned char * 3 (for rgb data, non-standard)
            0,  # Starting point of sub-image (not used in IMOD)
            0,
            0,
            X.shape[2],  # Grid size in X, Y and Z
            X.shape[1],
            X.shape[0],
            X.shape[2],  # Cell size; pixel spacing = xlen/mx, ylen/my, zlen/mz
            X.shape[1],
            X.shape[0],
            90.0,  # Cell angles - ignored by IMOD
            90.0,
            90.0,
            # These need to be set to 1, 2, and 3 for pixel spacing to be interpreted correctly
            1,  # map column 1=x,2=y,3=z.
            2,  # map row 1=x,2=y,3=z.
            3,  # map section 1=x,2=y,3=z.
            # These need to be set for proper scaling of data
            amin,  # Minimum pixel value
            amax,  # Maximum pixel value
            amean,  # Mean pixel value
            1,  # space group number (ignored by IMOD)
            0,  # number of bytes in extended header (called nsymbt in MRC standard)
            0,  # used to be an ID number, is 0 as of IMOD 4.2.23
            bytes([0x00] * 30),  # (not used, first two bytes should be 0)
            # These two values specify the structure of data in the extended header; their meaning depend on whether the
            # extended header has the Agard format, a series of 4-byte integers then real numbers, or has data
            # produced by SerialEM, a series of short integers. SerialEM stores a float as two shorts, s1 and s2, by:
            # value = (sign of s1)*(|s1|*256 + (|s2| modulo 256)) * 2**((sign of s2) * (|s2|/256))
            0,  # Number of integers per section (Agard format) or number of bytes per section (SerialEM format)
            0,  # Number of reals per section (Agard format) or bit
            # flags for which types of short data (SerialEM format):
            # 1 = tilt angle * 100 (2 bytes)
            # 2 = piece coordinates for montage (6 bytes)
            # 4 = Stage position * 25 (4 bytes)
            # 8 = Magnification / 100 (2 bytes)
            # 16 = Intensity * 25000 (2 bytes)
            # 32 = Exposure dose in e-/A2, a float in 4 bytes
            # 128, 512: Reserved for 4-byte items
            # 64, 256, 1024: Reserved for 2-byte items
            # If the number of bytes implied by these flags does
            # not add up to the value in nint, then nint and nreal
            # are interpreted as ints and reals per section
            bytes([0x00] * 20),  # extra data (not used)
            0,  # 1146047817 indicates that file was created by IMOD
            imodFlags,  # Bit flags: 1 = bytes are stored as signed
            # Explanation of type of data
            0,  # ( 0 = mono, 1 = tilt, 2 = tilts, 3 = lina, 4 = lins)
            0,
            0,  # for idtype = 1, nd1 = axis (1, 2, or 3)
            0,
            0,  # vd1 = 100. * tilt increment
            0,  # vd2 = 100. * starting angle
            # Current angles are used to rotate a model to match a new rotated image. The three values in each set are
            # rotations about X, Y, and Z axes, applied in the order Z, Y, X.
            (0., 0., 0., 90., 0., 0.),  # 0,1,2 = original: 3,4,5 = current
            0.,  # Origin of image
            X.shape[1] / 2.,
            0.,
            'MAP ',  # Contains "MAP "
            (68.0, 65.0, 0.0, 0.0),  # First two bytes have 17 and 17 for big-endian or 68 and 65 for little-endian
            0.0,  # RMS deviation of densities from mean density
            6,  # Number of labels with useful data
        )
        header = np.array(values, dtype=dt)
        # (a redundant second `dt = np.dtype(rhd)` was removed here)
        header_dict = {}
        for name in dt.names:
            header_dict[name] = header[name]
        self.header = header
        self.data = np.ascontiguousarray(X)
        self.header_dict = header_dict
        return self

    def read(self, filename):
        """Read MRC *filename* into self.data / self.header / self.header_dict."""
        rec_header_dtype = np.dtype(rhd)
        assert rec_header_dtype.itemsize == 1024
        fd = open(filename, 'rb')
        stats = os.stat(filename)  # kept for the (disabled) size check below
        header = np.fromfile(fd, dtype=rhd, count=1)
        if header['next'] > 0:
            # FIX: skip the extended header relative to the current position
            # (just past the 1024-byte main header); the previous absolute
            # seek re-positioned inside the main header.
            fd.seek(int(header['next']), 1)  # ignore extended header
        mode = int(header['mode'])
        # FIX: the else branch used to be "<" as well, mis-decoding
        # big-endian files. Stamp bytes 68, 65 mark little-endian data.
        little = header['stamp'][0, 0] == 68 and header['stamp'][0, 1] == 65
        bo = "<" if little else ">"  # BitOrder: little or big endian
        sign = "i1" if header['imodFlags'] == 1 else "u1"  # signed or unsigned
        # mode:    0     1     2    3     4     5     6     7..15                                                      16
        dtype = [sign, "i2", "f", "c4", "c8", None, "u2", None, None, None, None, None, None, None, None, None, "u1"][mode]
        dsize = [1, 2, 4, 4, 8, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3][mode]
        # data dimensions
        nx, ny, nz = int(header['nx']), int(header['ny']), int(header['nz'])
        img_size = nx * ny * nz * dsize
        img_str = fd.read(img_size)
        dtype = bo + dtype
        # Make sure that we have read the whole file
        assert not fd.read(), "Error loading the file"
        # assert stats.st_size == header.itemsize + img_size
        fd.close()
        if mode == 16:
            # rgb data carries a trailing channel axis
            data = np.ndarray(shape=(nx, ny, nz, 3), dtype=dtype, buffer=img_str, order='F')
        else:
            data = np.ndarray(shape=(nx, ny, nz), dtype=dtype, buffer=img_str, order='F')
        # Reorder axes so data is indexed [z, y, x].
        # NOTE(review): indentation was lost in transit — assuming the swap
        # applies to both branches; confirm against project history.
        data = np.swapaxes(data, 0, 2)
        header_dict = {}
        for name in header.dtype.names:
            header_dict[name] = header[name][0] if len(header[name]) == 1 else header[name]
        self.header = header
        self.data = np.ascontiguousarray(data)
        self.header_dict = header_dict
        return self

    def save(self, filename):
        """Write the stored header and data to *filename* (Fortran order)."""
        with open(filename, 'wb') as fd:
            fd.write(self.header.data)
            data = self.data
            data = np.swapaxes(data, 0, 2)
            if self.yz_swapped:
                data = np.swapaxes(data, 1, 2)
            data = np.asfortranarray(data)
            fd.write(data.data)
| 46.109677
| 127
| 0.47773
| 2,685
| 21,441
| 3.785475
| 0.146369
| 0.006297
| 0.03168
| 0.003542
| 0.774203
| 0.75364
| 0.734947
| 0.719402
| 0.702971
| 0.68172
| 0
| 0.061811
| 0.424281
| 21,441
| 464
| 128
| 46.209052
| 0.761585
| 0.418777
| 0
| 0.54417
| 0
| 0
| 0.076032
| 0
| 0
| 0
| 0.000653
| 0
| 0.007067
| 1
| 0.056537
| false
| 0
| 0.014134
| 0
| 0.123675
| 0.010601
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cf468a8f614c8a4f2f5bc7bca1cfa9750a4aa03f
| 58
|
py
|
Python
|
quiz_bot/cli/group.py
|
livestreamx/quiz-bot
|
e08e9161d908ce9cb851cd6c689f04703db1928f
|
[
"MIT"
] | 1
|
2022-03-05T13:42:08.000Z
|
2022-03-05T13:42:08.000Z
|
quiz_bot/cli/group.py
|
livestreamx/quiz-bot
|
e08e9161d908ce9cb851cd6c689f04703db1928f
|
[
"MIT"
] | null | null | null |
quiz_bot/cli/group.py
|
livestreamx/quiz-bot
|
e08e9161d908ce9cb851cd6c689f04703db1928f
|
[
"MIT"
] | 2
|
2021-06-20T10:40:25.000Z
|
2022-02-15T04:26:58.000Z
|
import click
@click.group()
def app() -> None:
    # Root Click command group; presumably subcommands are registered on it
    # elsewhere via @app.command() — verify against the callers. A comment is
    # used deliberately instead of a docstring, since Click would surface a
    # docstring as the CLI help text.
    pass
| 8.285714
| 18
| 0.603448
| 8
| 58
| 4.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 58
| 6
| 19
| 9.666667
| 0.795455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
cf70795be65d6665d53daaacbef418d5b5a98561
| 53
|
py
|
Python
|
ExactHistogramSpecification/__init__.py
|
jkschluesener/ExactHistogramSpecification
|
ee95ee7e8a672e510aac0e67b6780722503e4b40
|
[
"Apache-2.0"
] | null | null | null |
ExactHistogramSpecification/__init__.py
|
jkschluesener/ExactHistogramSpecification
|
ee95ee7e8a672e510aac0e67b6780722503e4b40
|
[
"Apache-2.0"
] | null | null | null |
ExactHistogramSpecification/__init__.py
|
jkschluesener/ExactHistogramSpecification
|
ee95ee7e8a672e510aac0e67b6780722503e4b40
|
[
"Apache-2.0"
] | null | null | null |
from .histogram_matching import ExactHistogramMatcher
| 53
| 53
| 0.924528
| 5
| 53
| 9.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 53
| 1
| 53
| 53
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cf7a55688cee308add364424d082e86290dfb780
| 6,551
|
py
|
Python
|
recipes/site_listers/__init__.py
|
cfeenstra67/recipes
|
a2d296500b4ff70e11ff3177a1092a033498f82a
|
[
"MIT"
] | null | null | null |
recipes/site_listers/__init__.py
|
cfeenstra67/recipes
|
a2d296500b4ff70e11ff3177a1092a033498f82a
|
[
"MIT"
] | null | null | null |
recipes/site_listers/__init__.py
|
cfeenstra67/recipes
|
a2d296500b4ff70e11ff3177a1092a033498f82a
|
[
"MIT"
] | null | null | null |
from recipes.site_listers.acouplecooks import ACoupleCooksLister
from recipes.site_listers.allrecipes import AllRecipesLister
from recipes.site_listers.ambitiouskitchen import AmbitiousKitchenLister
from recipes.site_listers.archanaskitchen import ArchanasKitchenLister
from recipes.site_listers.averiecooks import AverieCooksLister
from recipes.site_listers.bakingmischief import BakingMischiefLister
from recipes.site_listers.bakingsense import BakingSenseLister
from recipes.site_listers.bbc import BBCLister
from recipes.site_listers.bbcgoodfood import BBCGoodFoodLister
from recipes.site_listers.bettycrocker import BettyCrockerLister
from recipes.site_listers.bonappetit import BonAppetitLister
from recipes.site_listers.bowlofdelicious import BowlOfDeliciousLister
from recipes.site_listers.budgetbytes import BudgetBytesLister
from recipes.site_listers.castironketo import CastIronKetoLister
from recipes.site_listers.closetcooking import ClosetCookingLister
from recipes.site_listers.cookeatshare import CookEatShareLister
from recipes.site_listers.cookieandkate import CookieAndKateLister
from recipes.site_listers.cookstr import CookStrLister
from recipes.site_listers.eatingbirdfood import EatingBirdFoodLister
from recipes.site_listers.eatsmarter import EatSmarterLister
from recipes.site_listers.eatwhattonight import EatWhatTonightLister
from recipes.site_listers.epicurious import EpicuriousLister
from recipes.site_listers.food import FoodLister
from recipes.site_listers.foodnetwork import FoodNetworkLister
from recipes.site_listers.foodrepublic import FoodRepublicLister
from recipes.site_listers.forksoverknives import ForksOverKnivesLister
from recipes.site_listers.gimmesomeoven import GimmeSomeOvenLister
from recipes.site_listers.gonnawantseconds import GonnaWantSecondsLister
from recipes.site_listers.greatbritishchefs import GreatBritishChefsLister
from recipes.site_listers.halfbakedharvest import HalfBakedHarvestLister
from recipes.site_listers.headbangerskitchen import HeadBangersKitchenLister
from recipes.site_listers.hellofresh import HelloFreshLister
from recipes.site_listers.hostthetoast import HostTheToastLister
from recipes.site_listers.indianhealthyrecipes import IndianHealthyRecipesLister
from recipes.site_listers.innit import InnitLister
from recipes.site_listers.jamieoliver import JamieOliverLister
from recipes.site_listers.jimcooksgoodfood import JimCooksGoodFoodLister
from recipes.site_listers.joyfoodsunshine import JoyFoodSunshineLister
from recipes.site_listers.justataste import JustATasteLister
from recipes.site_listers.justbento import JustBentoLister
from recipes.site_listers.kennymcgovern import KennyMcgovernLister
from recipes.site_listers.kingarthurbaking import KingArthurBakingLister
from recipes.site_listers.lecremedelacrumb import LeCremeDeLaCrumbLister
from recipes.site_listers.littlespicejar import LittleSpiceJarLister
from recipes.site_listers.livelytable import LivelyTableLister
from recipes.site_listers.lovingitvegan import LovingItVeganLister
from recipes.site_listers.marthastewart import MarthaStewartLister
from recipes.site_listers.melskitchencafe import MelsKitchenCafeLister
from recipes.site_listers.minimalistbaker import MinimalistBakerLister
from recipes.site_listers.momswithcrockpots import MomsWithCrockpotsLister
from recipes.site_listers.mybakingaddiction import MyBakingAddictionLister
from recipes.site_listers.myrecipes import MyRecipesLister
from recipes.site_listers.nourishedbynutrition import NourishedByNutritionLister
from recipes.site_listers.nutritionbynathalie import NutritionByNathalieLister
from recipes.site_listers._101cookbooks import _101CookbooksLister
from recipes.site_listers.paleorunningmomma import PaleoRunningMommaLister
from recipes.site_listers.paninihappy import PaniniHappyLister
from recipes.site_listers.practicalselfreliance import PracticalSelfRelianceLister
from recipes.site_listers.primaledgehealth import PrimalEdgeHealthLister
from recipes.site_listers.purelypope import PurelyPopeLister
from recipes.site_listers.purplecarrot import PurpleCarrotLister
from recipes.site_listers.rachlmansfield import RachlmansFieldLister
from recipes.site_listers.rainbowplantlife import RainbowPlantLifeLister
from recipes.site_listers.realsimple import RealSimpleLister
from recipes.site_listers.recipetineats import RecipeTinEatsLister
from recipes.site_listers.redhousespice import RedHouseSpiceLister
from recipes.site_listers.sallysbakingaddiction import SallysBakingAddictionLister
from recipes.site_listers.saveur import SaveurLister
from recipes.site_listers.seriouseats import SeriousEatsLister
from recipes.site_listers.simplyquinoa import SimplyQuinoaLister
from recipes.site_listers.simplyrecipes import SimplyRecipesLister
from recipes.site_listers.simplywhisked import SimplyWhiskedLister
from recipes.site_listers.skinnytaste import SkinnyTasteLister
from recipes.site_listers.southernliving import SouthernLivingLister
from recipes.site_listers.spendwithpennies import SpendWithPenniesLister
from recipes.site_listers.steamykitchen import SteamyKitchenLister
from recipes.site_listers.sunbasket import SunBasketLister
from recipes.site_listers.sweetcsdesigns import SweetCsDesignsLister
from recipes.site_listers.sweetpeasandsaffron import SweetPeasAndSaffronLister
from recipes.site_listers.tasteofhome import TasteOfHomeLister
from recipes.site_listers.tastesoflizzyt import TastesOfLizzyTLister
from recipes.site_listers.tastykitchen import TastyKitchenLister
from recipes.site_listers.theclevercarrot import TheCleverCarrotLister
from recipes.site_listers.thehappyfoodie import TheHappyFoodieLister
from recipes.site_listers.thekitchenmagpie import TheKitchenMagpieLister
from recipes.site_listers.thekitchn import TheKitchnLister
from recipes.site_listers.thenutritiouskitchen import TheNutritiousKitchenLister
from recipes.site_listers.thespruceeats import TheSpruceEatsLister
from recipes.site_listers.thevintagemixer import TheVintageMixerLister
from recipes.site_listers.thewoksoflife import TheWoksOfLifeLister
from recipes.site_listers.twopeasandtheirpod import TwoPeasAndTheirPodLister
from recipes.site_listers.vanillaandbean import VanillaAndBeanLister
from recipes.site_listers.vegrecipesofindia import VegRecipesOfIndiaLister
from recipes.site_listers.watchwhatueat import WatchWhatUEatLister
from recipes.site_listers.whatsgabycooking import WhatsGabyCookingLister
from recipes.site_listers.wholefoodsmarket import WholeFoodsMarketLister
from recipes.site_listers.wikibooks import WikibooksLister
| 66.846939
| 82
| 0.911159
| 679
| 6,551
| 8.645066
| 0.293078
| 0.181772
| 0.247871
| 0.363543
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000974
| 0.059228
| 6,551
| 97
| 83
| 67.536082
| 0.951485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cf7c17f66c443150478272cca7ffcef07ee11f88
| 328
|
py
|
Python
|
lab3_SAT/sat/satisfiability.py
|
j-adamczyk/ADPTO_templates
|
e0a4e77ba8de21fe966388ccee66ef62224a2d99
|
[
"MIT"
] | null | null | null |
lab3_SAT/sat/satisfiability.py
|
j-adamczyk/ADPTO_templates
|
e0a4e77ba8de21fe966388ccee66ef62224a2d99
|
[
"MIT"
] | null | null | null |
lab3_SAT/sat/satisfiability.py
|
j-adamczyk/ADPTO_templates
|
e0a4e77ba8de21fe966388ccee66ef62224a2d99
|
[
"MIT"
] | 1
|
2022-03-25T07:25:26.000Z
|
2022-03-25T07:25:26.000Z
|
import matplotlib.pyplot as plt
import numpy as np
import pycosat
def calculate_SAT_probabilities_and_plot() -> None:
    """
    Calculate probability of a random formula being satisfiable based on its
    size n (number of variables). The result is plotted and shown.
    Conclusion: TODO write conclusion
    """
    # NOTE(review): this is a template stub with no implementation yet — the
    # matplotlib, numpy and pycosat imports above are presumably for the
    # intended implementation; confirm before relying on this function.
    pass
| 23.428571
| 76
| 0.731707
| 45
| 328
| 5.244444
| 0.844444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213415
| 328
| 13
| 77
| 25.230769
| 0.914729
| 0.518293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 1
| 0.2
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
d8554fc184769407a0a282c94adfb4300bba0cfe
| 27
|
py
|
Python
|
bindings/python/py_src/yotsuba/version.py
|
yutayamazaki/yotsuba
|
5fac8da1a9e0c9e2929a1041a51e15f104cfe1fc
|
[
"MIT"
] | null | null | null |
bindings/python/py_src/yotsuba/version.py
|
yutayamazaki/yotsuba
|
5fac8da1a9e0c9e2929a1041a51e15f104cfe1fc
|
[
"MIT"
] | 3
|
2020-08-28T18:19:07.000Z
|
2020-09-02T15:16:15.000Z
|
bindings/python/py_src/yotsuba/version.py
|
yutayamazaki/yotsuba
|
5fac8da1a9e0c9e2929a1041a51e15f104cfe1fc
|
[
"MIT"
] | null | null | null |
# Single source of truth for the package version string (annotated for type
# checkers); bump here when cutting a release.
__version__: str = '0.1.3'
| 13.5
| 26
| 0.62963
| 5
| 27
| 2.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 0.148148
| 27
| 1
| 27
| 27
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d85ab9c51aed780355de019fe468a59f3e852eab
| 113
|
py
|
Python
|
lycheepy/configuration/configuration/validators/__init__.py
|
gabrielbazan/lycheepy
|
f314d3f591f4a449b37ead9baf26b9f5d58d9f0d
|
[
"MIT"
] | 17
|
2018-08-14T02:42:43.000Z
|
2022-02-25T00:38:47.000Z
|
lycheepy/configuration/configuration/validators/__init__.py
|
gabrielbazan/lycheepy
|
f314d3f591f4a449b37ead9baf26b9f5d58d9f0d
|
[
"MIT"
] | 1
|
2018-11-01T02:55:01.000Z
|
2018-11-01T02:55:01.000Z
|
lycheepy/configuration/configuration/validators/__init__.py
|
gabrielbazan/lycheepy
|
f314d3f591f4a449b37ead9baf26b9f5d58d9f0d
|
[
"MIT"
] | 4
|
2018-10-30T16:01:49.000Z
|
2021-06-08T20:21:07.000Z
|
from process import ProcessValidator
from chain import ChainValidator
from repository import RepositoryValidator
| 28.25
| 42
| 0.893805
| 12
| 113
| 8.416667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106195
| 113
| 3
| 43
| 37.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d861d0a05a0179a645980e9a0c937a7ec5cb1b3f
| 48
|
py
|
Python
|
data_layer/postgresql/__init__.py
|
xyla-io/data_layer
|
8d10aab6cae7f63eacf4e139e09576dd14a87354
|
[
"MIT"
] | null | null | null |
data_layer/postgresql/__init__.py
|
xyla-io/data_layer
|
8d10aab6cae7f63eacf4e139e09576dd14a87354
|
[
"MIT"
] | null | null | null |
data_layer/postgresql/__init__.py
|
xyla-io/data_layer
|
8d10aab6cae7f63eacf4e139e09576dd14a87354
|
[
"MIT"
] | null | null | null |
from .postgresql import PostgreSQLLayer as Layer
| 48
| 48
| 0.875
| 6
| 48
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 1
| 48
| 48
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d8ad805b2c80809157544a4be44a6d6431a27cb3
| 252
|
py
|
Python
|
tests/test_torchreinforce.py
|
Lucien-MG/torchreinforce
|
5ba852bb255c14140d7bc300a44e60e7b4b572ff
|
[
"MIT"
] | null | null | null |
tests/test_torchreinforce.py
|
Lucien-MG/torchreinforce
|
5ba852bb255c14140d7bc300a44e60e7b4b572ff
|
[
"MIT"
] | null | null | null |
tests/test_torchreinforce.py
|
Lucien-MG/torchreinforce
|
5ba852bb255c14140d7bc300a44e60e7b4b572ff
|
[
"MIT"
] | null | null | null |
import torchreinforce
from torchreinforce import __version__
def test_version():
    """Ensure the package reports the expected release number."""
    expected = '0.1.0'
    assert __version__ == expected
def test_import_agents():
    """The top-level package must expose an `agents` submodule."""
    exported = dir(torchreinforce)
    assert 'agents' in exported
def test_import_io():
    # NOTE(review): body is identical to test_import_agents and checks for
    # 'agents', not 'io' — looks like a copy-paste slip. Confirm whether
    # `'io' in dir(torchreinforce)` was the intended assertion.
    assert 'agents' in dir(torchreinforce)
| 21
| 42
| 0.753968
| 32
| 252
| 5.53125
| 0.40625
| 0.118644
| 0.146893
| 0.19209
| 0.350282
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.154762
| 252
| 11
| 43
| 22.909091
| 0.816901
| 0
| 0
| 0.25
| 0
| 0
| 0.06746
| 0
| 0
| 0
| 0
| 0
| 0.375
| 1
| 0.375
| true
| 0
| 0.5
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d8b42e4688f2756d39ce1179491402338af0c08c
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/future/backports/email/quoprimime.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/future/backports/email/quoprimime.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/future/backports/email/quoprimime.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/c3/dd/d6/e5781d169c86683a83049ae75c5feffe7a47e6bdb45ae0319ab033c908
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.395833
| 0
| 96
| 1
| 96
| 96
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d8ed4b4621107ed0e18965ebc96c9179f69878ce
| 44
|
py
|
Python
|
tests/addition.py
|
manniepmkam/PvZ
|
e76aff52791b47b8e6c81c8efe409920df64e000
|
[
"MIT"
] | 1
|
2021-05-20T02:31:33.000Z
|
2021-05-20T02:31:33.000Z
|
tests/addition.py
|
manniepmkam/PvZ
|
e76aff52791b47b8e6c81c8efe409920df64e000
|
[
"MIT"
] | null | null | null |
tests/addition.py
|
manniepmkam/PvZ
|
e76aff52791b47b8e6c81c8efe409920df64e000
|
[
"MIT"
] | 1
|
2019-11-03T15:14:09.000Z
|
2019-11-03T15:14:09.000Z
|
def addition(X, Y):
    """Return the sum of *X* and *Y* (any types supporting ``+``)."""
    return X + Y
| 11
| 18
| 0.522727
| 9
| 44
| 2.555556
| 0.666667
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.340909
| 44
| 3
| 19
| 14.666667
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
2b3bfecd47a1cd22102736c69daa62a8f28fe45d
| 147
|
py
|
Python
|
scheduleapp/admin.py
|
MiSokol/ScheduleApp
|
01b7eec406c64de36a0fa70eee1d2c400c792ec3
|
[
"MIT"
] | null | null | null |
scheduleapp/admin.py
|
MiSokol/ScheduleApp
|
01b7eec406c64de36a0fa70eee1d2c400c792ec3
|
[
"MIT"
] | 1
|
2018-05-03T17:10:16.000Z
|
2018-05-03T17:10:16.000Z
|
scheduleapp/admin.py
|
MiSokol/ScheduleApp
|
01b7eec406c64de36a0fa70eee1d2c400c792ec3
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import User, Task
# Expose the User and Task models in the Django admin with the default
# ModelAdmin options (no custom admin classes are defined here).
admin.site.register(User)
admin.site.register(Task)
| 18.375
| 32
| 0.789116
| 22
| 147
| 5.272727
| 0.545455
| 0.155172
| 0.293103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 147
| 7
| 33
| 21
| 0.899225
| 0.176871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2b5bbf3d58384a6e5dd4537c33623dc93592053b
| 22
|
py
|
Python
|
python/testData/completion/moduleFromNamespacePackage/a.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/moduleFromNamespacePackage/a.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/moduleFromNamespacePackage/a.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from pkg import m7
m7
| 7.333333
| 18
| 0.772727
| 5
| 22
| 3.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0.227273
| 22
| 3
| 19
| 7.333333
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2b62dbc2f534750ff6d637433002dd293c01ac8d
| 150
|
py
|
Python
|
components/layer/__init__.py
|
huzexi/SADenseNet
|
95ab1b6b29f6dff973f3770ecd8b23cca37c9f0b
|
[
"BSD-3-Clause"
] | 8
|
2021-12-10T12:50:06.000Z
|
2022-03-11T21:50:02.000Z
|
components/layer/__init__.py
|
huzexi/SADenseNet
|
95ab1b6b29f6dff973f3770ecd8b23cca37c9f0b
|
[
"BSD-3-Clause"
] | null | null | null |
components/layer/__init__.py
|
huzexi/SADenseNet
|
95ab1b6b29f6dff973f3770ecd8b23cca37c9f0b
|
[
"BSD-3-Clause"
] | null | null | null |
from .Reorder import Reorder
from .AngularConv import AngularConv
from .SpatialConv import SpatialConv
from .CorrelationBlock import CorrelationBlock
| 30
| 46
| 0.866667
| 16
| 150
| 8.125
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106667
| 150
| 4
| 47
| 37.5
| 0.970149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
99498dc92245bb6f0a919d8ce41b9eeac94beb92
| 138
|
py
|
Python
|
venv/Lib/site-packages/nipype/interfaces/cat12/__init__.py
|
richung99/digitizePlots
|
6b408c820660a415a289726e3223e8f558d3e18b
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/nipype/interfaces/cat12/__init__.py
|
richung99/digitizePlots
|
6b408c820660a415a289726e3223e8f558d3e18b
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/nipype/interfaces/cat12/__init__.py
|
richung99/digitizePlots
|
6b408c820660a415a289726e3223e8f558d3e18b
|
[
"MIT"
] | null | null | null |
from .preprocess import CAT12Segment
from .surface import (
ExtractAdditionalSurfaceParameters,
ExtractROIBasedSurfaceMeasures,
)
| 23
| 39
| 0.818841
| 9
| 138
| 12.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016807
| 0.137681
| 138
| 5
| 40
| 27.6
| 0.932773
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
99680458f1ed4f9db08434e7d0ecfbc5bbab3b4a
| 209
|
py
|
Python
|
django_actionlog/handler/null.py
|
randlet/django-actionlog
|
1dee6c6d18c18312da4b34c84efe76ba01f42f69
|
[
"BSD-3-Clause"
] | 3
|
2016-12-03T05:35:26.000Z
|
2017-04-30T05:28:28.000Z
|
django_actionlog/handler/null.py
|
randlet/django-actionlog
|
1dee6c6d18c18312da4b34c84efe76ba01f42f69
|
[
"BSD-3-Clause"
] | 6
|
2016-12-29T01:00:29.000Z
|
2018-01-25T10:01:39.000Z
|
django_actionlog/handler/null.py
|
randlet/django-actionlog
|
1dee6c6d18c18312da4b34c84efe76ba01f42f69
|
[
"BSD-3-Clause"
] | 3
|
2016-12-28T14:23:15.000Z
|
2019-05-16T20:57:30.000Z
|
# -*- coding: utf-8 -*-
from . import handler_manager
class Nullout(object):
    """Action-log handler that silently discards everything it receives.

    Useful as a sink when action logging should be disabled without
    changing calling code.
    """

    def __init__(self, config):
        # The config argument is accepted only to satisfy the handler
        # interface; a null sink needs no configuration.
        pass

    def emit(self, data):
        # Deliberately a no-op: the data is dropped.
        pass
# Make the discard-everything handler selectable under the name 'null'.
handler_manager.register('null', Nullout)
| 13.933333
| 41
| 0.626794
| 25
| 209
| 5
| 0.76
| 0.224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.244019
| 209
| 14
| 42
| 14.928571
| 0.78481
| 0.100478
| 0
| 0.285714
| 0
| 0
| 0.021505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.285714
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
9973c0b5e197d5068f67f29078cdcee2138dee95
| 265,227
|
py
|
Python
|
prisim/delay_spectrum.py
|
LBJ-Wade/PRISim
|
04f790523dadca0a7215e3cb18e7d0f39881a96c
|
[
"MIT"
] | 11
|
2016-02-08T21:54:45.000Z
|
2021-07-15T04:36:53.000Z
|
prisim/delay_spectrum.py
|
LBJ-Wade/PRISim
|
04f790523dadca0a7215e3cb18e7d0f39881a96c
|
[
"MIT"
] | 38
|
2016-11-11T00:42:06.000Z
|
2020-06-13T03:02:23.000Z
|
prisim/delay_spectrum.py
|
LBJ-Wade/PRISim
|
04f790523dadca0a7215e3cb18e7d0f39881a96c
|
[
"MIT"
] | 7
|
2016-10-31T19:45:29.000Z
|
2020-05-29T00:01:25.000Z
|
from __future__ import division
import numpy as NP
import multiprocessing as MP
import itertools as IT
import progressbar as PGB
# import aipy as AP
import astropy
from astropy.io import fits
import astropy.cosmology as CP
import scipy.constants as FCNST
import healpy as HP
from distutils.version import LooseVersion
import yaml, h5py
from astroutils import writer_module as WM
from astroutils import constants as CNST
from astroutils import DSP_modules as DSP
from astroutils import mathops as OPS
from astroutils import geometry as GEOM
from astroutils import lookup_operations as LKP
import prisim
from prisim import primary_beams as PB
from prisim import interferometry as RI
from prisim import baseline_delay_horizon as DLY
# pyuvdata is an optional dependency: record whether it is importable so
# downstream code can branch on uvbeam_module_found before touching UVBeam.
try:
    from pyuvdata import UVBeam
except ImportError:
    uvbeam_module_found = False
else:
    uvbeam_module_found = True
# Filesystem location of the installed prisim package (trailing slash kept
# so it can be prepended directly to relative resource paths).
prisim_path = prisim.__path__[0]+'/'
# cosmo100 = CP.FlatLambdaCDM(H0=100.0, Om0=0.27) # Using H0 = 100 km/s/Mpc
cosmoPlanck15 = CP.Planck15 # Planck 2015 cosmology
cosmo100 = cosmoPlanck15.clone(name='Modified Planck 2015 cosmology with h=1.0', H0=100.0) # Modified Planck 2015 cosmology with h=1.0, H= 100 km/s/Mpc
#################################################################################
def _astropy_columns(cols, tabtype='BinTableHDU'):
    """
    ----------------------------------------------------------------------------
    !!! FOR INTERNAL USE ONLY !!!
    This internal routine checks for Astropy version and produces the FITS
    columns based on the version

    Inputs:

    cols    [list of Astropy FITS columns] These are a list of Astropy FITS
            columns

    tabtype [string] specifies table type - 'BinTableHDU' (default) for binary
            tables and 'TableHDU' for ASCII tables

    Outputs:

    columns [Astropy FITS column data]

    Raises:

    NameError if cols is not specified, ValueError for an invalid tabtype or
    an unsupported astropy version.
    ----------------------------------------------------------------------------
    """
    try:
        cols
    except NameError:
        raise NameError('Input cols not specified')
    if tabtype not in ['BinTableHDU', 'TableHDU']:
        raise ValueError('tabtype specified is invalid.')
    use_ascii = False
    if tabtype == 'TableHDU':
        use_ascii = True
    if astropy.__version__ == '0.4':
        # astropy 0.4 used the tbtype keyword instead of ascii
        columns = fits.ColDefs(cols, tbtype=tabtype)
    elif LooseVersion(astropy.__version__) >= LooseVersion('0.4.2'):
        columns = fits.ColDefs(cols, ascii=use_ascii)
    else:
        # Fix: versions strictly between 0.4 and 0.4.2 previously fell
        # through with `columns` unbound, raising UnboundLocalError at the
        # return below. Fail with an explicit, actionable error instead.
        raise ValueError('Unsupported astropy version {0} for constructing FITS columns'.format(astropy.__version__))
    return columns
################################################################################
# def _gentle_clean(dd, _w, tol=1e-1, area=None, stop_if_div=True, maxiter=100,
# verbose=False, autoscale=True):
# if verbose:
# print("Performing gentle clean...")
# scale_factor = 1.0
# if autoscale:
# scale_factor = NP.nanmax(NP.abs(_w))
# dd /= scale_factor
# _w /= scale_factor
# cc, info = AP.deconv.clean(dd, _w, tol=tol, area=area, stop_if_div=False,
# maxiter=maxiter, verbose=verbose)
# #dd = info['res']
# cc = NP.zeros_like(dd)
# inside_res = NP.std(dd[area!=0])
# outside_res = NP.std(dd[area==0])
# initial_res = inside_res
# #print(inside_res,'->',)
# ncycle=0
# if verbose:
# print("inside_res outside_res")
# print(inside_res, outside_res)
# inside_res = 2*outside_res #just artifically bump up the inside res so the loop runs at least once
# while(inside_res>outside_res and maxiter>0):
# if verbose: print('.',)
# _d_cl, info = AP.deconv.clean(dd, _w, tol=tol, area=area, stop_if_div=stop_if_div, maxiter=maxiter, verbose=verbose, pos_def=True)
# res = info['res']
# inside_res = NP.std(res[area!=0])
# outside_res = NP.std(res[area==0])
# dd = info['res']
# cc += _d_cl
# ncycle += 1
# if verbose: print(inside_res*scale_factor, outside_res*scale_factor)
# if ncycle>1000: break
# info['ncycle'] = ncycle-1
# dd *= scale_factor
# _w *= scale_factor
# cc *= scale_factor
# info['initial_residual'] = initial_res * scale_factor
# info['final_residual'] = inside_res * scale_factor
# return cc, info
#################################################################################
def complex1dClean_arg_splitter(args, **kwargs):
    """Unpack a tuple of positional arguments and forward to complex1dClean().

    Helper for multiprocessing map-style invocations, which deliver a single
    tuple of arguments per task.
    """
    positional = tuple(args)
    return complex1dClean(*positional, **kwargs)
def complex1dClean(inp, kernel, cbox=None, gain=0.1, maxiter=10000,
threshold=5e-3, threshold_type='relative', verbose=False,
progressbar=False, pid=None, progressbar_yloc=0):
"""
----------------------------------------------------------------------------
Hogbom CLEAN algorithm applicable to 1D complex array
Inputs:
inp [numpy vector] input 1D array to be cleaned. Can be complex.
kernel [numpy vector] 1D array that acts as the deconvolving kernel. Can
be complex. Must be of same size as inp
cbox [boolean array] 1D boolean array that acts as a mask for pixels
which should be cleaned. Same size as inp. Only pixels with values
True are to be searched for maxima in residuals for cleaning and
the rest are not searched for. Default=None (means all pixels are
to be searched for maxima while cleaning)
gain [scalar] gain factor to be applied while subtracting clean
component from residuals. This is the fraction of the maximum in
the residuals that will be subtracted. Must lie between 0 and 1.
A lower value will have a smoother convergence but take a longer
time to converge. Default=0.1
maxiter [scalar] maximum number of iterations for cleaning process. Will
terminate if the number of iterations exceed maxiter. Default=10000
threshold
[scalar] represents the cleaning depth either as a fraction of the
maximum in the input (when thershold_type is set to 'relative') or
the absolute value (when threshold_type is set to 'absolute') in
same units of input down to which inp should be cleaned. Value must
always be positive. When threshold_type is set to 'relative',
threshold mu st lie between 0 and 1. Default=5e-3 (found to work
well and converge fast) assuming threshold_type is set to 'relative'
threshold_type
[string] represents the type of threshold specified by value in
input threshold. Accepted values are 'relative' and 'absolute'. If
set to 'relative' the threshold value is the fraction (between 0
and 1) of maximum in input down to which it should be cleaned. If
set to 'asbolute' it is the actual value down to which inp should
be cleaned. Default='relative'
verbose [boolean] If set to True (default), print diagnostic and progress
messages. If set to False, no such messages are printed.
progressbar
[boolean] If set to False (default), no progress bar is displayed
pid [string or integer] process identifier (optional) relevant only in
case of parallel processing and if progressbar is set to True. If
pid is not specified, it defaults to the Pool process id
progressbar_yloc
[integer] row number where the progressbar is displayed on the
terminal. Default=0
Output:
outdict [dictionary] It consists of the following keys and values at
termination:
'termination' [dictionary] consists of information on the
conditions for termination with the following keys
and values:
'threshold' [boolean] If True, the cleaning process
terminated because the threshold was
reached
'maxiter' [boolean] If True, the cleaning process
terminated because the number of
iterations reached maxiter
'inrms<outrms'
[boolean] If True, the cleaning process
terminated because the rms inside the
clean box is below the rms outside of it
'iter' [scalar] number of iterations performed before
termination
'rms' [numpy vector] rms of the residuals as a function of
iteration
'inrms' [numpy vector] rms of the residuals inside the clean
box as a function of iteration
'outrms' [numpy vector] rms of the residuals outside the clean
box as a function of iteration
'res' [numpy array] uncleaned residuals at the end of the
cleaning process. Complex valued and same size as
inp
'cc' [numpy array] clean components at the end of the
cleaning process. Complex valued and same size as
inp
----------------------------------------------------------------------------
"""
try:
inp, kernel
except NameError:
raise NameError('Inputs inp and kernel not specified')
if not isinstance(inp, NP.ndarray):
raise TypeError('inp must be a numpy array')
if not isinstance(kernel, NP.ndarray):
raise TypeError('kernel must be a numpy array')
if threshold_type not in ['relative', 'absolute']:
raise ValueError('invalid specification for threshold_type')
if not isinstance(threshold, (int,float)):
raise TypeError('input threshold must be a scalar')
else:
threshold = float(threshold)
if threshold <= 0.0:
raise ValueError('input threshold must be positive')
inp = inp.flatten()
kernel = kernel.flatten()
kernel /= NP.abs(kernel).max()
kmaxind = NP.argmax(NP.abs(kernel))
if inp.size != kernel.size:
raise ValueError('inp and kernel must have same size')
if cbox is None:
cbox = NP.ones(inp.size, dtype=NP.bool)
elif isinstance(cbox, NP.ndarray):
cbox = cbox.flatten()
if cbox.size != inp.size:
raise ValueError('Clean box must be of same size as input')
cbox = NP.where(cbox > 0.0, True, False)
# cbox = cbox.astype(NP.int)
else:
raise TypeError('cbox must be a numpy array')
cbox = cbox.astype(NP.bool)
if threshold_type == 'relative':
lolim = threshold
else:
lolim = threshold / NP.abs(inp).max()
if lolim >= 1.0:
raise ValueError('incompatible value specified for threshold')
# inrms = [NP.std(inp[cbox])]
inrms = [NP.median(NP.abs(inp[cbox] - NP.median(inp[cbox])))]
if inp.size - NP.sum(cbox) <= 2:
outrms = None
else:
# outrms = [NP.std(inp[NP.invert(cbox)])]
outrms = [NP.median(NP.abs(inp[NP.invert(cbox)] - NP.median(inp[NP.invert(cbox)])))]
if not isinstance(gain, float):
raise TypeError('gain must be a floating point number')
else:
if (gain <= 0.0) or (gain >= 1.0):
raise TypeError('gain must lie between 0 and 1')
if not isinstance(maxiter, int):
raise TypeError('maxiter must be an integer')
else:
if maxiter <= 0:
raise ValueError('maxiter must be positive')
cc = NP.zeros_like(inp)
res = NP.copy(inp)
cond4 = False
# prevrms = NP.std(res)
# currentrms = [NP.std(res)]
prevrms = NP.median(NP.abs(res - NP.median(res)))
currentrms = [NP.median(NP.abs(res - NP.median(res)))]
itr = 0
terminate = False
if progressbar:
if pid is None:
pid = MP.current_process().name
else:
pid = '{0:0d}'.format(pid)
progressbar_loc = (0, progressbar_yloc)
writer=WM.Writer(progressbar_loc)
progress = PGB.ProgressBar(widgets=[pid+' ', PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Iterations '.format(maxiter), PGB.ETA()], maxval=maxiter, fd=writer).start()
while not terminate:
itr += 1
indmaxres = NP.argmax(NP.abs(res*cbox))
maxres = res[indmaxres]
ccval = gain * maxres
cc[indmaxres] += ccval
res = res - ccval * NP.roll(kernel, indmaxres-kmaxind)
prevrms = NP.copy(currentrms[-1])
# currentrms += [NP.std(res)]
currentrms += [NP.median(NP.abs(res - NP.median(res)))]
# inrms += [NP.std(res[cbox])]
inrms += [NP.median(NP.abs(res[cbox] - NP.median(res[cbox])))]
# cond1 = NP.abs(maxres) <= inrms[-1]
cond1 = NP.abs(maxres) <= lolim * NP.abs(inp).max()
cond2 = itr >= maxiter
terminate = cond1 or cond2
if outrms is not None:
# outrms += [NP.std(res[NP.invert(cbox)])]
outrms += [NP.median(NP.abs(res[NP.invert(cbox)] - NP.median(res[NP.invert(cbox)])))]
cond3 = inrms[-1] <= outrms[-1]
terminate = terminate or cond3
if progressbar:
progress.update(itr)
if progressbar:
progress.finish()
inrms = NP.asarray(inrms)
currentrms = NP.asarray(currentrms)
if outrms is not None:
outrms = NP.asarray(outrms)
outdict = {'termination':{'threshold': cond1, 'maxiter': cond2, 'inrms<outrms': cond3}, 'iter': itr, 'rms': currentrms, 'inrms': inrms, 'outrms': outrms, 'cc': cc, 'res': res}
return outdict
################################################################################
def dkprll_deta(redshift, cosmo=cosmo100):
    """
    ----------------------------------------------------------------------------
    Jacobian to transform delays (eta or tau) to line-of-sight wavenumbers
    (h/Mpc) for the HI 21 cm line at the specified redshift(s) and cosmology.

    Inputs:

    redshift [scalar, list or numpy array] redshift(s)

    cosmo    [instance of cosmology class from astropy] An instance of class
             FLRW or default_cosmology of astropy cosmology module. Default
             uses Flat lambda CDM cosmology with Omega_m=0.27,
             H0=100 km/s/Mpc

    Outputs:

    Jacobian to convert eta (lags) to k_parallel. Same size as redshift
    ----------------------------------------------------------------------------
    """
    valid_types = (int, float, list, NP.ndarray)
    if not isinstance(redshift, valid_types):
        raise TypeError('redshift must be a scalar, list or numpy array')
    z = NP.asarray(redshift)
    if NP.any(z < 0.0):
        raise ValueError('redshift(s) must be non-negative')
    if not isinstance(cosmo, (CP.FLRW, CP.default_cosmology)):
        raise TypeError('Input cosmology must be a cosmology class defined in Astropy')
    # 1e3 converts from km/s/Mpc-based units to the h/Mpc convention
    return 2 * NP.pi * cosmo.H0.value * CNST.rest_freq_HI * cosmo.efunc(z) / FCNST.c / (1+z)**2 * 1e3
################################################################################
def beam3Dvol(beam, freqs, freq_wts=None, hemisphere=True):
    """
    ----------------------------------------------------------------------------
    Compute 3D volume relevant for power spectrum given an antenna power
    pattern. It is estimated by summing square of the beam in angular and
    frequency coordinates and in units of "Sr Hz".

    Inputs:

    beam        [numpy array] Antenna power pattern with peak normalized to
                unity. It can be of shape (npix x nchan) or (npix x 1) or
                (npix,). npix must be a HEALPix compatible value. nchan is the
                number of frequency channels, same as the size of input freqs.
                If it is of shape (npix x 1) or (npix,), the beam will be
                assumed to be identical for all frequency channels.

    freqs       [list or numpy array] Frequency channels (in Hz) of size nchan

    freq_wts    [numpy array] Frequency weights to be applied to the
                beam. Must be of shape (nchan,) or (nwin, nchan)

    Keyword Inputs:

    hemisphere  [boolean] If set to True (default), the 3D volume will be
                estimated using the upper hemisphere. If False, the full
                sphere is used.

    Output:

    The product Omega x bandwidth (in Sr Hz) computed using the integral of
    squared power pattern. It is of shape (nwin,)
    ----------------------------------------------------------------------------
    """
    try:
        beam, freqs
    except NameError:
        raise NameError('Both inputs beam and freqs must be specified')
    if not isinstance(beam, NP.ndarray):
        raise TypeError('Input beam must be a numpy array')
    if not isinstance(freqs, (list, NP.ndarray)):
        raise TypeError('Input freqs must be a list or numpy array')
    # Fix: NP.float was removed in NumPy 1.24; the builtin float is equivalent
    freqs = NP.asarray(freqs).astype(float).reshape(-1)
    if freqs.size < 2:
        raise ValueError('Input freqs does not have enough elements to determine frequency resolution')
    if beam.ndim > 2:
        raise ValueError('Invalid dimensions for beam')
    elif beam.ndim == 2:
        if beam.shape[1] != 1:
            if beam.shape[1] != freqs.size:
                raise ValueError('Dimensions of beam do not match the number of frequency channels')
    elif beam.ndim == 1:
        beam = beam.reshape(-1,1)
    else:
        raise ValueError('Invalid dimensions for beam')
    if freq_wts is not None:
        if not isinstance(freq_wts, NP.ndarray):
            raise TypeError('Input freq_wts must be a numpy array')
        if freq_wts.ndim == 1:
            freq_wts = freq_wts.reshape(1,-1)
        elif freq_wts.ndim > 2:
            raise ValueError('Input freq_wts must be of shape nwin x nchan')
        freq_wts = NP.asarray(freq_wts).astype(float).reshape(-1,freqs.size)
        if freq_wts.shape[1] != freqs.size:
            raise ValueError('Input freq_wts does not have shape compatible with freqs')
    else:
        # Default to uniform unit weights over a single window
        freq_wts = NP.ones(freqs.size, dtype=float).reshape(1,-1)
    eps = 1e-10
    if beam.max() > 1.0+eps:
        raise ValueError('Input beam maximum exceeds unity. Input beam should be normalized to peak of unity')
    nside = HP.npix2nside(beam.shape[0])
    domega = HP.nside2pixarea(nside, degrees=False)  # pixel solid angle (Sr)
    df = freqs[1] - freqs[0]
    bw = df * freqs.size
    weighted_beam = beam[:,NP.newaxis,:] * freq_wts[NP.newaxis,:,:]
    theta, phi = HP.pix2ang(nside, NP.arange(beam.shape[0]))
    if hemisphere:
        ind, = NP.where(theta <= NP.pi/2)   # Select upper hemisphere
    else:
        ind = NP.arange(beam.shape[0])
    omega_bw = domega * df * NP.nansum(weighted_beam[ind,:,:]**2, axis=(0,2))
    if NP.any(omega_bw > 4*NP.pi*bw):
        raise ValueError('3D volume estimated from beam exceeds the upper limit. Check normalization of the input beam')
    return omega_bw
################################################################################
class DelaySpectrum(object):
"""
----------------------------------------------------------------------------
Class to manage delay spectrum information on a multi-element interferometer
array.
Attributes:
ia [instance of class InterferometerArray] An instance of class
InterferometerArray that contains the results of the simulated
interferometer visibilities
bp [numpy array] Bandpass weights of size n_baselines x nchan x
n_acc, where n_acc is the number of accumulations in the
observation, nchan is the number of frequency channels, and
n_baselines is the number of baselines
bp_wts [numpy array] Additional weighting to be applied to the bandpass
shapes during the application of the member function
delay_transform(). Same size as attribute bp.
f [list or numpy vector] frequency channels in Hz
cc_freq [list or numpy vector] frequency channels in Hz associated with
clean components of delay spectrum. Same size as cc_lags. This
computed inside member function delayClean()
df [scalar] Frequency resolution (in Hz)
lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as channels. This is
computed in member function delay_transform().
cc_lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as cc_freq. This is computed in
member function delayClean().
lag_kernel [numpy array] Inverse Fourier Transform of the frequency
bandpass shape. In other words, it is the impulse response
corresponding to frequency bandpass. Same size as attributes
bp and bp_wts. It is initialized in __init__() member function
but effectively computed in member functions delay_transform()
and delayClean()
cc_lag_kernel
[numpy array] Inverse Fourier Transform of the frequency
bandpass shape. In other words, it is the impulse response
corresponding to frequency bandpass shape used in complex delay
clean routine. It is initialized in __init__() member function
but effectively computed in member function delayClean()
n_acc [scalar] Number of accumulations
horizon_delay_limits
[numpy array] NxMx2 numpy array denoting the neagtive and
positive horizon delay limits where N is the number of
timestamps, M is the number of baselines. The 0 index in the
third dimenstion denotes the negative horizon delay limit while
the 1 index denotes the positive horizon delay limit
skyvis_lag [numpy array] Complex visibility due to sky emission (in Jy Hz or
K Hz) along the delay axis for each interferometer obtained by
FFT of skyvis_freq along frequency axis. Same size as vis_freq.
Created in the member function delay_transform(). Read its
docstring for more details. Same dimensions as skyvis_freq
vis_lag [numpy array] The simulated complex visibility (in Jy Hz or K Hz)
along delay axis for each interferometer obtained by FFT of
vis_freq along frequency axis. Same size as vis_noise_lag and
skyis_lag. It is evaluated in member function delay_transform().
vis_noise_lag
[numpy array] Complex visibility noise (in Jy Hz or K Hz) along
delay axis for each interferometer generated using an FFT of
vis_noise_freq along frequency axis. Same size as vis_noise_freq.
Created in the member function delay_transform(). Read its
docstring for more details.
cc_skyvis_lag
[numpy array] Complex cleaned visibility delay spectra (in
Jy Hz or K Hz) of noiseless simulated sky visibilities for each
baseline at each LST. Size is nbl x nlags x nlst
cc_skyvis_res_lag
[numpy array] Complex residuals from cleaned visibility delay
spectra (in Jy Hz or K Hz) of noiseless simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst
cc_skyvis_net_lag
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals (in Jy Hz or K Hz) of noiseless simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst. cc_skyvis_net_lag = cc_skyvis_lag +
cc_skyvis_res_lag
cc_vis_lag
[numpy array] Complex cleaned visibility delay spectra (in
Jy Hz or K Hz) of noisy simulated sky visibilities for each
baseline at each LST. Size is nbl x nlags x nlst
cc_vis_res_lag
[numpy array] Complex residuals from cleaned visibility delay
spectra (in Jy Hz or K Hz) of noisy simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst
cc_vis_net_lag
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals (in Jy Hz or K Hz) of noisy simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst. cc_vis_net_lag = cc_vis_lag +
cc_vis_res_lag
cc_skyvis_freq
[numpy array] Complex cleaned visibility delay spectra
transformed to frequency domain (in Jy or K.Sr) obtained from
noiseless simulated sky visibilities for each baseline at each
LST. Size is nbl x nlags x nlst
cc_skyvis_res_freq
[numpy array] Complex residuals from cleaned visibility delay
spectra transformed to frequency domain (in Jy or K.Sr) obtained
from noiseless simulated sky visibilities for each baseline at
each LST. Size is nbl x nlags x nlst
cc_skyvis_net_freq
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals transformed to frequency domain (in Jy or K.Sr)
obtained from noiseless simulated sky visibilities for each
baseline at each LST. Size is nbl x nlags x nlst.
cc_skyvis_net_freq = cc_skyvis_freq + cc_skyvis_res_freq
cc_vis_freq
[numpy array] Complex cleaned visibility delay spectra
transformed to frequency domain (in Jy or K.Sr) obtained from
noisy simulated sky visibilities for each baseline at each LST.
Size is nbl x nlags x nlst
cc_vis_res_freq
[numpy array] Complex residuals from cleaned visibility delay
spectra transformed to frequency domain (in Jy or K.Sr) of noisy
simulated sky visibilities for each baseline at each LST. Size
is nbl x nlags x nlst
cc_vis_net_freq
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals transformed to frequency domain (in Jy or K.Sr)
obtained from noisy simulated sky visibilities for each baseline
at each LST. Size is nbl x nlags x nlst.
cc_vis_net_freq = cc_vis_freq + cc_vis_res_freq
clean_window_buffer
[scalar] number of inverse bandwidths to extend beyond the
horizon delay limit to include in the CLEAN deconvolution.
pad [scalar] Non-negative scalar indicating padding fraction
relative to the number of frequency channels. For e.g., a
pad of 1.0 pads the frequency axis with zeros of the same
width as the number of channels. After the delay transform,
the transformed visibilities are downsampled by a factor of
1+pad. If a negative value is specified, delay transform
will be performed with no padding
subband_delay_spectra
[dictionary] contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Under each of these keys is information about delay
spectra of different frequency sub-bands (n_win in number) in the
form of a dictionary under the following keys:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'bpcorrect' [boolean] If True (default), correct for frequency
weights that were applied during the original
delay transform using which the delay CLEAN was
done. This would flatten the bandpass after delay
CLEAN. If False, do not apply the correction,
namely, inverse of bandpass weights. This applies
only to CLEANed visibilities under the 'cc' key and
hence is present only if the top level key is 'cc'
and absent for key 'sim'
'npad' [scalar] Number of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nchan+npad where npad is the number of
frequency channels padded specified under the key
'npad'. It roughly corresponds to k_parallel.
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated.
'skyvis_lag'
[numpy array] subband delay spectra of simulated
or CLEANed noiseless visibilities, depending on
whether the top level key is 'cc' or 'sim'
respectively, after applying the frequency weights
under the key 'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'vis_lag' [numpy array] subband delay spectra of simulated
or CLEANed noisy visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'vis_noise_lag'
[numpy array] subband delay spectra of simulated
noise after applying the frequency weights under
the key 'freq_wts'. Only present if top level key is
'sim' and absent for 'cc'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'skyvis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'vis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'skyvis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'vis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
subband_delay_spectra_resampled
[dictionary] Very similar to the attribute
subband_delay_spectra except now it has been resampled along
delay axis to contain usually only independent delay bins. It
contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Under each of these keys is information about delay
spectra of different frequency sub-bands (n_win in number) after
resampling to independent number of delay bins in the
form of a dictionary under the following keys:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins. It roughly corresponds to
k_parallel.
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
usually approximately inverse of the effective
bandwidth of the subband
'skyvis_lag'
[numpy array] subband delay spectra of simulated
or CLEANed noiseless visibilities, depending on
whether the top level key is 'cc' or 'sim'
respectively, after applying the frequency weights
under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'vis_lag' [numpy array] subband delay spectra of simulated
or CLEANed noisy visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'vis_noise_lag'
[numpy array] subband delay spectra of simulated
noise after applying the frequency weights under
the key 'freq_wts'. Only present if top level key is
'sim' and absent for 'cc'. It is of size
n_bl x n_win x nlags x n_t.
'skyvis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'vis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'skyvis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'vis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
Member functions:
__init__() Initializes an instance of class DelaySpectrum
delay_transform()
Transforms the visibilities from frequency axis onto
delay (time) axis using an IFFT. This is performed for
noiseless sky visibilities, thermal noise in visibilities,
and observed visibilities.
delay_transform_allruns()
Transforms the visibilities of multiple runs from frequency
axis onto delay (time) axis using an IFFT.
clean() Transforms the visibilities from frequency axis onto delay
(time) axis using an IFFT and deconvolves the delay transform
quantities along the delay axis. This is performed for noiseless
sky visibilities, thermal noise in visibilities, and observed
visibilities.
delayClean()
Transforms the visibilities from frequency axis onto delay
(time) axis using an IFFT and deconvolves the delay transform
quantities along the delay axis. This is performed for noiseless
sky visibilities, thermal noise in visibilities, and observed
visibilities. This calls an in-house module complex1dClean
instead of the clean routine in AIPY module. It can utilize
parallelization
subband_delay_transform()
Computes delay transform on multiple frequency sub-bands with
specified weights
subband_delay_transform_allruns()
Computes delay transform on multiple frequency sub-bands with
specified weights for multiple realizations of visibilities
subband_delay_transform_closure_phase()
Computes delay transform of closure phases on antenna triplets
on multiple frequency sub-bands with specified weights
get_horizon_delay_limits()
Estimates the delay envelope determined by the sky horizon
for the baseline(s) for the phase centers
set_horizon_delay_limits()
Estimates the delay envelope determined by the sky horizon for
the baseline(s) for the phase centers of the DelaySpectrum
instance. No output is returned. Uses the member function
get_horizon_delay_limits()
save() Saves the interferometer array delay spectrum information to
disk.
----------------------------------------------------------------------------
"""
def __init__(self, interferometer_array=None, init_file=None):
    """
    ------------------------------------------------------------------------
    Initialize the DelaySpectrum class which manages information on delay
    spectrum of a multi-element interferometer.

    Class attributes initialized are:
    f, bp, bp_wts, df, lags, skyvis_lag, vis_lag, n_acc, vis_noise_lag, ia,
    pad, lag_kernel, horizon_delay_limits, cc_skyvis_lag, cc_skyvis_res_lag,
    cc_skyvis_net_lag, cc_vis_lag, cc_vis_res_lag, cc_vis_net_lag,
    cc_skyvis_freq, cc_skyvis_res_freq, cc_skyvis_net_freq, cc_vis_freq,
    cc_vis_res_freq, cc_vis_net_freq, clean_window_buffer, cc_freq, cc_lags,
    cc_lag_kernel, subband_delay_spectra, subband_delay_spectra_resampled

    Read docstring of class DelaySpectrum for details on these
    attributes.

    Input(s):

    interferometer_array
                [instance of class InterferometerArray] An instance of
                class InterferometerArray from which certain attributes
                will be obtained and used

    init_file   [string] full path to filename in FITS format containing
                delay spectrum information of interferometer array

    Other input parameters have their usual meanings. Read the docstring of
    class DelaySpectrum for details on these inputs.
    ------------------------------------------------------------------------
    """

    # Two mutually exclusive initialization paths: from a FITS init_file on
    # disk, or from an in-memory InterferometerArray instance.
    argument_init = False
    init_file_success = False
    if init_file is not None:
        try:
            hdulist = fits.open(init_file)
        except IOError:
            argument_init = True
            print('\tinit_file provided but could not open the initialization file. Attempting to initialize with input parameters...')
        # NOTE(review): if fits.open() raised IOError above, hdulist is
        # undefined at this point and the next line raises NameError rather
        # than falling through to argument-based initialization -- confirm
        # whether this path should skip to the argument_init branch.
        # NOTE(review): xrange is a Python 2 builtin; presumably a
        # compatibility shim is imported at the top of this file -- verify.
        extnames = [hdulist[i].header['EXTNAME'] for i in xrange(1,len(hdulist))]

        # --- Mandatory scalar keywords from the primary header.  On any
        # missing keyword, close the FITS handle before re-raising so the
        # file descriptor is not leaked.
        try:
            self.df = hdulist[0].header['freq_resolution']   # frequency resolution (Hz)
        except KeyError:
            hdulist.close()
            raise KeyError('Keyword "freq_resolution" not found in header')
        try:
            self.n_acc = hdulist[0].header['N_ACC']          # number of accumulations/timestamps
        except KeyError:
            hdulist.close()
            raise KeyError('Keyword "N_ACC" not found in header')
        try:
            self.pad = hdulist[0].header['PAD']              # padding fraction used in delay transform
        except KeyError:
            hdulist.close()
            raise KeyError('Keyword "PAD" not found in header')
        try:
            self.clean_window_buffer = hdulist[0].header['DBUFFER']  # CLEAN window buffer (inverse bandwidths)
        except KeyError:
            hdulist.close()
            raise KeyError('Keyword "DBUFFER" not found in header')
        try:
            iarray_init_file = hdulist[0].header['IARRAY']   # path to InterferometerArray init file
        except KeyError:
            hdulist.close()
            raise KeyError('Keyword "IARRAY" not found in header')
        # Rebuild the associated InterferometerArray instance from the init
        # file referenced in the header.
        self.ia = RI.InterferometerArray(None, None, None, init_file=iarray_init_file)
        # if 'SPECTRAL INFO' not in extnames:
        #     raise KeyError('No extension table found containing spectral information.')
        # else:
        #     self.f = hdulist['SPECTRAL INFO'].data['frequency']
        #     try:
        #         self.lags = hdulist['SPECTRAL INFO'].data['lag']
        #     except KeyError:
        #         self.lags = None

        # --- Mandatory frequency axis; optional lag / CLEAN axes.
        try:
            self.f = hdulist['FREQUENCIES'].data
        except KeyError:
            hdulist.close()
            raise KeyError('Extension "FREQUENCIES" not found in header')
        self.lags = None
        if 'LAGS' in extnames:
            self.lags = hdulist['LAGS'].data
        self.cc_lags = None
        if 'CLEAN LAGS' in extnames:
            self.cc_lags = hdulist['CLEAN LAGS'].data
        self.cc_freq = None
        if 'CLEAN FREQUENCIES' in extnames:
            self.cc_freq = hdulist['CLEAN FREQUENCIES'].data

        # --- Bandpass is mandatory; bandpass weights default to unity when
        # the extension is absent.
        if 'BANDPASS' in extnames:
            self.bp = hdulist['BANDPASS'].data
        else:
            raise KeyError('Extension named "BANDPASS" not found in init_file.')
        if 'BANDPASS WEIGHTS' in extnames:
            self.bp_wts = hdulist['BANDPASS WEIGHTS'].data
        else:
            self.bp_wts = NP.ones_like(self.bp)
        # Horizon delay limits: read from file, or recompute from the
        # instance via set_horizon_delay_limits().
        if 'HORIZON LIMITS' in extnames:
            self.horizon_delay_limits = hdulist['HORIZON LIMITS'].data
        else:
            self.set_horizon_delay_limits()

        # --- Complex-valued arrays are stored as separate REAL/IMAG
        # extensions; each is reassembled only if the REAL part exists, and
        # promoted to complex only if the IMAG part also exists.
        # NOTE(review): NP.complex is a deprecated alias (removed in
        # NumPy >= 1.24); consider NP.complex128 or builtin complex.
        self.lag_kernel = None
        if 'LAG KERNEL REAL' in extnames:
            self.lag_kernel = hdulist['LAG KERNEL REAL'].data
            if 'LAG KERNEL IMAG' in extnames:
                self.lag_kernel = self.lag_kernel.astype(NP.complex)
                self.lag_kernel += 1j * hdulist['LAG KERNEL IMAG'].data
        self.cc_lag_kernel = None
        if 'CLEAN LAG KERNEL REAL' in extnames:
            self.cc_lag_kernel = hdulist['CLEAN LAG KERNEL REAL'].data
            if 'CLEAN LAG KERNEL IMAG' in extnames:
                self.cc_lag_kernel = self.cc_lag_kernel.astype(NP.complex)
                self.cc_lag_kernel += 1j * hdulist['CLEAN LAG KERNEL IMAG'].data
        self.skyvis_lag = None
        if 'NOISELESS DELAY SPECTRA REAL' in extnames:
            self.skyvis_lag = hdulist['NOISELESS DELAY SPECTRA REAL'].data
            if 'NOISELESS DELAY SPECTRA IMAG' in extnames:
                self.skyvis_lag = self.skyvis_lag.astype(NP.complex)
                self.skyvis_lag += 1j * hdulist['NOISELESS DELAY SPECTRA IMAG'].data
        self.vis_lag = None
        if 'NOISY DELAY SPECTRA REAL' in extnames:
            self.vis_lag = hdulist['NOISY DELAY SPECTRA REAL'].data
            if 'NOISY DELAY SPECTRA IMAG' in extnames:
                self.vis_lag = self.vis_lag.astype(NP.complex)
                self.vis_lag += 1j * hdulist['NOISY DELAY SPECTRA IMAG'].data
        self.vis_noise_lag = None
        if 'DELAY SPECTRA NOISE REAL' in extnames:
            self.vis_noise_lag = hdulist['DELAY SPECTRA NOISE REAL'].data
            if 'DELAY SPECTRA NOISE IMAG' in extnames:
                self.vis_noise_lag = self.vis_noise_lag.astype(NP.complex)
                self.vis_noise_lag += 1j * hdulist['DELAY SPECTRA NOISE IMAG'].data

        # --- CLEANed delay-spectrum products (clean components and
        # residuals, in both delay and frequency domains).
        self.cc_skyvis_lag = None
        if 'CLEAN NOISELESS DELAY SPECTRA REAL' in extnames:
            self.cc_skyvis_lag = hdulist['CLEAN NOISELESS DELAY SPECTRA REAL'].data
            if 'CLEAN NOISELESS DELAY SPECTRA IMAG' in extnames:
                self.cc_skyvis_lag = self.cc_skyvis_lag.astype(NP.complex)
                self.cc_skyvis_lag += 1j * hdulist['CLEAN NOISELESS DELAY SPECTRA IMAG'].data
        self.cc_vis_lag = None
        if 'CLEAN NOISY DELAY SPECTRA REAL' in extnames:
            self.cc_vis_lag = hdulist['CLEAN NOISY DELAY SPECTRA REAL'].data
            if 'CLEAN NOISY DELAY SPECTRA IMAG' in extnames:
                self.cc_vis_lag = self.cc_vis_lag.astype(NP.complex)
                self.cc_vis_lag += 1j * hdulist['CLEAN NOISY DELAY SPECTRA IMAG'].data
        self.cc_skyvis_res_lag = None
        if 'CLEAN NOISELESS DELAY SPECTRA RESIDUALS REAL' in extnames:
            self.cc_skyvis_res_lag = hdulist['CLEAN NOISELESS DELAY SPECTRA RESIDUALS REAL'].data
            if 'CLEAN NOISELESS DELAY SPECTRA RESIDUALS IMAG' in extnames:
                self.cc_skyvis_res_lag = self.cc_skyvis_res_lag.astype(NP.complex)
                self.cc_skyvis_res_lag += 1j * hdulist['CLEAN NOISELESS DELAY SPECTRA RESIDUALS IMAG'].data
        self.cc_vis_res_lag = None
        if 'CLEAN NOISY DELAY SPECTRA RESIDUALS REAL' in extnames:
            self.cc_vis_res_lag = hdulist['CLEAN NOISY DELAY SPECTRA RESIDUALS REAL'].data
            if 'CLEAN NOISY DELAY SPECTRA RESIDUALS IMAG' in extnames:
                self.cc_vis_res_lag = self.cc_vis_res_lag.astype(NP.complex)
                self.cc_vis_res_lag += 1j * hdulist['CLEAN NOISY DELAY SPECTRA RESIDUALS IMAG'].data
        self.cc_skyvis_freq = None
        if 'CLEAN NOISELESS VISIBILITIES REAL' in extnames:
            self.cc_skyvis_freq = hdulist['CLEAN NOISELESS VISIBILITIES REAL'].data
            if 'CLEAN NOISELESS VISIBILITIES IMAG' in extnames:
                self.cc_skyvis_freq = self.cc_skyvis_freq.astype(NP.complex)
                self.cc_skyvis_freq += 1j * hdulist['CLEAN NOISELESS VISIBILITIES IMAG'].data
        self.cc_vis_freq = None
        if 'CLEAN NOISY VISIBILITIES REAL' in extnames:
            self.cc_vis_freq = hdulist['CLEAN NOISY VISIBILITIES REAL'].data
            if 'CLEAN NOISY VISIBILITIES IMAG' in extnames:
                self.cc_vis_freq = self.cc_vis_freq.astype(NP.complex)
                self.cc_vis_freq += 1j * hdulist['CLEAN NOISY VISIBILITIES IMAG'].data
        self.cc_skyvis_res_freq = None
        if 'CLEAN NOISELESS VISIBILITIES RESIDUALS REAL' in extnames:
            self.cc_skyvis_res_freq = hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS REAL'].data
            if 'CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG' in extnames:
                self.cc_skyvis_res_freq = self.cc_skyvis_res_freq.astype(NP.complex)
                self.cc_skyvis_res_freq += 1j * hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG'].data
        self.cc_vis_res_freq = None
        if 'CLEAN NOISY VISIBILITIES RESIDUALS REAL' in extnames:
            self.cc_vis_res_freq = hdulist['CLEAN NOISY VISIBILITIES RESIDUALS REAL'].data
            if 'CLEAN NOISY VISIBILITIES RESIDUALS IMAG' in extnames:
                self.cc_vis_res_freq = self.cc_vis_res_freq.astype(NP.complex)
                self.cc_vis_res_freq += 1j * hdulist['CLEAN NOISY VISIBILITIES RESIDUALS IMAG'].data

        # --- Derived "net" quantities = clean components + residuals,
        # computed only when both constituents were loaded.
        self.cc_skyvis_net_lag = None
        if (self.cc_skyvis_lag is not None) and (self.cc_skyvis_res_lag is not None):
            self.cc_skyvis_net_lag = self.cc_skyvis_lag + self.cc_skyvis_res_lag
        self.cc_vis_net_lag = None
        if (self.cc_vis_lag is not None) and (self.cc_vis_res_lag is not None):
            self.cc_vis_net_lag = self.cc_vis_lag + self.cc_vis_res_lag
        self.cc_skyvis_net_freq = None
        if (self.cc_skyvis_freq is not None) and (self.cc_skyvis_res_freq is not None):
            self.cc_skyvis_net_freq = self.cc_skyvis_freq + self.cc_skyvis_res_freq
        self.cc_vis_net_freq = None
        if (self.cc_vis_freq is not None) and (self.cc_vis_res_freq is not None):
            self.cc_vis_net_freq = self.cc_vis_freq + self.cc_vis_res_freq

        # --- Subband delay spectra ('cc' and 'sim' top-level keys); the
        # presence of header keywords gates which products were saved.
        self.subband_delay_spectra = {}
        self.subband_delay_spectra_resampled = {}
        if 'SBDS' in hdulist[0].header:
            for key in ['cc', 'sim']:
                if '{0}-SBDS'.format(key) in hdulist[0].header:
                    self.subband_delay_spectra[key] = {}
                    self.subband_delay_spectra[key]['shape'] = hdulist[0].header['{0}-SBDS-WSHAPE'.format(key)]
                    if key == 'cc':
                        # bandpass-correction flag is stored only for CLEANed products
                        self.subband_delay_spectra[key]['bpcorrect'] = bool(hdulist[0].header['{0}-SBDS-BPCORR'.format(key)])
                    self.subband_delay_spectra[key]['npad'] = hdulist[0].header['{0}-SBDS-NPAD'.format(key)]
                    self.subband_delay_spectra[key]['freq_center'] = hdulist['{0}-SBDS-F0'.format(key)].data
                    self.subband_delay_spectra[key]['freq_wts'] = hdulist['{0}-SBDS-FWTS'.format(key)].data
                    self.subband_delay_spectra[key]['bw_eff'] = hdulist['{0}-SBDS-BWEFF'.format(key)].data
                    self.subband_delay_spectra[key]['lags'] = hdulist['{0}-SBDS-LAGS'.format(key)].data
                    self.subband_delay_spectra[key]['lag_kernel'] = hdulist['{0}-SBDS-LAGKERN-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-LAGKERN-IMAG'.format(key)].data
                    self.subband_delay_spectra[key]['lag_corr_length'] = hdulist['{0}-SBDS-LAGCORR'.format(key)].data
                    self.subband_delay_spectra[key]['skyvis_lag'] = hdulist['{0}-SBDS-SKYVISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-SKYVISLAG-IMAG'.format(key)].data
                    self.subband_delay_spectra[key]['vis_lag'] = hdulist['{0}-SBDS-VISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-VISLAG-IMAG'.format(key)].data
                    if key == 'sim':
                        # noise delay spectra exist only for simulated products
                        self.subband_delay_spectra[key]['vis_noise_lag'] = hdulist['{0}-SBDS-NOISELAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-NOISELAG-IMAG'.format(key)].data
                    if key == 'cc':
                        # residuals and net (= components + residuals) exist only for CLEANed products
                        self.subband_delay_spectra[key]['skyvis_res_lag'] = hdulist['{0}-SBDS-SKYVISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-SKYVISRESLAG-IMAG'.format(key)].data
                        self.subband_delay_spectra[key]['vis_res_lag'] = hdulist['{0}-SBDS-VISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-VISRESLAG-IMAG'.format(key)].data
                        self.subband_delay_spectra[key]['skyvis_net_lag'] = self.subband_delay_spectra[key]['skyvis_lag'] + self.subband_delay_spectra[key]['skyvis_res_lag']
                        self.subband_delay_spectra[key]['vis_net_lag'] = self.subband_delay_spectra[key]['vis_lag'] + self.subband_delay_spectra[key]['vis_res_lag']
        # --- Resampled subband delay spectra (delay axis downsampled to
        # approximately independent bins); same layout as above.
        if 'SBDS-RS' in hdulist[0].header:
            for key in ['cc', 'sim']:
                if '{0}-SBDS-RS'.format(key) in hdulist[0].header:
                    self.subband_delay_spectra_resampled[key] = {}
                    self.subband_delay_spectra_resampled[key]['freq_center'] = hdulist['{0}-SBDSRS-F0'.format(key)].data
                    self.subband_delay_spectra_resampled[key]['bw_eff'] = hdulist['{0}-SBDSRS-BWEFF'.format(key)].data
                    self.subband_delay_spectra_resampled[key]['lags'] = hdulist['{0}-SBDSRS-LAGS'.format(key)].data
                    self.subband_delay_spectra_resampled[key]['lag_kernel'] = hdulist['{0}-SBDSRS-LAGKERN-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-LAGKERN-IMAG'.format(key)].data
                    self.subband_delay_spectra_resampled[key]['lag_corr_length'] = hdulist['{0}-SBDSRS-LAGCORR'.format(key)].data
                    self.subband_delay_spectra_resampled[key]['skyvis_lag'] = hdulist['{0}-SBDSRS-SKYVISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-SKYVISLAG-IMAG'.format(key)].data
                    self.subband_delay_spectra_resampled[key]['vis_lag'] = hdulist['{0}-SBDSRS-VISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-VISLAG-IMAG'.format(key)].data
                    if key == 'sim':
                        self.subband_delay_spectra_resampled[key]['vis_noise_lag'] = hdulist['{0}-SBDSRS-NOISELAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-NOISELAG-IMAG'.format(key)].data
                    if key == 'cc':
                        self.subband_delay_spectra_resampled[key]['skyvis_res_lag'] = hdulist['{0}-SBDSRS-SKYVISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-SKYVISRESLAG-IMAG'.format(key)].data
                        self.subband_delay_spectra_resampled[key]['vis_res_lag'] = hdulist['{0}-SBDSRS-VISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-VISRESLAG-IMAG'.format(key)].data
                        self.subband_delay_spectra_resampled[key]['skyvis_net_lag'] = self.subband_delay_spectra_resampled[key]['skyvis_lag'] + self.subband_delay_spectra_resampled[key]['skyvis_res_lag']
                        self.subband_delay_spectra_resampled[key]['vis_net_lag'] = self.subband_delay_spectra_resampled[key]['vis_lag'] + self.subband_delay_spectra_resampled[key]['vis_res_lag']
        hdulist.close()
        init_file_success = True
        return
    else:
        argument_init = True

    if (not argument_init) and (not init_file_success):
        raise ValueError('Initialization failed with the use of init_file.')

    # --- Argument-based initialization: inherit the frequency setup and
    # bandpass from the supplied InterferometerArray, and reset all delay /
    # CLEAN products to their empty defaults.
    if not isinstance(interferometer_array, RI.InterferometerArray):
        raise TypeError('Input interferometer_array must be an instance of class InterferometerArray')
    self.ia = interferometer_array
    self.f = interferometer_array.channels
    self.df = interferometer_array.freq_resolution
    self.n_acc = interferometer_array.n_acc
    self.horizon_delay_limits = self.get_horizon_delay_limits()
    self.bp = interferometer_array.bp # Inherent bandpass shape
    self.bp_wts = interferometer_array.bp_wts # Additional bandpass weights
    self.pad = 0.0
    # Delay (lag) axis corresponding to the unpadded frequency axis
    self.lags = DSP.spectral_axis(self.f.size, delx=self.df, use_real=False, shift=True)
    self.lag_kernel = None
    self.skyvis_lag = None
    self.vis_lag = None
    self.vis_noise_lag = None
    self.clean_window_buffer = 1.0
    self.cc_lags = None
    self.cc_freq = None
    self.cc_lag_kernel = None
    self.cc_skyvis_lag = None
    self.cc_skyvis_res_lag = None
    self.cc_vis_lag = None
    self.cc_vis_res_lag = None
    self.cc_skyvis_net_lag = None
    self.cc_vis_net_lag = None
    self.cc_skyvis_freq = None
    self.cc_skyvis_res_freq = None
    self.cc_vis_freq = None
    self.cc_vis_res_freq = None
    self.cc_skyvis_net_freq = None
    self.cc_vis_net_freq = None
    self.subband_delay_spectra = {}
    self.subband_delay_spectra_resampled = {}
#############################################################################
def delay_transform(self, pad=1.0, freq_wts=None, downsample=True,
                    action=None, verbose=True):
    """
    ------------------------------------------------------------------------
    Transforms the visibilities from frequency axis onto delay (time) axis
    using an IFFT. This is performed for noiseless sky visibilities, thermal
    noise in visibilities, and observed visibilities.

    Inputs:

    pad        [scalar] Non-negative scalar indicating padding fraction
               relative to the number of frequency channels. For e.g., a
               pad of 1.0 pads the frequency axis with zeros of the same
               width as the number of channels. After the delay transform,
               the transformed visibilities are downsampled by a factor of
               1+pad. If a negative value is specified, delay transform
               will be performed with no padding

    freq_wts   [numpy vector or array] window shaping to be applied before
               computing delay transform. It can either be a vector of size
               equal to the number of channels (which will be applied to all
               time instances for all baselines), or a nchan x n_snapshots
               numpy array which will be applied to all baselines, or a
               n_baselines x nchan numpy array which will be applied to all
               timestamps, or a n_baselines x nchan x n_snapshots numpy
               array. Default (None) will not apply windowing and only the
               inherent bandpass will be used.

    downsample [boolean] If set to True (default), the delay transform
               quantities will be downsampled by exactly the same factor
               that was used in padding. For instance, if pad is set to
               1.0, the downsampling will be by a factor of 2. If set to
               False, no downsampling will be done even if the original
               quantities were padded

    action     [string] If set to None (default), just return the delay-
               transformed quantities. If set to 'store', these quantities
               will be stored as internal attributes

    verbose    [boolean] If set to True (default), print diagnostic and
               progress messages. If set to False, no such messages are
               printed.

    Output:

    result     [dictionary] contains keys 'freq_wts', 'pad', 'lags',
               'vis_lag', 'skyvis_lag', 'vis_noise_lag' and 'lag_kernel'
               holding the frequency weights used, the padding fraction,
               the delay axis, and the delay-transformed visibility and
               kernel arrays respectively.
    ------------------------------------------------------------------------
    """

    if verbose:
        print('Preparing to compute delay transform...\n\tChecking input parameters for compatibility...')

    if not isinstance(pad, (int, float)):
        raise TypeError('pad fraction must be a scalar value.')
    if pad < 0.0:
        pad = 0.0
        if verbose:
            print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')

    if freq_wts is not None:
        # Broadcast the supplied window to full shape
        # (n_baselines, nchan, n_acc). The size checks below are tried in
        # order; if nchan, n_baselines and n_acc coincide the first match
        # wins (ambiguity inherent to a size-based dispatch).
        if freq_wts.size == self.f.size:
            freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.ia.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
        elif freq_wts.size == self.f.size * self.n_acc:
            freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.f.size, -1), axis=0), self.ia.baselines.shape[0], axis=0)
        elif freq_wts.size == self.f.size * self.ia.baselines.shape[0]:
            freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.f.size), axis=2), self.n_acc, axis=2)
        elif freq_wts.size == self.f.size * self.ia.baselines.shape[0] * self.n_acc:
            freq_wts = freq_wts.reshape(self.ia.baselines.shape[0], self.f.size, self.n_acc)
        else:
            # Fixed typo in user-facing message ("tiemstamps" -> "timestamps")
            raise ValueError('window shape dimensions incompatible with number of channels and/or number of timestamps.')
    else:
        # Fall back to the bandpass weights already attached to the instance
        freq_wts = self.bp_wts
    if verbose:
        print('\tFrequency window weights assigned.')

    if not isinstance(downsample, bool):
        raise TypeError('Input downsample must be of boolean type')

    if verbose:
        print('\tInput parameters have been verified to be compatible.\n\tProceeding to compute delay transform.')

    result = {}
    result['freq_wts'] = freq_wts
    result['pad'] = pad
    result['lags'] = DSP.spectral_axis(int(self.f.size*(1+pad)), delx=self.df, use_real=False, shift=True)

    # Number of zero-padded channels (0 when pad == 0.0) and the
    # normalization that converts the IFFT to Jy Hz (or K Hz) units.
    npad = int(self.f.size * pad)
    norm = (self.f.size + npad) * self.df

    def _delay_ft(spectrum):
        """Zero-pad along the frequency axis (axis 1) and IFFT to delay."""
        if npad > 0:
            spectrum = NP.pad(spectrum, ((0,0),(0,npad),(0,0)), mode='constant')
        return DSP.FT1D(spectrum, ax=1, inverse=True, use_real=False, shift=True) * norm

    result['vis_lag'] = _delay_ft(self.ia.vis_freq * self.bp * freq_wts)
    result['skyvis_lag'] = _delay_ft(self.ia.skyvis_freq * self.bp * freq_wts)
    result['vis_noise_lag'] = _delay_ft(self.ia.vis_noise_freq * self.bp * freq_wts)
    result['lag_kernel'] = _delay_ft(self.bp * freq_wts)
    if verbose:
        if pad == 0.0:
            print('\tDelay transform computed without padding.')
        else:
            print('\tDelay transform computed with padding fraction {0:.1f}'.format(pad))

    if downsample:
        # Undo the padding oversampling so the delay bins are (roughly)
        # independent again.
        result['vis_lag'] = DSP.downsampler(result['vis_lag'], 1+pad, axis=1)
        result['skyvis_lag'] = DSP.downsampler(result['skyvis_lag'], 1+pad, axis=1)
        result['vis_noise_lag'] = DSP.downsampler(result['vis_noise_lag'], 1+pad, axis=1)
        result['lag_kernel'] = DSP.downsampler(result['lag_kernel'], 1+pad, axis=1)
        result['lags'] = DSP.downsampler(result['lags'], 1+pad)
        result['lags'] = result['lags'].flatten()
        if verbose:
            print('\tDelay transform products downsampled by factor of {0:.1f}'.format(1+pad))
            print('delay_transform() completed successfully.')

    if action == 'store':
        # Persist the transform products as instance attributes
        self.pad = pad
        self.lags = result['lags']
        self.bp_wts = freq_wts
        self.vis_lag = result['vis_lag']
        self.skyvis_lag = result['skyvis_lag']
        self.vis_noise_lag = result['vis_noise_lag']
        self.lag_kernel = result['lag_kernel']

    return result
#############################################################################
# def clean(self, pad=1.0, freq_wts=None, clean_window_buffer=1.0,
# verbose=True):
# """
# ------------------------------------------------------------------------
# TO BE DEPRECATED!!! USE MEMBER FUNCTION delayClean()
# Transforms the visibilities from frequency axis onto delay (time) axis
# using an IFFT and deconvolves the delay transform quantities along the
# delay axis. This is performed for noiseless sky visibilities, thermal
# noise in visibilities, and observed visibilities.
# Inputs:
# pad [scalar] Non-negative scalar indicating padding fraction
# relative to the number of frequency channels. For e.g., a
# pad of 1.0 pads the frequency axis with zeros of the same
# width as the number of channels. If a negative value is
# specified, delay transform will be performed with no padding
# freq_wts [numpy vector or array] window shaping to be applied before
# computing delay transform. It can either be a vector or size
# equal to the number of channels (which will be applied to all
# time instances for all baselines), or a nchan x n_snapshots
# numpy array which will be applied to all baselines, or a
# n_baselines x nchan numpy array which will be applied to all
# timestamps, or a n_baselines x nchan x n_snapshots numpy
# array. Default (None) will not apply windowing and only the
# inherent bandpass will be used.
# verbose [boolean] If set to True (default), print diagnostic and
# progress messages. If set to False, no such messages are
# printed.
# ------------------------------------------------------------------------
# """
# if not isinstance(pad, (int, float)):
# raise TypeError('pad fraction must be a scalar value.')
# if pad < 0.0:
# pad = 0.0
# if verbose:
# print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
# if freq_wts is not None:
# if freq_wts.size == self.f.size:
# freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.ia.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
# elif freq_wts.size == self.f.size * self.n_acc:
# freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.f.size, -1), axis=0), self.ia.baselines.shape[0], axis=0)
# elif freq_wts.size == self.f.size * self.ia.baselines.shape[0]:
# freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.f.size), axis=2), self.n_acc, axis=2)
# elif freq_wts.size == self.f.size * self.ia.baselines.shape[0] * self.n_acc:
# freq_wts = freq_wts.reshape(self.ia.baselines.shape[0], self.f.size, self.n_acc)
# else:
# raise ValueError('window shape dimensions incompatible with number of channels and/or number of tiemstamps.')
# self.bp_wts = freq_wts
# if verbose:
# print('\tFrequency window weights assigned.')
# bw = self.df * self.f.size
# pc = self.ia.phase_center
# pc_coords = self.ia.phase_center_coords
# if pc_coords == 'hadec':
# pc_altaz = GEOM.hadec2altaz(pc, self.ia.latitude, units='degrees')
# pc_dircos = GEOM.altaz2dircos(pc_altaz, units='degrees')
# elif pc_coords == 'altaz':
# pc_dircos = GEOM.altaz2dircos(pc, units='degrees')
# npad = int(self.f.size * pad)
# lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=False)
# dlag = lags[1] - lags[0]
# clean_area = NP.zeros(self.f.size + npad, dtype=int)
# skyvis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.skyvis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
# vis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.vis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
# lag_kernel = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.bp, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
# ccomponents_noiseless = NP.zeros_like(skyvis_lag)
# ccres_noiseless = NP.zeros_like(skyvis_lag)
# ccomponents_noisy = NP.zeros_like(vis_lag)
# ccres_noisy = NP.zeros_like(vis_lag)
# for snap_iter in xrange(self.n_acc):
# progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Baselines '.format(self.ia.baselines.shape[0]), PGB.ETA()], maxval=self.ia.baselines.shape[0]).start()
# for bl_iter in xrange(self.ia.baselines.shape[0]):
# clean_area[NP.logical_and(lags <= self.horizon_delay_limits[snap_iter,bl_iter,1]+clean_window_buffer/bw, lags >= self.horizon_delay_limits[snap_iter,bl_iter,0]-clean_window_buffer/bw)] = 1
# cc_noiseless, info_noiseless = _gentle_clean(skyvis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], area=clean_area, stop_if_div=False, verbose=False, autoscale=True)
# ccomponents_noiseless[bl_iter,:,snap_iter] = cc_noiseless
# ccres_noiseless[bl_iter,:,snap_iter] = info_noiseless['res']
# cc_noisy, info_noisy = _gentle_clean(vis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], area=clean_area, stop_if_div=False, verbose=False, autoscale=True)
# ccomponents_noisy[bl_iter,:,snap_iter] = cc_noisy
# ccres_noisy[bl_iter,:,snap_iter] = info_noisy['res']
# progress.update(bl_iter+1)
# progress.finish()
# deta = lags[1] - lags[0]
# cc_skyvis = NP.fft.fft(ccomponents_noiseless, axis=1) * deta
# cc_skyvis_res = NP.fft.fft(ccres_noiseless, axis=1) * deta
# cc_vis = NP.fft.fft(ccomponents_noisy, axis=1) * deta
# cc_vis_res = NP.fft.fft(ccres_noisy, axis=1) * deta
# self.skyvis_lag = NP.fft.fftshift(skyvis_lag, axes=1)
# self.vis_lag = NP.fft.fftshift(vis_lag, axes=1)
# self.lag_kernel = NP.fft.fftshift(lag_kernel, axes=1)
# self.cc_skyvis_lag = NP.fft.fftshift(ccomponents_noiseless, axes=1)
# self.cc_skyvis_res_lag = NP.fft.fftshift(ccres_noiseless, axes=1)
# self.cc_vis_lag = NP.fft.fftshift(ccomponents_noisy, axes=1)
# self.cc_vis_res_lag = NP.fft.fftshift(ccres_noisy, axes=1)
# self.cc_skyvis_net_lag = self.cc_skyvis_lag + self.cc_skyvis_res_lag
# self.cc_vis_net_lag = self.cc_vis_lag + self.cc_vis_res_lag
# self.lags = NP.fft.fftshift(lags)
# self.cc_skyvis_freq = cc_skyvis
# self.cc_skyvis_res_freq = cc_skyvis_res
# self.cc_vis_freq = cc_vis
# self.cc_vis_res_freq = cc_vis_res
# self.cc_skyvis_net_freq = cc_skyvis + cc_skyvis_res
# self.cc_vis_net_freq = cc_vis + cc_vis_res
# self.clean_window_buffer = clean_window_buffer
#############################################################################
def delay_transform_allruns(self, vis, pad=1.0, freq_wts=None,
downsample=True, verbose=True):
"""
------------------------------------------------------------------------
Transforms the visibilities of multiple runs from frequency axis onto
delay (time) axis using an IFFT.
Inputs:
vis [numpy array] Visibilities which will be delay transformed.
It must be of shape (...,nbl,nchan,ntimes)
pad [scalar] Non-negative scalar indicating padding fraction
relative to the number of frequency channels. For e.g., a
pad of 1.0 pads the frequency axis with zeros of the same
width as the number of channels. After the delay transform,
the transformed visibilities are downsampled by a factor of
1+pad. If a negative value is specified, delay transform
will be performed with no padding
freq_wts [numpy vector or array] window shaping to be applied before
computing delay transform. It can either be a vector or size
equal to the number of channels (which will be applied to all
time instances for all baselines), or a nchan x n_snapshots
numpy array which will be applied to all baselines, or a
n_baselines x nchan numpy array which will be applied to all
timestamps, or a n_baselines x nchan x n_snapshots numpy
array or have shape identical to input vis. Default (None)
will not apply windowing and only the inherent bandpass will
be used.
downsample [boolean] If set to True (default), the delay transform
quantities will be downsampled by exactly the same factor
that was used in padding. For instance, if pad is set to
1.0, the downsampling will be by a factor of 2. If set to
False, no downsampling will be done even if the original
quantities were padded
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
Output:
Dictionary containing delay spectrum information. It contains the
following keys and values:
'lags' [numpy array] lags of the subband delay spectra with or
without resampling. If not resampled it is of size
nlags=nchan+npad where npad is the number of frequency
channels padded specified under the key 'npad'. If
resampled, it is of shape nlags where nlags is the number
of independent delay bins
'lag_kernel'
[numpy array] The delay kernel which is the result of the
bandpass shape and the spectral window used in determining
the delay spectrum. It is of shape
n_bl x n_win x nlags x n_t.
'vis_lag' [numpy array] delay spectra of visibilities, after
applying the frequency weights under the key 'freq_wts'. It
is of size n_win x (n1xn2x... n_runs dims) x n_bl x nlags x
x n_t.
------------------------------------------------------------------------
"""
if verbose:
print('Preparing to compute delay transform...\n\tChecking input parameters for compatibility...')
try:
vis
except NameError:
raise NameError('Input vis must be provided')
if not isinstance(vis, NP.ndarray):
raise TypeError('Input vis must be a numpy array')
elif vis.ndim < 3:
raise ValueError('Input vis must be at least 3-dimensional')
elif vis.shape[-3:] == (self.ia.baselines.shape[0],self.f.size,self.n_acc):
if vis.ndim == 3:
shp = (1,) + vis.shape
else:
shp = vis.shape
vis = vis.reshape(shp)
else:
raise ValueError('Input vis does not have compatible shape')
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if freq_wts is not None:
if freq_wts.shape == self.f.shape:
freq_wts = freq_wts.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+(1,-1,1))
elif freq_wts.shape == (self.f.size, self.n_acc):
freq_wts = freq_wts.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+(1,self.f.size,self.n_acc))
elif freq_wts.shape == (self.ia.baselines.shape[0], self.f.size):
freq_wts = freq_wts.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+(self.ia.baselines.shape[0],self.f.size,1))
elif freq_wts.shape == (self.ia.baselines.shape[0], self.f.size, self.n_acc):
freq_wts = freq_wts.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+(self.ia.baselines.shape[0],self.f.size,self.n_acc))
elif not freq_wts.shape != vis.shape:
raise ValueError('window shape dimensions incompatible with number of channels and/or number of tiemstamps.')
else:
freq_wts = self.bp_wts.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+self.bp_wts.shape)
bp = self.bp.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+self.bp.shape)
if verbose:
print('\tFrequency window weights assigned.')
if not isinstance(downsample, bool):
raise TypeError('Input downsample must be of boolean type')
if verbose:
print('\tInput parameters have been verified to be compatible.\n\tProceeding to compute delay transform.')
result = {}
result['freq_wts'] = freq_wts
result['pad'] = pad
result['lags'] = DSP.spectral_axis(int(self.f.size*(1+pad)), delx=self.df, use_real=False, shift=True)
if pad == 0.0:
result['vis_lag'] = DSP.FT1D(vis * bp * freq_wts, ax=-2, inverse=True, use_real=False, shift=True) * self.f.size * self.df
result['lag_kernel'] = DSP.FT1D(bp * freq_wts, ax=-2, inverse=True, use_real=False, shift=True) * self.f.size * self.df
if verbose:
print('\tDelay transform computed without padding.')
else:
npad = int(self.f.size * pad)
pad_shape = NP.zeros((len(vis.shape[:-3]),2), dtype=NP.int).tolist()
pad_shape += [[0,0], [0,npad], [0,0]]
result['vis_lag'] = DSP.FT1D(NP.pad(vis * bp * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['lag_kernel'] = DSP.FT1D(NP.pad(bp * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
if verbose:
print('\tDelay transform computed with padding fraction {0:.1f}'.format(pad))
if downsample:
result['vis_lag'] = DSP.downsampler(result['vis_lag'], 1+pad, axis=-2)
result['lag_kernel'] = DSP.downsampler(result['lag_kernel'], 1+pad, axis=-2)
result['lags'] = DSP.downsampler(result['lags'], 1+pad)
result['lags'] = result['lags'].flatten()
if verbose:
print('\tDelay transform products downsampled by factor of {0:.1f}'.format(1+pad))
print('delay_transform() completed successfully.')
return result
#############################################################################
def delayClean(self, pad=1.0, freq_wts=None, clean_window_buffer=1.0,
gain=0.1, maxiter=10000, threshold=5e-3,
threshold_type='relative', parallel=False, nproc=None,
verbose=True):
"""
------------------------------------------------------------------------
Transforms the visibilities from frequency axis onto delay (time) axis
using an IFFT and deconvolves the delay transform quantities along the
delay axis. This is performed for noiseless sky visibilities, thermal
noise in visibilities, and observed visibilities. This calls an in-house
module complex1dClean instead of the clean routine in AIPY module. It
can utilize parallelization
Inputs:
pad [scalar] Non-negative scalar indicating padding fraction
relative to the number of frequency channels. For e.g., a
pad of 1.0 pads the frequency axis with zeros of the same
width as the number of channels. If a negative value is
specified, delay transform will be performed with no padding
freq_wts [numpy vector or array] window shaping to be applied before
computing delay transform. It can either be a vector or size
equal to the number of channels (which will be applied to all
time instances for all baselines), or a nchan x n_snapshots
numpy array which will be applied to all baselines, or a
n_baselines x nchan numpy array which will be applied to all
timestamps, or a n_baselines x nchan x n_snapshots numpy
array. Default (None) will not apply windowing and only the
inherent bandpass will be used.
gain [scalar] gain factor to be applied while subtracting clean
component from residuals. This is the fraction of the maximum in
the residuals that will be subtracted. Must lie between 0 and 1.
A lower value will have a smoother convergence but take a longer
time to converge. Default=0.1
maxiter [scalar] maximum number of iterations for cleaning process. Will
terminate if the number of iterations exceed maxiter.
Default=10000
threshold
[scalar] represents the cleaning depth either as a fraction of
the maximum in the input (when thershold_type is set to
'relative') or the absolute value (when threshold_type is set
to 'absolute') in same units of input down to which inp should
be cleaned. Value must always be positive. When threshold_type
is set to 'relative', threshold mu st lie between 0 and 1.
Default=5e-3 (found to work well and converge fast) assuming
threshold_type is set to 'relative'
threshold_type
[string] represents the type of threshold specified by value in
input threshold. Accepted values are 'relative' and 'absolute'.
If set to 'relative' the threshold value is the fraction
(between 0 and 1) of maximum in input down to which it should
be cleaned. If set to 'asbolute' it is the actual value down to
which inp should be cleaned. Default='relative'
parallel [boolean] specifies if parallelization is to be invoked.
False (default) means only serial processing
nproc [integer] specifies number of independent processes to spawn.
Default = None, means automatically determines the number of
process cores in the system and use one less than that to
avoid locking the system for other processes. Applies only
if input parameter 'parallel' (see above) is set to True.
If nproc is set to a value more than the number of process
cores in the system, it will be reset to number of process
cores in the system minus one to avoid locking the system out
for other processes
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
------------------------------------------------------------------------
"""
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if freq_wts is not None:
if freq_wts.size == self.f.size:
freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.ia.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
elif freq_wts.size == self.f.size * self.n_acc:
freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.f.size, -1), axis=0), self.ia.baselines.shape[0], axis=0)
elif freq_wts.size == self.f.size * self.ia.baselines.shape[0]:
freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.f.size), axis=2), self.n_acc, axis=2)
elif freq_wts.size == self.f.size * self.ia.baselines.shape[0] * self.n_acc:
freq_wts = freq_wts.reshape(self.ia.baselines.shape[0], self.f.size, self.n_acc)
else:
raise ValueError('window shape dimensions incompatible with number of channels and/or number of tiemstamps.')
self.bp_wts = freq_wts
if verbose:
print('\tFrequency window weights assigned.')
bw = self.df * self.f.size
pc = self.ia.phase_center
pc_coords = self.ia.phase_center_coords
if pc_coords == 'hadec':
pc_altaz = GEOM.hadec2altaz(pc, self.ia.latitude, units='degrees')
pc_dircos = GEOM.altaz2dircos(pc_altaz, units='degrees')
elif pc_coords == 'altaz':
pc_dircos = GEOM.altaz2dircos(pc, units='degrees')
npad = int(self.f.size * pad)
lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=False)
dlag = lags[1] - lags[0]
clean_area = NP.zeros(self.f.size + npad, dtype=int)
skyvis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.skyvis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
vis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.vis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
lag_kernel = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
ccomponents_noiseless = NP.zeros_like(skyvis_lag)
ccres_noiseless = NP.zeros_like(skyvis_lag)
ccomponents_noisy = NP.zeros_like(vis_lag)
ccres_noisy = NP.zeros_like(vis_lag)
if parallel:
if nproc is None:
nproc = min(max(MP.cpu_count()-1, 1), self.ia.baselines.shape[0]*self.n_acc)
else:
nproc = min(max(MP.cpu_count()-1, 1), self.ia.baselines.shape[0]*self.n_acc, nproc)
list_of_skyvis_lag = []
list_of_vis_lag = []
list_of_dkern = []
list_of_cboxes = []
for bli in xrange(self.ia.baselines.shape[0]):
for ti in xrange(self.n_acc):
list_of_skyvis_lag += [skyvis_lag[bli,:,ti]]
list_of_vis_lag += [vis_lag[bli,:,ti]]
list_of_dkern += [lag_kernel[bli,:,ti]]
clean_area = NP.zeros(self.f.size + npad, dtype=int)
clean_area[NP.logical_and(lags <= self.horizon_delay_limits[ti,bli,1]+clean_window_buffer/bw, lags >= self.horizon_delay_limits[ti,bli,0]-clean_window_buffer/bw)] = 1
list_of_cboxes += [clean_area]
list_of_gains = [gain] * self.ia.baselines.shape[0]*self.n_acc
list_of_maxiter = [maxiter] * self.ia.baselines.shape[0]*self.n_acc
list_of_thresholds = [threshold] * self.ia.baselines.shape[0]*self.n_acc
list_of_threshold_types = [threshold_type] * self.ia.baselines.shape[0]*self.n_acc
list_of_verbosity = [verbose] * self.ia.baselines.shape[0]*self.n_acc
list_of_pid = range(self.ia.baselines.shape[0]*self.n_acc)
# list_of_pid = [None] * self.ia.baselines.shape[0]*self.n_acc
list_of_progressbars = [True] * self.ia.baselines.shape[0]*self.n_acc
list_of_progressbar_ylocs = NP.arange(self.ia.baselines.shape[0]*self.n_acc) % min(nproc, WM.term.height)
list_of_progressbar_ylocs = list_of_progressbar_ylocs.tolist()
pool = MP.Pool(processes=nproc)
list_of_noiseless_cleanstates = pool.map(complex1dClean_arg_splitter, IT.izip(list_of_skyvis_lag, list_of_dkern, list_of_cboxes, list_of_gains, list_of_maxiter, list_of_thresholds, list_of_threshold_types, list_of_verbosity, list_of_progressbars, list_of_pid, list_of_progressbar_ylocs))
list_of_noisy_cleanstates = pool.map(complex1dClean_arg_splitter, IT.izip(list_of_vis_lag, list_of_dkern, list_of_cboxes, list_of_gains, list_of_maxiter, list_of_thresholds, list_of_threshold_types, list_of_verbosity, list_of_progressbars, list_of_pid, list_of_progressbar_ylocs))
for bli in xrange(self.ia.baselines.shape[0]):
for ti in xrange(self.n_acc):
ind = bli * self.n_acc + ti
noiseless_cleanstate = list_of_noiseless_cleanstates[ind]
ccomponents_noiseless[bli,:,ti] = noiseless_cleanstate['cc']
ccres_noiseless[bli,:,ti] = noiseless_cleanstate['res']
noisy_cleanstate = list_of_noisy_cleanstates[ind]
ccomponents_noisy[bli,:,ti] = noisy_cleanstate['cc']
ccres_noisy[bli,:,ti] = noisy_cleanstate['res']
else:
for snap_iter in xrange(self.n_acc):
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Baselines '.format(self.ia.baselines.shape[0]), PGB.ETA()], maxval=self.ia.baselines.shape[0]).start()
for bl_iter in xrange(self.ia.baselines.shape[0]):
clean_area[NP.logical_and(lags <= self.horizon_delay_limits[snap_iter,bl_iter,1]+clean_window_buffer/bw, lags >= self.horizon_delay_limits[snap_iter,bl_iter,0]-clean_window_buffer/bw)] = 1
cleanstate = complex1dClean(skyvis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], cbox=clean_area, gain=gain, maxiter=maxiter, threshold=threshold, threshold_type=threshold_type, verbose=verbose)
ccomponents_noiseless[bl_iter,:,snap_iter] = cleanstate['cc']
ccres_noiseless[bl_iter,:,snap_iter] = cleanstate['res']
cleanstate = complex1dClean(vis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], cbox=clean_area, gain=gain, maxiter=maxiter, threshold=threshold, threshold_type=threshold_type, verbose=verbose)
ccomponents_noisy[bl_iter,:,snap_iter] = cleanstate['cc']
ccres_noisy[bl_iter,:,snap_iter] = cleanstate['res']
progress.update(bl_iter+1)
progress.finish()
deta = lags[1] - lags[0]
pad_factor = (1.0 + 1.0*npad/self.f.size) # to make sure visibilities after CLEANing are at the same amplitude level as before CLEANing
cc_skyvis = NP.fft.fft(ccomponents_noiseless, axis=1) * deta * pad_factor
cc_skyvis_res = NP.fft.fft(ccres_noiseless, axis=1) * deta * pad_factor
cc_vis = NP.fft.fft(ccomponents_noisy, axis=1) * deta * pad_factor
cc_vis_res = NP.fft.fft(ccres_noisy, axis=1) * deta * pad_factor
self.lags = lags
self.skyvis_lag = NP.fft.fftshift(skyvis_lag, axes=1)
self.vis_lag = NP.fft.fftshift(vis_lag, axes=1)
self.lag_kernel = NP.fft.fftshift(lag_kernel, axes=1)
self.cc_lag_kernel = NP.fft.fftshift(lag_kernel, axes=1)
self.cc_skyvis_lag = NP.fft.fftshift(ccomponents_noiseless, axes=1)
self.cc_skyvis_res_lag = NP.fft.fftshift(ccres_noiseless, axes=1)
self.cc_vis_lag = NP.fft.fftshift(ccomponents_noisy, axes=1)
self.cc_vis_res_lag = NP.fft.fftshift(ccres_noisy, axes=1)
self.cc_skyvis_net_lag = self.cc_skyvis_lag + self.cc_skyvis_res_lag
self.cc_vis_net_lag = self.cc_vis_lag + self.cc_vis_res_lag
self.cc_lags = NP.fft.fftshift(lags)
self.cc_skyvis_freq = cc_skyvis
self.cc_skyvis_res_freq = cc_skyvis_res
self.cc_vis_freq = cc_vis
self.cc_vis_res_freq = cc_vis_res
self.cc_skyvis_net_freq = cc_skyvis + cc_skyvis_res
self.cc_vis_net_freq = cc_vis + cc_vis_res
self.clean_window_buffer = clean_window_buffer
#############################################################################
def subband_delay_transform(self, bw_eff, freq_center=None, shape=None,
                            fftpow=None, pad=None, bpcorrect=False,
                            action=None, verbose=True):
    """
    ------------------------------------------------------------------------
    Compute delay transforms on multiple frequency sub-bands with specified
    spectral weights, for delay-CLEANed ('cc') and simulated ('sim')
    visibilities.

    Inputs:

    bw_eff      [dictionary] Keys 'cc' and 'sim' specify effective
                bandwidths (in Hz) of the selected frequency windows for
                the subband delay transform of CLEANed and simulated
                visibilities respectively. Values under each key can be a
                scalar, list or numpy array; a scalar is applied to all
                frequency windows under that key

    freq_center [dictionary] Keys 'cc' and 'sim' specify center frequencies
                (in Hz) of the selected frequency windows. Values can be a
                scalar, list or numpy array; a scalar is applied to all
                windows. Default=None uses the center frequency of the band
                for both keys. Centers must lie strictly inside the band

    shape       [dictionary] Keys 'cc' and 'sim' specify the frequency
                window shape as a string: 'rect'/'RECT' (rectangular),
                'bnw'/'BNW' (Blackman-Nuttall) or 'bhw'/'BHW'
                (Blackman-Harris). Default=None sets 'rect' for both keys

    fftpow      [dictionary] Keys 'cc' and 'sim' specify the power to which
                the FFT of the window is raised. Values must be
                non-negative scalars. Default=None sets 1.0 for both keys

    pad         [dictionary] Keys 'cc' and 'sim' specify the zero-padding
                fraction relative to the number of frequency channels.
                Values must be scalars; negative values are reset to 0.0
                (no padding). After the delay transform the transformed
                visibilities are downsampled by a factor 1+pad.
                Default=None sets 1.0 for both keys

    bpcorrect   [boolean] Applicable only to delay-CLEANed visibilities.
                If True, compute the inverse of the bandpass weights that
                were applied during the original delay transform (flattens
                the bandpass after delay CLEAN). Default=False

    action      [string or None] If None (default), only update attributes
                subband_delay_spectra and subband_delay_spectra_resampled.
                If 'return_oversampled', also return the full-delay-
                resolution dictionary; if 'return_resampled', return the
                resampled/downsampled dictionary

    verbose     [boolean] If True (default), print progress messages

    Output:

    Dictionary with top-level keys 'cc' and 'sim' (the 'cc' key is present
    only if delay CLEAN products exist). Under each is a dictionary
    describing the n_win sub-band delay spectra with keys 'freq_center',
    'freq_wts', 'bw_eff', 'shape', 'npad', 'lags', 'lag_kernel',
    'lag_corr_length', 'skyvis_lag' and 'vis_lag'; additionally
    'skyvis_res_lag', 'vis_res_lag', 'skyvis_net_lag', 'vis_net_lag' and
    'bpcorrect' under 'cc', and 'vis_noise_lag' under 'sim'. Spectra are of
    shape n_bl x n_win x (nchan+npad) x n_t in the oversampled product and
    n_bl x n_win x nlags x n_t in the resampled product.
    ------------------------------------------------------------------------
    """
    # bw_eff is a required positional argument, so it is always bound; the
    # historical try/except NameError guard was dead code and is dropped.
    if not isinstance(bw_eff, dict):
        raise TypeError('Effective bandwidth must be specified as a dictionary')
    for key in ['cc', 'sim']:
        if key in bw_eff:
            if not isinstance(bw_eff[key], (int, float, list, NP.ndarray)):
                raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
            bw_eff[key] = NP.asarray(bw_eff[key]).reshape(-1)
            if NP.any(bw_eff[key] <= 0.0):
                raise ValueError('All values in effective bandwidth must be strictly positive')

    if freq_center is None:
        # Default to the band center. Integer (floor) division is required
        # for indexing under Python 3 ('/' would yield a float index).
        freq_center = {key: NP.asarray(self.f[self.f.size // 2]).reshape(-1) for key in ['cc', 'sim']}
    elif isinstance(freq_center, dict):
        for key in ['cc', 'sim']:
            if isinstance(freq_center[key], (int, float, list, NP.ndarray)):
                freq_center[key] = NP.asarray(freq_center[key]).reshape(-1)
                if NP.any((freq_center[key] <= self.f.min()) | (freq_center[key] >= self.f.max())):
                    raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
            else:
                raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')
    else:
        raise TypeError('Input frequency center must be specified as a dictionary')

    # Broadcast scalar bandwidths/centers against each other per key
    for key in ['cc', 'sim']:
        if (bw_eff[key].size == 1) and (freq_center[key].size > 1):
            bw_eff[key] = NP.repeat(bw_eff[key], freq_center[key].size)
        elif (bw_eff[key].size > 1) and (freq_center[key].size == 1):
            freq_center[key] = NP.repeat(freq_center[key], bw_eff[key].size)
        elif bw_eff[key].size != freq_center[key].size:
            raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')

    if shape is not None:
        if not isinstance(shape, dict):
            raise TypeError('Window shape must be specified as a dictionary')
        for key in ['cc', 'sim']:
            if not isinstance(shape[key], str):
                raise TypeError('Window shape must be a string')
            if shape[key] not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
                raise ValueError('Invalid value for window shape specified.')
    else:
        shape = {key: 'rect' for key in ['cc', 'sim']}

    if fftpow is None:
        fftpow = {key: 1.0 for key in ['cc', 'sim']}
    else:
        if not isinstance(fftpow, dict):
            raise TypeError('Power to raise FFT of window by must be specified as a dictionary')
        for key in ['cc', 'sim']:
            if not isinstance(fftpow[key], (int, float)):
                raise TypeError('Power to raise window FFT by must be a scalar value.')
            if fftpow[key] < 0.0:
                raise ValueError('Power for raising FFT of window by must be positive.')

    if pad is None:
        pad = {key: 1.0 for key in ['cc', 'sim']}
    else:
        if not isinstance(pad, dict):
            raise TypeError('Padding for delay transform must be specified as a dictionary')
        for key in ['cc', 'sim']:
            if not isinstance(pad[key], (int, float)):
                raise TypeError('pad fraction must be a scalar value.')
            if pad[key] < 0.0:
                pad[key] = 0.0
                if verbose:
                    print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')

    if not isinstance(bpcorrect, bool):
        raise TypeError('Input keyword bpcorrect must be of boolean type')

    vis_noise_freq = NP.copy(self.ia.vis_noise_freq)
    result = {}
    for key in ['cc', 'sim']:
        # Process 'cc' only when delay-CLEAN products exist
        if (key == 'sim') or ((key == 'cc') and (self.cc_lags is not None)):
            # NP.float_ / NP.int aliases were removed from numpy; use the
            # equivalent NP.float64 / builtin int instead.
            freq_wts = NP.empty((bw_eff[key].size, self.f.size), dtype=NP.float64)
            frac_width = DSP.window_N2width(n_window=None, shape=shape[key], fftpow=fftpow[key], area_normalize=False, power_normalize=True)
            window_loss_factor = 1 / frac_width
            n_window = NP.round(window_loss_factor * bw_eff[key] / self.df).astype(int)
            # Snap requested centers to the nearest simulated channel
            ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center[key].reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
            sortind = NP.argsort(ind_channels)
            ind_freq_center = ind_freq_center[sortind]
            ind_channels = ind_channels[sortind]
            dfrequency = dfrequency[sortind]
            n_window = n_window[sortind]
            for i, ind_chan in enumerate(ind_channels):
                window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape[key], fftpow=fftpow[key], centering=True, peak=None, area_normalize=False, power_normalize=True)
                window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - n_window[i] // 2)
                ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
                sind = NP.argsort(ind_window_chans)
                ind_window_chans = ind_window_chans[sind]
                ind_chans = ind_chans[sind]
                dfreq = dfreq[sind]
                window = window[ind_window_chans]
                # Zero-pad the window out to the full band so each row of
                # freq_wts spans all nchan channels
                window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
                freq_wts[i,:] = window
            bpcorrection_factor = 1.0
            npad = int(self.f.size * pad[key])
            lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
            if key == 'cc':
                # Truncate CLEANed spectra to the unpadded channel count
                skyvis_freq = self.cc_skyvis_freq[:,:self.f.size,:]
                vis_freq = self.cc_vis_freq[:,:self.f.size,:]
                skyvis_res_freq = self.cc_skyvis_res_freq[:,:self.f.size,:]
                vis_res_freq = self.cc_vis_res_freq[:,:self.f.size,:]
                skyvis_net_freq = self.cc_skyvis_net_freq[:,:self.f.size,:]
                vis_net_freq = self.cc_vis_net_freq[:,:self.f.size,:]
                if bpcorrect:
                    bpcorrection_factor = NP.where(NP.abs(self.bp_wts)>0.0, 1/self.bp_wts, 0.0)
                    bpcorrection_factor = bpcorrection_factor[:,NP.newaxis,:,:]
                # NOTE(review): bpcorrection_factor is computed here but not
                # multiplied into any product below — confirm whether it
                # should scale the weighted spectra before the transform.
            else:
                skyvis_freq = NP.copy(self.ia.skyvis_freq)
                vis_freq = NP.copy(self.ia.vis_freq)

            # Inverse FT along frequency (axis 2) after applying bandpass and
            # sub-band weights; scaling by (nchan+npad)*df converts to a
            # delay-spectrum normalization
            skyvis_lag = DSP.FT1D(NP.pad(skyvis_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
            vis_lag = DSP.FT1D(NP.pad(vis_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
            vis_noise_lag = DSP.FT1D(NP.pad(vis_noise_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
            lag_kernel = DSP.FT1D(NP.pad(self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df

            result[key] = {'freq_center': freq_center[key], 'shape': shape[key], 'freq_wts': freq_wts, 'bw_eff': bw_eff[key], 'npad': npad, 'lags': lags, 'skyvis_lag': skyvis_lag, 'vis_lag': vis_lag, 'lag_kernel': lag_kernel, 'lag_corr_length': self.f.size / NP.sum(freq_wts, axis=1)}
            if key == 'cc':
                skyvis_res_lag = DSP.FT1D(NP.pad(skyvis_res_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
                vis_res_lag = DSP.FT1D(NP.pad(vis_res_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
                skyvis_net_lag = DSP.FT1D(NP.pad(skyvis_net_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
                vis_net_lag = DSP.FT1D(NP.pad(vis_net_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
                result[key]['vis_res_lag'] = vis_res_lag
                result[key]['skyvis_res_lag'] = skyvis_res_lag
                result[key]['vis_net_lag'] = vis_net_lag
                result[key]['skyvis_net_lag'] = skyvis_net_lag
                result[key]['bpcorrect'] = bpcorrect
            else:
                result[key]['vis_noise_lag'] = vis_noise_lag

    if verbose:
        print('\tSub-band(s) delay transform computed')
    self.subband_delay_spectra = result

    result_resampled = {}
    for key in ['cc', 'sim']:
        if key in result:
            result_resampled[key] = {}
            result_resampled[key]['freq_center'] = result[key]['freq_center']
            result_resampled[key]['bw_eff'] = result[key]['bw_eff']
            # Use the per-key padded length stored in result[key]['npad'];
            # previously this read the stale loop variable npad from the
            # last iteration above, which is wrong when pad['cc'] != pad['sim']
            downsample_factor = NP.min((self.f.size + result[key]['npad']) * self.df / result_resampled[key]['bw_eff'])
            result_resampled[key]['lags'] = DSP.downsampler(result[key]['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
            result_resampled[key]['lag_kernel'] = DSP.downsampler(result[key]['lag_kernel'], downsample_factor, axis=2, method='interp', kind='linear')
            result_resampled[key]['skyvis_lag'] = DSP.downsampler(result[key]['skyvis_lag'], downsample_factor, axis=2, method='FFT')
            result_resampled[key]['vis_lag'] = DSP.downsampler(result[key]['vis_lag'], downsample_factor, axis=2, method='FFT')
            dlag = result_resampled[key]['lags'][1] - result_resampled[key]['lags'][0]
            result_resampled[key]['lag_corr_length'] = (1/result[key]['bw_eff']) / dlag
            if key == 'cc':
                result_resampled[key]['skyvis_res_lag'] = DSP.downsampler(result[key]['skyvis_res_lag'], downsample_factor, axis=2, method='FFT')
                result_resampled[key]['vis_res_lag'] = DSP.downsampler(result[key]['vis_res_lag'], downsample_factor, axis=2, method='FFT')
                result_resampled[key]['skyvis_net_lag'] = DSP.downsampler(result[key]['skyvis_net_lag'], downsample_factor, axis=2, method='FFT')
                result_resampled[key]['vis_net_lag'] = DSP.downsampler(result[key]['vis_net_lag'], downsample_factor, axis=2, method='FFT')
            else:
                result_resampled[key]['vis_noise_lag'] = DSP.downsampler(result[key]['vis_noise_lag'], downsample_factor, axis=2, method='FFT')

    if verbose:
        print('\tDownsampled Sub-band(s) delay transform computed')
    self.subband_delay_spectra_resampled = result_resampled

    if action is not None:
        # Any other action value is silently ignored (attributes are
        # already updated), matching historical behavior
        if action == 'return_oversampled':
            return result
        if action == 'return_resampled':
            return result_resampled
#############################################################################
def subband_delay_transform_allruns(self, vis, bw_eff, freq_center=None,
                                    shape=None, fftpow=None, pad=None,
                                    bpcorrect=False, action=None,
                                    verbose=True):
    """
    ------------------------------------------------------------------------
    Compute delay transforms on multiple frequency sub-bands with specified
    weights for multiple realizations of visibilities.

    Inputs:

    vis         [numpy array] Visibilities to be delay transformed. Must be
                of shape (..., nbl, nchan, ntimes); a 3D array is treated
                as a single run

    bw_eff      [scalar, list or numpy array] Effective bandwidths (in Hz)
                of the selected frequency windows. A scalar is applied to
                all frequency windows

    freq_center [scalar, list or numpy array] Center frequencies (in Hz) of
                the selected frequency windows. A scalar is applied to all
                windows. Default=None uses the center frequency of the
                band. Centers must lie strictly inside the band

    shape       [string] Frequency window shape: 'rect'/'RECT'
                (rectangular), 'bnw'/'BNW' (Blackman-Nuttall) or
                'bhw'/'BHW' (Blackman-Harris). Default=None sets 'rect'

    fftpow      [scalar] Power to which the FFT of the window is raised.
                Must be non-negative. Default=None sets 1.0

    pad         [scalar] Zero-padding fraction relative to the number of
                frequency channels; negative values are reset to 0.0 (no
                padding). Default=None sets 1.0

    bpcorrect   [boolean] Accepted for interface compatibility; not used in
                this routine

    action      [string or None] If 'return_oversampled', return the
                dictionary of full-delay-resolution quantities. If None
                (default) or 'return_resampled', return the dictionary of
                resampled/downsampled quantities

    verbose     [boolean] If True (default), print progress messages

    Output:

    Dictionary describing the n_win sub-band delay spectra with keys
    'freq_center', 'shape', 'freq_wts', 'bw_eff', 'npad', 'lags',
    'vis_lag', 'lag_kernel' and 'lag_corr_length'. For
    action='return_oversampled', 'vis_lag' and 'lag_kernel' are of shape
    n_win x (n1 x n2 x ... n_runs dims) x n_bl x (nchan+npad) x n_t; for
    the resampled output the delay axis has nlags independent bins instead.
    ------------------------------------------------------------------------
    """
    # vis and bw_eff are required positional arguments, so the historical
    # try/except NameError guard was dead code and is dropped.
    if not isinstance(vis, NP.ndarray):
        raise TypeError('Input vis must be a numpy array')
    elif vis.ndim < 3:
        raise ValueError('Input vis must be at least 3-dimensional')
    elif vis.shape[-3:] == (self.ia.baselines.shape[0], self.f.size, self.n_acc):
        # Promote a single-run 3D array to 4D with a leading unit axis
        if vis.ndim == 3:
            shp = (1,) + vis.shape
        else:
            shp = vis.shape
        vis = vis.reshape(shp)
    else:
        raise ValueError('Input vis does not have compatible shape')

    if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
        raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
    bw_eff = NP.asarray(bw_eff).reshape(-1)
    if NP.any(bw_eff <= 0.0):
        raise ValueError('All values in effective bandwidth must be strictly positive')

    if freq_center is None:
        # Integer (floor) division is required for indexing under Python 3
        # ('/' would yield a float index)
        freq_center = NP.asarray(self.f[self.f.size // 2]).reshape(-1)
    elif isinstance(freq_center, (int, float, list, NP.ndarray)):
        freq_center = NP.asarray(freq_center).reshape(-1)
        if NP.any((freq_center <= self.f.min()) | (freq_center >= self.f.max())):
            raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
    else:
        raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')

    # Broadcast scalar bandwidths/centers against each other
    if (bw_eff.size == 1) and (freq_center.size > 1):
        bw_eff = NP.repeat(bw_eff, freq_center.size)
    elif (bw_eff.size > 1) and (freq_center.size == 1):
        freq_center = NP.repeat(freq_center, bw_eff.size)
    elif bw_eff.size != freq_center.size:
        raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')

    if shape is not None:
        if not isinstance(shape, str):
            raise TypeError('Window shape must be a string')
        if shape.lower() not in ['rect', 'bhw', 'bnw']:
            raise ValueError('Invalid value for window shape specified.')
    else:
        shape = 'rect'

    if fftpow is None:
        fftpow = 1.0
    else:
        if not isinstance(fftpow, (int, float)):
            raise TypeError('Power to raise window FFT by must be a scalar value.')
        if fftpow < 0.0:
            raise ValueError('Power for raising FFT of window by must be positive.')

    if pad is None:
        pad = 1.0
    else:
        if not isinstance(pad, (int, float)):
            raise TypeError('pad fraction must be a scalar value.')
        if pad < 0.0:
            pad = 0.0
            if verbose:
                print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')

    result = {}
    # NP.float_ / NP.int aliases were removed from numpy; use the
    # equivalent NP.float64 / builtin int instead.
    freq_wts = NP.empty((bw_eff.size, self.f.size), dtype=NP.float64)
    frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
    window_loss_factor = 1 / frac_width
    n_window = NP.round(window_loss_factor * bw_eff / self.df).astype(int)
    # Snap requested centers to the nearest simulated channel
    ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
    sortind = NP.argsort(ind_channels)
    ind_freq_center = ind_freq_center[sortind]
    ind_channels = ind_channels[sortind]
    dfrequency = dfrequency[sortind]
    n_window = n_window[sortind]
    for i, ind_chan in enumerate(ind_channels):
        window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
        window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - n_window[i] // 2)
        ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
        sind = NP.argsort(ind_window_chans)
        ind_window_chans = ind_window_chans[sind]
        ind_chans = ind_chans[sind]
        dfreq = dfreq[sind]
        window = window[ind_window_chans]
        # Zero-pad the window out to the full band so each row of freq_wts
        # spans all nchan channels
        window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
        freq_wts[i,:] = window

    # Insert unit axes so weights and bandpass broadcast across the
    # arbitrary leading "runs" dimensions of vis
    freq_wts = freq_wts.reshape((bw_eff.size,) + tuple(NP.ones(len(vis.shape[:-3]), dtype=int)) + (1, self.f.size, 1))
    bp = self.bp.reshape(tuple(NP.ones(len(vis.shape[:-3]), dtype=int)) + self.bp.shape)
    npad = int(self.f.size * pad)
    lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
    # Pad only the frequency axis (second from last)
    pad_shape = [[0,0]] + NP.zeros((len(vis.shape[:-3]), 2), dtype=int).tolist()
    pad_shape += [[0,0], [0,npad], [0,0]]
    vis_lag = DSP.FT1D(NP.pad(vis[NP.newaxis,...] * bp[NP.newaxis,...] * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
    lag_kernel = DSP.FT1D(NP.pad(bp[NP.newaxis,...] * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
    result = {'freq_center': freq_center, 'shape': shape, 'freq_wts': freq_wts, 'bw_eff': bw_eff, 'npad': npad, 'lags': lags, 'vis_lag': vis_lag, 'lag_kernel': lag_kernel, 'lag_corr_length': self.f.size / NP.squeeze(NP.sum(freq_wts, axis=-2))}
    if verbose:
        print('\tSub-band(s) delay transform computed')

    # Bug fix: the default (action=None) is the resampled product, as
    # documented. The previous test ('if action is not None') clobbered an
    # explicit 'return_oversampled' request and returned nothing for None.
    if action is None:
        action = 'return_resampled'
    if action == 'return_oversampled':
        return result
    elif action == 'return_resampled':
        downsample_factor = NP.min((self.f.size + npad) * self.df / result['bw_eff'])
        result['lags'] = DSP.downsampler(result['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
        result['lag_kernel'] = DSP.downsampler(result['lag_kernel'], downsample_factor, axis=-2, method='interp', kind='linear')
        result['vis_lag'] = DSP.downsampler(result['vis_lag'], downsample_factor, axis=-2, method='FFT')
        dlag = result['lags'][1] - result['lags'][0]
        result['lag_corr_length'] = (1/result['bw_eff']) / dlag
        if verbose:
            print('\tDownsampled Sub-band(s) delay transform computed')
        return result
    else:
        raise ValueError('Invalid value specified for keyword input action')
#############################################################################
def subband_delay_transform_closure_phase(self, bw_eff, cpinfo=None,
                                          antenna_triplets=None,
                                          specsmooth_info=None,
                                          delay_filter_info=None,
                                          spectral_window_info=None,
                                          freq_center=None, shape=None,
                                          fftpow=None, pad=None, action=None,
                                          verbose=True):

    """
    ------------------------------------------------------------------------
    Computes delay transform of closure phases on antenna triplets on
    multiple frequency sub-bands with specified weights. It will have units
    of Hz

    Inputs:

    bw_eff     [scalar or numpy array] effective bandwidths (in Hz) on the
               selected frequency windows for subband delay transform of
               closure phases. If a scalar value is provided, the same
               will be applied to all frequency windows

    cpinfo     [dictionary] If set to None, it will be determined based on
               other inputs. Otherwise, it will be used directly. The
               dictionary will contain the following keys and values:
               'closure_phase_skyvis' [numpy array] [optional] Closure
                                      phases (in radians) for the given
                                      antenna triplets from the noiseless
                                      visibilities. It is of shape
                                      ntriplets x ... x nchan x ntimes
               'closure_phase_vis'    [numpy array] [optional] Closure
                                      phases (in radians) for the given
                                      antenna triplets for noisy
                                      visibilities. It is of shape
                                      ntriplets x ... x nchan x ntimes
               'closure_phase_noise'  [numpy array] [optional] Closure
                                      phases (in radians) for the given
                                      antenna triplets for thermal noise
                                      in visibilities. It is of shape
                                      ntriplets x ... x nchan x ntimes
               'antenna_triplets'     [list of tuples] List of
                                      three-element tuples of antenna IDs
                                      for which the closure phases are
                                      calculated.
               'baseline_triplets'    [numpy array] List of 3x3 numpy
                                      arrays. Each 3x3 unit in the list
                                      represents triplets of baseline
                                      vectors where the three rows denote
                                      the three baselines in the triplet
                                      and the three columns define the x-,
                                      y- and z-components of the triplet.
                                      The number of 3x3 unit elements in
                                      the list will equal the number of
                                      elements in the list under key
                                      'antenna_triplets'.

    antenna_triplets
               [list of tuples] List of antenna ID triplets where each
               triplet is given as a tuple. If set to None (default), all
               the unique triplets based on the antenna layout attribute
               in class InterferometerArray

    specsmooth_info
               [NoneType or dictionary] Spectral smoothing window to be
               applied prior to the delay transform. If set to None, no
               smoothing is done. This is usually set if spectral
               smoothing is to be done such as in the case of RFI. The
               smoothing window parameters are specified using the
               following keys and values:
               'op_type'     [string] Smoothing operation type.
                             Default='median' (currently accepts only
                             'median' or 'interp').
               'window_size' [integer] Size of smoothing window (in
                             pixels) along frequency axis. Applies only
                             if op_type is set to 'median'
               'maskchans'   [NoneType or numpy array] Numpy boolean array
                             of size nchan. False entries imply those
                             channels are not masked and will be used in
                             in interpolation while True implies they are
                             masked and will not be used in determining the
                             interpolation function. If set to None, all
                             channels are assumed to be unmasked (False).
               'evalchans'   [NoneType or numpy array] Channel numbers at
                             which visibilities are to be evaluated. Will
                             be useful for filling in RFI flagged channels.
                             If set to None, all channels will be evaluated
               'noiseRMS'    [NoneType or scalar or numpy array] If set to
                             None (default), the rest of the parameters are
                             used in determining the RMS of thermal noise.
                             If specified as scalar, all other parameters
                             will be ignored in estimating noiseRMS and
                             this value will be used instead. If specified
                             as a numpy array, it must be of shape
                             broadcastable to (nbl,nchan,ntimes). So
                             accepted shapes can be (1,1,1), (1,1,ntimes),
                             (1,nchan,1), (nbl,1,1), (1,nchan,ntimes),
                             (nbl,nchan,1), (nbl,1,ntimes), or
                             (nbl,nchan,ntimes).

    delay_filter_info
               [NoneType or dictionary] Info containing delay filter
               parameters. If set to None (default), no delay filtering is
               performed. Otherwise, delay filter is applied on each of the
               visibilities in the triplet before computing the closure
               phases. The delay filter parameters are specified in a
               dictionary as follows:
               'type'    [string] 'horizon' (default) or 'regular'. If
                         set to 'horizon', the horizon delay limits are
                         estimated from the respective baseline lengths
                         in the triplet. If set to 'regular', the extent
                         of the filter is determined by the 'min' and
                         'width' keys (see below).
               'min'     [scalar] Non-negative number (in seconds) that
                         specifies the minimum delay in the filter span.
                         If not specified, it is assumed to be 0. If
                         'type' is set to 'horizon', the 'min' is ignored
                         and set to 0.
               'width'   [scalar] Non-negative number (in numbers of
                         inverse bandwidths). If 'type' is set to
                         'horizon', the width represents the delay
                         buffer beyond the horizon. If 'type' is set to
                         'regular', this number has to be positive and
                         determines the span of the filter starting from
                         the minimum delay in key 'min'.
               'mode'    [string] 'discard' (default) or 'retain'. If set
                         to 'discard', the span defining the filter is
                         discarded and the rest retained. If set to
                         'retain', the span defining the filter is
                         retained and the rest discarded. For example,
                         if 'type' is set to 'horizon' and 'mode' is set
                         to 'discard', the horizon-to-horizon is
                         filtered out (discarded).

    spectral_window_info
               [NoneType or dictionary] Spectral window parameters to
               determine the spectral weights and apply to the visibilities
               in the frequency domain before filtering in the delay domain.
               THESE PARAMETERS ARE APPLIED ON THE INDIVIDUAL VISIBILITIES
               THAT GO INTO THE CLOSURE PHASE. THESE ARE NOT TO BE CONFUSED
               WITH THE PARAMETERS THAT WILL BE USED IN THE ACTUAL DELAY
               TRANSFORM OF CLOSURE PHASE SPECTRA WHICH ARE SPECIFIED
               SEPARATELY FURTHER BELOW.
               If set to None (default), unity spectral weights are applied.
               If spectral weights are to be applied, it must be a provided
               as a dictionary with the following keys and values:
               bw_eff       [scalar] effective bandwidths (in Hz) for the
                            spectral window
               freq_center  [scalar] frequency center (in Hz) for the
                            spectral window
               shape        [string] frequency window shape for the
                            spectral window. Accepted values are 'rect' or
                            'RECT' (for rectangular), 'bnw' and 'BNW' (for
                            Blackman-Nuttall), and 'bhw' or 'BHW' (for
                            Blackman-Harris). Default=None sets it to 'rect'
               fftpow       [scalar] power to which the FFT of the window
                            will be raised. The value must be a positive
                            scalar.

    freq_center
               [scalar, list or numpy array] frequency centers (in Hz) of
               the selected frequency windows for subband delay transform
               of closure phases. The value can be a scalar, list or numpy
               array. If a scalar is provided, the same will be applied to
               all frequency windows. Default=None uses the center
               frequency from the class attribute named channels

    shape      [string] frequency window shape for subband delay transform
               of closure phases. Accepted values for the string are
               'rect' or 'RECT' (for rectangular), 'bnw' and 'BNW' (for
               Blackman-Nuttall), and 'bhw' or 'BHW' (for
               Blackman-Harris). Default=None sets it to 'rect'
               (rectangular window)

    fftpow     [scalar] the power to which the FFT of the window will be
               raised. The value must be a positive scalar. Default = 1.0

    pad        [scalar] padding fraction relative to the number of
               frequency channels for closure phases. Value must be a
               non-negative scalar. For e.g., a pad of 1.0 pads the
               frequency axis with zeros of the same width as the number
               of channels. After the delay transform, the transformed
               closure phases are downsampled by a factor of 1+pad. If a
               negative value is specified, delay transform will be
               performed with no padding. Default=None sets to padding
               factor to 1.0

    action     [string or None] If set to None (default) just updates the
               attribute. If set to 'return_oversampled' it returns the
               output dictionary corresponding to oversampled delay space
               quantities with full resolution in delay space. If set to
               None (default) or 'return_resampled', it returns the output
               dictionary corresponding to resampled or downsampled delay
               space quantities.

    verbose    [boolean] If set to True (default), print diagnostic and
               progress messages. If set to False, no such messages are
               printed.

    Output:

    If keyword input action is set to 'return_oversampled', the following
    output is returned. The output is a dictionary that contains information
    about delay spectra of different frequency sub-bands (n_win in number)
    under the following keys:
    'antenna_triplets'
                [list of tuples] List of antenna ID triplets where each
                triplet is given as a tuple. Closure phase delay spectra in
                subbands is computed for each of these antenna triplets
    'baseline_triplets'
                [numpy array] List of 3x3 numpy arrays. Each 3x3
                unit in the list represents triplets of baseline
                vectors where the three rows denote the three
                baselines in the triplet and the three columns
                define the x-, y- and z-components of the
                triplet. The number of 3x3 unit elements in the
                list will equal the number of elements in the
                list under key 'antenna_triplets'. Closure phase delay
                spectra in subbands is computed for each of these baseline
                triplets which correspond to the antenna triplets
    'freq_center'
                [numpy array] contains the center frequencies
                (in Hz) of the frequency subbands of the subband
                delay spectra. It is of size n_win. It is roughly
                equivalent to redshift(s)
    'freq_wts'  [numpy array] Contains frequency weights applied
                on each frequency sub-band during the subband delay
                transform. It is of size n_win x nchan.
    'bw_eff'    [numpy array] contains the effective bandwidths
                (in Hz) of the subbands being delay transformed. It
                is of size n_win. It is roughly equivalent to width
                in redshift or along line-of-sight
    'shape'     [string] shape of the window function applied.
                Accepted values are 'rect' (rectangular), 'bhw'
                (Blackman-Harris), 'bnw' (Blackman-Nuttall).
    'npad'      [scalar] Number of zero-padded channels before
                performing the subband delay transform.
    'lags'      [numpy array] lags of the subband delay spectra
                after padding in frequency during the transform. It
                is of size nchan+npad where npad is the number of
                frequency channels padded specified under the key
                'npad'
    'lag_kernel'
                [numpy array] delay transform of the frequency
                weights under the key 'freq_wts'. It is of size
                n_triplets x ... x n_win x (nchan+npad) x n_t.
    'lag_corr_length'
                [numpy array] It is the correlation timescale (in
                pixels) of the subband delay spectra. It is
                proportional to inverse of effective bandwidth. It
                is of size n_win. The unit size of a pixel is
                determined by the difference between adjacent pixels
                in lags under key 'lags' which in turn is
                effectively inverse of the total bandwidth
                (nchan x df) simulated.
    'closure_phase_skyvis'
                [numpy array] subband delay spectra of closure phases
                of noiseless sky visibilities from the specified
                antenna triplets. It is of size n_triplets x ... n_win x
                nlags x n_t. It is in units of Hz
    'closure_phase_vis'
                [numpy array] subband delay spectra of closure phases
                of noisy sky visibilities from the specified antenna
                triplets. It is of size n_triplets x ... x n_win x
                nlags x n_t. It is in units of Hz
    'closure_phase_noise'
                [numpy array] subband delay spectra of closure phases
                of noise visibilities from the specified antenna triplets.
                It is of size n_triplets x ... x n_win x nlags x n_t. It
                is in units of Hz

    If action is set to 'return_resampled', the same dictionary structure
    is returned except that 'lags', 'lag_kernel', 'lag_corr_length' and the
    'closure_phase_*' keys hold quantities resampled (downsampled) to nlags
    independent delay bins, where nlags is determined by the effective
    bandwidth, and 'lag_corr_length' is expressed in units of the resampled
    lag spacing.

    Raises:

    TypeError / ValueError for invalid inputs, and KeyError if cpinfo does
    not carry any closure phase key.
    ------------------------------------------------------------------------
    """

    # --- Validate effective bandwidth(s) ---
    # NOTE: bw_eff is a required positional parameter, so it is always
    # bound; the old try/except NameError guard was dead code.
    if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
        raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
    bw_eff = NP.asarray(bw_eff).reshape(-1)
    if NP.any(bw_eff <= 0.0):
        raise ValueError('All values in effective bandwidth must be strictly positive')

    # --- Validate frequency center(s) ---
    if freq_center is None:
        # BUGFIX: use integer floor division; self.f.size/2 is a float in
        # Python 3 and is invalid as an array index
        freq_center = NP.asarray(self.f[self.f.size//2]).reshape(-1)
    elif isinstance(freq_center, (int, float, list, NP.ndarray)):
        freq_center = NP.asarray(freq_center).reshape(-1)
        if NP.any((freq_center <= self.f.min()) | (freq_center >= self.f.max())):
            raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
    else:
        raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')

    # Broadcast scalar bw_eff/freq_center against each other
    if (bw_eff.size == 1) and (freq_center.size > 1):
        bw_eff = NP.repeat(bw_eff, freq_center.size)
    elif (bw_eff.size > 1) and (freq_center.size == 1):
        freq_center = NP.repeat(freq_center, bw_eff.size)
    elif bw_eff.size != freq_center.size:
        raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')

    # --- Validate window shape, fftpow and pad ---
    if shape is not None:
        if not isinstance(shape, str):
            raise TypeError('Window shape must be a string')
        if shape not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
            raise ValueError('Invalid value for window shape specified.')
    else:
        shape = 'rect'

    if fftpow is None:
        fftpow = 1.0
    else:
        if not isinstance(fftpow, (int, float)):
            raise TypeError('Power to raise window FFT by must be a scalar value.')
        # NOTE(review): fftpow == 0.0 passes this check although the
        # message says "positive"; preserved for backward compatibility
        if fftpow < 0.0:
            raise ValueError('Power for raising FFT of window by must be positive.')

    if pad is None:
        pad = 1.0
    else:
        if not isinstance(pad, (int, float)):
            raise TypeError('pad fraction must be a scalar value.')
        if pad < 0.0:
            pad = 0.0
            if verbose:
                print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')

    # --- Obtain closure phase info ---
    if cpinfo is not None:
        if not isinstance(cpinfo, dict):
            raise TypeError('Input cpinfo must be a dictionary')
    else:
        cpinfo = self.ia.getClosurePhase(antenna_triplets=antenna_triplets, specsmooth_info=specsmooth_info, delay_filter_info=delay_filter_info, spectral_window_info=spectral_window_info)

    result = {'antenna_triplets': cpinfo['antenna_triplets'], 'baseline_triplets': cpinfo['baseline_triplets']}

    # --- Build per-subband frequency weights ---
    freq_wts = NP.empty((bw_eff.size, self.f.size), dtype=float)  # was NP.float_ (removed in NumPy 2.0)
    frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
    window_loss_factor = 1 / frac_width
    n_window = NP.round(window_loss_factor * bw_eff / self.df).astype(int)  # was NP.int (removed in NumPy 1.24)

    # Locate the channel nearest to each requested subband center
    ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
    sortind = NP.argsort(ind_channels)
    ind_freq_center = ind_freq_center[sortind]
    ind_channels = ind_channels[sortind]
    dfrequency = dfrequency[sortind]
    n_window = n_window[sortind]

    for i,ind_chan in enumerate(ind_channels):
        window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
        window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - int(n_window[i]/2))
        ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
        sind = NP.argsort(ind_window_chans)
        ind_window_chans = ind_window_chans[sind]
        ind_chans = ind_chans[sind]
        dfreq = dfreq[sind]
        window = window[ind_window_chans]
        # Zero-pad the window so it spans the full band
        window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
        freq_wts[i,:] = window

    npad = int(self.f.size * pad)
    lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)

    # BUGFIX: update the result dictionary in place instead of rebinding
    # it, so 'antenna_triplets' and 'baseline_triplets' survive in the
    # oversampled output as documented
    result.update({'freq_center': freq_center, 'shape': shape, 'freq_wts': freq_wts, 'bw_eff': bw_eff, 'npad': npad, 'lags': lags, 'lag_corr_length': self.f.size / NP.sum(freq_wts, axis=-1)})

    # --- Delay transform of each available closure phase quantity ---
    available_CP_key = None
    for key in cpinfo:
        if key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
            available_CP_key = key
            ndim_padtuple = [(0,0) for i in range(1+len(cpinfo[key].shape[:-2]))] + [(0,npad), (0,0)]
            result[key] = DSP.FT1D(NP.pad(NP.exp(-1j*cpinfo[key].reshape(cpinfo[key].shape[:-2]+(1,)+cpinfo[key].shape[-2:])) * freq_wts.reshape(tuple(NP.ones(len(cpinfo[key].shape[:-2])).astype(int))+freq_wts.shape+(1,)), ndim_padtuple, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
    # BUGFIX: guard against cpinfo carrying no closure phase arrays, which
    # previously caused a NameError on available_CP_key/ndim_padtuple below
    if available_CP_key is None:
        raise KeyError('cpinfo must contain at least one of the keys "closure_phase_skyvis", "closure_phase_vis" or "closure_phase_noise"')
    lag_kernel = DSP.FT1D(NP.pad(freq_wts.reshape(tuple(NP.ones(len(cpinfo[available_CP_key].shape[:-2])).astype(int))+freq_wts.shape+(1,)), ndim_padtuple, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
    result['lag_kernel'] = lag_kernel

    if verbose:
        print('\tSub-band(s) delay transform computed')

    # --- Downsample to independent delay bins ---
    result_resampled = {'antenna_triplets': cpinfo['antenna_triplets'], 'baseline_triplets': cpinfo['baseline_triplets']}
    result_resampled['freq_center'] = result['freq_center']
    result_resampled['bw_eff'] = result['bw_eff']
    result_resampled['freq_wts'] = result['freq_wts']

    downsample_factor = NP.min((self.f.size + npad) * self.df / result_resampled['bw_eff'])
    result_resampled['lags'] = DSP.downsampler(result['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
    result_resampled['lag_kernel'] = DSP.downsampler(result['lag_kernel'], downsample_factor, axis=-2, method='interp', kind='linear')
    dlag = result_resampled['lags'][1] - result_resampled['lags'][0]
    result_resampled['lag_corr_length'] = (1/result['bw_eff']) / dlag
    for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
        if key in result:
            result_resampled[key] = DSP.downsampler(result[key], downsample_factor, axis=-2, method='FFT')

    if verbose:
        print('\tDownsampled Sub-band(s) delay transform computed')

    if (action is None) or (action.lower() == 'return_resampled'):
        return result_resampled
    elif action.lower() == 'return_oversampled':
        return result
    else:
        raise ValueError('Invalid action specified')
################################################################################
def get_horizon_delay_limits(self, phase_center=None,
                             phase_center_coords=None):

    """
    -------------------------------------------------------------------------
    Estimate the delay envelope set by the sky horizon for the baseline(s)
    towards the given phase center(s).

    Inputs:

    phase_center
        [numpy array] Observation phase center(s) as an Nx2 array (for
        'hadec' or 'altaz' coordinates) or Nx3 array (for 'dircos'
        coordinates). Default=None uses the phase center (and its
        coordinate system) stored on the underlying InterferometerArray
        instance.

    phase_center_coords
        [string] Coordinate system of phase_center: 'hadec', 'altaz' or
        'dircos'. Default=None uses the corresponding attribute of the
        InterferometerArray instance (only when phase_center is None).

    Outputs:

    horizon_envelope:
        NxMx2 array, with N phase centers and M baselines.
        [...,0] holds the minimum delay and [...,1] the maximum delay
        after accounting for any non-zenith phase center.
    -------------------------------------------------------------------------
    """

    # Fall back to the interferometer array's own phase center when none
    # is supplied explicitly
    if phase_center is None:
        phase_center = self.ia.phase_center
        phase_center_coords = self.ia.phase_center_coords
    if phase_center_coords not in ['hadec', 'altaz', 'dircos']:
        raise ValueError('Phase center coordinates must be "altaz", "hadec" or "dircos"')

    # Convert whatever coordinate system was given into direction cosines
    if phase_center_coords == 'dircos':
        pc_dircos = phase_center
    else:
        if phase_center_coords == 'hadec':
            pc_altaz = GEOM.hadec2altaz(phase_center, self.ia.latitude, units='degrees')
        else:
            pc_altaz = phase_center
        pc_dircos = GEOM.altaz2dircos(pc_altaz, units='degrees')

    return DLY.horizon_delay_limits(self.ia.baselines, pc_dircos, units='mks')
#############################################################################
def set_horizon_delay_limits(self):

    """
    -------------------------------------------------------------------------
    Estimates the delay envelope determined by the sky horizon for the
    baseline(s) for the phase centers of the DelaySpectrum instance. No
    output is returned. Uses the member function get_horizon_delay_limits()
    -------------------------------------------------------------------------
    """

    # Cache the horizon delay envelope (NxMx2 array of min/max delays per
    # phase center and baseline) on the instance for later use
    self.horizon_delay_limits = self.get_horizon_delay_limits()
#############################################################################
def save(self, ds_outfile, ia_outfile, tabtype='BinTabelHDU', overwrite=False,
verbose=True):
"""
-------------------------------------------------------------------------
Saves the interferometer array delay spectrum information to disk.
Inputs:
ds_outfile [string] Filename with full path for delay spectrum
data to be saved to. Will be appended with '.ds.fits'
ia_outfile [string] Filename with full path for interferometer array
data to be saved to. Will be appended with '.fits'
extension
Keyword Input(s):
tabtype [string] indicates table type for one of the extensions in
the FITS file. Allowed values are 'BinTableHDU' and
'TableHDU' for binary and ascii tables respectively. Default
is 'BinTableHDU'.
overwrite [boolean] True indicates overwrite even if a file already
exists. Default = False (does not overwrite)
verbose [boolean] If True (default), prints diagnostic and progress
messages. If False, suppress printing such messages.
-------------------------------------------------------------------------
"""
try:
ds_outfile, ia_outfile
except NameError:
raise NameError('Both delay spectrum and interferometer array output filenames must be specified. Aborting DelaySpectrum.save()...')
if verbose:
print('\nSaving information about interferometer array...')
self.ia.save(ia_outfile, tabtype=tabtype, overwrite=overwrite,
verbose=verbose)
if verbose:
print('\nSaving information about delay spectra...')
hdulist = []
hdulist += [fits.PrimaryHDU()]
hdulist[0].header['EXTNAME'] = 'PRIMARY'
hdulist[0].header['NCHAN'] = (self.f.size, 'Number of frequency channels')
hdulist[0].header['NLAGS'] = (self.lags.size, 'Number of lags')
hdulist[0].header['freq_resolution'] = (self.df, 'Frequency resolution (Hz)')
hdulist[0].header['N_ACC'] = (self.n_acc, 'Number of accumulations')
hdulist[0].header['PAD'] = (self.pad, 'Padding factor')
hdulist[0].header['DBUFFER'] = (self.clean_window_buffer, 'CLEAN window buffer (1/bandwidth)')
hdulist[0].header['IARRAY'] = (ia_outfile+'.fits', 'Location of InterferometerArray simulated visibilities')
if verbose:
print('\tCreated a primary HDU.')
# cols = []
# cols += [fits.Column(name='frequency', format='D', array=self.f)]
# cols += [fits.Column(name='lag', format='D', array=self.lags)]
# columns = _astropy_columns(cols, tabtype=tabtype)
# tbhdu = fits.new_table(columns)
# tbhdu.header.set('EXTNAME', 'SPECTRAL INFO')
# hdulist += [tbhdu]
# if verbose:
# print('\tCreated an extension for spectral information.')
hdulist += [fits.ImageHDU(self.f, name='FREQUENCIES')]
hdulist += [fits.ImageHDU(self.lags, name='LAGS')]
if verbose:
print('\tCreated an extension for spectral information.')
hdulist += [fits.ImageHDU(self.horizon_delay_limits, name='HORIZON LIMITS')]
if verbose:
print('\tCreated an extension for horizon delay limits of size {0[0]} x {0[1]} x {0[2]} as a function of snapshot instance, baseline, and (min,max) limits'.format(self.horizon_delay_limits.shape))
hdulist += [fits.ImageHDU(self.bp, name='BANDPASS')]
if verbose:
print('\tCreated an extension for bandpass functions of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, frequency, and snapshot instance'.format(self.bp.shape))
hdulist += [fits.ImageHDU(self.bp_wts, name='BANDPASS WEIGHTS')]
if verbose:
print('\tCreated an extension for bandpass weights of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, frequency, and snapshot instance'.format(self.bp_wts.shape))
if self.lag_kernel is not None:
hdulist += [fits.ImageHDU(self.lag_kernel.real, name='LAG KERNEL REAL')]
hdulist += [fits.ImageHDU(self.lag_kernel.imag, name='LAG KERNEL IMAG')]
if verbose:
print('\tCreated an extension for convolving lag kernel of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, lags, and snapshot instance'.format(self.lag_kernel.shape))
if self.skyvis_lag is not None:
hdulist += [fits.ImageHDU(self.skyvis_lag.real, name='NOISELESS DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.skyvis_lag.imag, name='NOISELESS DELAY SPECTRA IMAG')]
if self.vis_lag is not None:
hdulist += [fits.ImageHDU(self.vis_lag.real, name='NOISY DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.vis_lag.imag, name='NOISY DELAY SPECTRA IMAG')]
if self.vis_noise_lag is not None:
hdulist += [fits.ImageHDU(self.vis_noise_lag.real, name='DELAY SPECTRA NOISE REAL')]
hdulist += [fits.ImageHDU(self.vis_noise_lag.imag, name='DELAY SPECTRA NOISE IMAG')]
if self.cc_freq is not None:
hdulist += [fits.ImageHDU(self.cc_freq, name='CLEAN FREQUENCIES')]
if self.cc_lags is not None:
hdulist += [fits.ImageHDU(self.cc_lags, name='CLEAN LAGS')]
if verbose:
print('\tCreated an extension for spectral axes of clean components')
if self.cc_lag_kernel is not None:
hdulist += [fits.ImageHDU(self.cc_lag_kernel.real, name='CLEAN LAG KERNEL REAL')]
hdulist += [fits.ImageHDU(self.cc_lag_kernel.imag, name='CLEAN LAG KERNEL IMAG')]
if verbose:
print('\tCreated an extension for deconvolving lag kernel of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, lags, and snapshot instance'.format(self.cc_lag_kernel.shape))
if self.cc_skyvis_lag is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_lag.real, name='CLEAN NOISELESS DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_lag.imag, name='CLEAN NOISELESS DELAY SPECTRA IMAG')]
if self.cc_skyvis_res_lag is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_res_lag.real, name='CLEAN NOISELESS DELAY SPECTRA RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_res_lag.imag, name='CLEAN NOISELESS DELAY SPECTRA RESIDUALS IMAG')]
if self.cc_skyvis_freq is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_freq.real, name='CLEAN NOISELESS VISIBILITIES REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_freq.imag, name='CLEAN NOISELESS VISIBILITIES IMAG')]
if self.cc_skyvis_res_freq is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_res_freq.real, name='CLEAN NOISELESS VISIBILITIES RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_res_freq.imag, name='CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG')]
if self.cc_vis_lag is not None:
hdulist += [fits.ImageHDU(self.cc_vis_lag.real, name='CLEAN NOISY DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_lag.imag, name='CLEAN NOISY DELAY SPECTRA IMAG')]
if self.cc_vis_res_lag is not None:
hdulist += [fits.ImageHDU(self.cc_vis_res_lag.real, name='CLEAN NOISY DELAY SPECTRA RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_res_lag.imag, name='CLEAN NOISY DELAY SPECTRA RESIDUALS IMAG')]
if self.cc_vis_freq is not None:
hdulist += [fits.ImageHDU(self.cc_vis_freq.real, name='CLEAN NOISY VISIBILITIES REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_freq.imag, name='CLEAN NOISY VISIBILITIES IMAG')]
if self.cc_vis_res_freq is not None:
hdulist += [fits.ImageHDU(self.cc_vis_res_freq.real, name='CLEAN NOISY VISIBILITIES RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_res_freq.imag, name='CLEAN NOISY VISIBILITIES RESIDUALS IMAG')]
if verbose:
print('\tCreated extensions for clean components of noiseless, noisy and residuals of visibilities in frequency and delay coordinates of size {0[0]} x {0[1]} x {0[2]} as a function of baselines, lags/frequency and snapshot instance'.format(self.lag_kernel.shape))
if self.subband_delay_spectra:
hdulist[0].header['SBDS'] = (1, 'Presence of Subband Delay Spectra')
for key in self.subband_delay_spectra:
hdulist[0].header['{0}-SBDS'.format(key)] = (1, 'Presence of {0} Subband Delay Spectra'.format(key))
hdulist[0].header['{0}-SBDS-WSHAPE'.format(key)] = (self.subband_delay_spectra[key]['shape'], 'Shape of {0} subband frequency weights'.format(key))
if key == 'cc':
hdulist[0].header['{0}-SBDS-BPCORR'.format(key)] = (int(self.subband_delay_spectra[key]['bpcorrect']), 'Truth value for {0} subband delay spectrum bandpass windows weights correction'.format(key))
hdulist[0].header['{0}-SBDS-NPAD'.format(key)] = (self.subband_delay_spectra[key]['npad'], 'Number of zero-padded channels for subband delay spectra'.format(key))
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['freq_center'], name='{0}-SBDS-F0'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['freq_wts'], name='{0}-SBDS-FWTS'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['bw_eff'], name='{0}-SBDS-BWEFF'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lags'], name='{0}-SBDS-LAGS'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lag_kernel'].real, name='{0}-SBDS-LAGKERN-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lag_kernel'].imag, name='{0}-SBDS-LAGKERN-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lag_corr_length'], name='{0}-SBDS-LAGCORR'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_lag'].real, name='{0}-SBDS-SKYVISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_lag'].imag, name='{0}-SBDS-SKYVISLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_lag'].real, name='{0}-SBDS-VISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_lag'].imag, name='{0}-SBDS-VISLAG-IMAG'.format(key))]
if key == 'sim':
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_noise_lag'].real, name='{0}-SBDS-NOISELAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_noise_lag'].imag, name='{0}-SBDS-NOISELAG-IMAG'.format(key))]
if key == 'cc':
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_res_lag'].real, name='{0}-SBDS-SKYVISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_res_lag'].imag, name='{0}-SBDS-SKYVISRESLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_res_lag'].real, name='{0}-SBDS-VISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_res_lag'].imag, name='{0}-SBDS-VISRESLAG-IMAG'.format(key))]
if verbose:
print('\tCreated extensions for information on subband delay spectra for simulated and clean components of visibilities as a function of baselines, lags/frequency and snapshot instance')
if self.subband_delay_spectra_resampled:
hdulist[0].header['SBDS-RS'] = (1, 'Presence of Resampled Subband Delay Spectra')
for key in self.subband_delay_spectra_resampled:
hdulist[0].header['{0}-SBDS-RS'.format(key)] = (1, 'Presence of {0} Reampled Subband Delay Spectra'.format(key))
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['freq_center'], name='{0}-SBDSRS-F0'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['bw_eff'], name='{0}-SBDSRS-BWEFF'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lags'], name='{0}-SBDSRS-LAGS'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lag_kernel'].real, name='{0}-SBDSRS-LAGKERN-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lag_kernel'].imag, name='{0}-SBDSRS-LAGKERN-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lag_corr_length'], name='{0}-SBDSRS-LAGCORR'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_lag'].real, name='{0}-SBDSRS-SKYVISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_lag'].imag, name='{0}-SBDSRS-SKYVISLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_lag'].real, name='{0}-SBDSRS-VISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_lag'].imag, name='{0}-SBDSRS-VISLAG-IMAG'.format(key))]
if key == 'sim':
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_noise_lag'].real, name='{0}-SBDSRS-NOISELAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_noise_lag'].imag, name='{0}-SBDSRS-NOISELAG-IMAG'.format(key))]
if key == 'cc':
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_res_lag'].real, name='{0}-SBDSRS-SKYVISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_res_lag'].imag, name='{0}-SBDSRS-SKYVISRESLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_res_lag'].real, name='{0}-SBDSRS-VISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_res_lag'].imag, name='{0}-SBDSRS-VISRESLAG-IMAG'.format(key))]
if verbose:
print('\tCreated extensions for information on resampled subband delay spectra for simulated and clean components of visibilities as a function of baselines, lags/frequency and snapshot instance')
hdu = fits.HDUList(hdulist)
hdu.writeto(ds_outfile+'.ds.fits', clobber=overwrite)
################################################################################
class DelayPowerSpectrum(object):
"""
----------------------------------------------------------------------------
Class to manage delay power spectrum from visibility measurements of a
multi-element interferometer array.
Attributes:
cosmo [instance of cosmology class from astropy] An instance of class
FLRW or default_cosmology of astropy cosmology module.
ds [instance of class DelaySpectrum] An instance of class
DelaySpectrum that contains the information on delay spectra of
simulated visibilities
f [list or numpy vector] frequency channels in Hz
lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as channels. This is
computed in member function delay_transform().
cc_lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as cc_freq. This is computed in
member function delayClean().
df [scalar] Frequency resolution (in Hz)
bl [M x 3 Numpy array] The baseline vectors associated with the
M interferometers in SI units
bl_length [M-element numpy array] Lengths of the baseline in SI units
f0 [scalar] Central frequency (in Hz)
wl0 [scalar] Central wavelength (in m)
z [scalar] redshift
bw [scalar] (effective) bandwidth (in Hz)
kprll [numpy array] line-of-sight wavenumbers (in h/Mpc) corresponding
to delays in the delay spectrum
kperp [numpy array] transverse wavenumbers (in h/Mpc) corresponding
to baseline lengths
horizon_kprll_limits
[numpy array] limits on k_parallel corresponding to limits on
horizon delays. It is of size NxMx2 denoting the negative and
positive horizon delay limits where N is the number of
timestamps, M is the number of baselines. The 0 index in the
third dimension denotes the negative horizon limit while
the 1 index denotes the positive horizon limit
drz_los [scalar] comoving line-of-sight depth (Mpc/h) corresponding to
specified redshift and bandwidth for redshifted 21 cm line
rz_transverse
[scalar] comoving transverse distance (Mpc/h) corresponding to
specified redshift for redshifted 21 cm line
rz_los [scalar] comoving line-of-sight distance (Mpc/h) corresponding
to specified redshift for redshifted 21 cm line
jacobian1 [scalar] first jacobian in conversion of delay spectrum to
power spectrum. It is equal to A_eff / wl**2 / bw
jacobian2 [scalar] second jacobian in conversion of delay spectrum to
power spectrum. It is equal to rz_los**2 * drz_los / bw
Jy2K [scalar] factor to convert Jy/Sr to K. It is equal to
wl**2 * Jy / (2k)
K2Jy [scalar] factor to convert K to Jy/Sr. It is equal to 1/Jy2K
dps [dictionary of numpy arrays] contains numpy arrays containing
delay power spectrum in units of K^2 (Mpc/h)^3 under the
following keys:
'skyvis' [numpy array] delay power spectrum of noiseless
delay spectra
'vis' [numpy array] delay power spectrum of noisy delay
spectra
'noise' [numpy array] delay power spectrum of thermal noise
delay spectra
'cc_skyvis' [numpy array] delay power spectrum of clean
components of noiseless delay spectra
'cc_vis' [numpy array] delay power spectrum of clean
components of noisy delay spectra
'cc_skyvis_res'
[numpy array] delay power spectrum of residuals
after delay cleaning of noiseless delay spectra
'cc_vis_res'
[numpy array] delay power spectrum of residuals
after delay cleaning of noisy delay spectra
'cc_skyvis_net'
[numpy array] delay power spectrum of sum of
residuals and clean components
after delay cleaning of noiseless delay spectra
'cc_vis_net'
[numpy array] delay power spectrum of sum of
residuals and clean components
after delay cleaning of noisy delay spectra
subband_delay_power_spectra
[dictionary] contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Essentially this is the power spectrum equivalent
of the attribute subband_delay_spectra under class DelaySpectrum.
Under each of these keys is information about delay power spectra
of different frequency sub-bands (n_win in number) in the form of
a dictionary under the following keys:
'z' [numpy array] contains the redshifts corresponding to
center frequencies (in Hz) of the frequency subbands
of the subband delay spectra. It is of size n_win.
'dz' [numpy array] contains the width in redshifts
corresponding to the effective bandwidths (in Hz) of
the subbands being delay transformed. It is of size
n_win.
'kprll' [numpy array] line-of-sight k-modes (in h/Mpc)
corresponding to lags of the subband delay spectra.
It is of size n_win x (nchan+npad)
'kperp' [numpy array] transverse k-modes (in h/Mpc)
corresponding to the baseline lengths and the
center frequencies. It is of size
n_win x n_bl
horizon_kprll_limits
[numpy array] limits on k_parallel corresponding to
limits on horizon delays for each subband. It is of
size N x n_win x M x 2 denoting the negative and
positive horizon delay limits where N is the number
of timestamps, n_win is the number of subbands, M is
the number of baselines. The 0 index in the fourth
dimension denotes the negative horizon limit while
the 1 index denotes the positive horizon limit
'rz_los' [numpy array] Comoving distance along LOS (in Mpc/h)
corresponding to the different redshifts under key
'z'. It is of size n_win
'rz_transverse'
[numpy array] transverse comoving distance
(in Mpc/h) corresponding to the different redshifts
under key 'z'. It is of size n_win
'drz_los' [numpy array] line-of-sight comoving depth (in
Mpc/h) corresponding to the redshift widths under
key 'dz' and redshifts under key 'z'. It is of size
n_win
'jacobian1' [numpy array] first jacobian in conversion of delay
spectrum to power spectrum. It is equal to
A_eff / wl**2 / bw. It is of size n_win
'jacobian2' [numpy array] second jacobian in conversion of delay
spectrum to power spectrum. It is equal to
rz_los**2 * drz_los / bw. It is of size n_win
'Jy2K' [numpy array] factor to convert Jy/Sr to K. It is
equal to wl**2 * Jy / (2k). It is of size n_win
'factor' [numpy array] conversion factor to convert delay
spectrum (in Jy Hz) to delay power spectrum (in
K^2 (Mpc/h)^3). It is equal to
jacobian1 * jacobian2 * Jy2K**2. It is of size n_win
'skyvis_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noiseless simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'skyvis_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_lag' [numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noisy simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'vis_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_noise_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to thermal noise simulated (under top
level key 'sim') delay spectrum under key
'vis_noise_lag' in attribute subband_delay_spectra
under instance of class DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'skyvis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_res_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_res_lag' in attribute subband_delay_spectra
under instance of class DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'skyvis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_net_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_net_lag' in attribute subband_delay_spectra
under instance of class DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
subband_delay_power_spectra_resampled
[dictionary] contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Essentially this is the power spectrum equivalent
of the attribute subband_delay_spectra_resampled under class
DelaySpectrum. Under each of these keys is information about
delay power spectra of different frequency sub-bands (n_win in
number) in the form of a dictionary under the following keys:
'kprll' [numpy array] line-of-sight k-modes (in h/Mpc)
corresponding to lags of the subband delay spectra.
It is of size n_win x nlags, where nlags is the
resampled number of delay bins
'kperp' [numpy array] transverse k-modes (in h/Mpc)
corresponding to the baseline lengths and the
center frequencies. It is of size
n_win x n_bl
'horizon_kprll_limits'
[numpy array] limits on k_parallel corresponding to
limits on horizon delays for each subband. It is of
size N x n_win x M x 2 denoting the negative and
positive horizon delay limits where N is the number
of timestamps, n_win is the number of subbands, M is
the number of baselines. The 0 index in the fourth
dimension denotes the negative horizon limit while
the 1 index denotes the positive horizon limit
'skyvis_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noiseless simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'skyvis_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_lag' [numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noisy simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'vis_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_noise_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to thermal noise simulated (under top
level key 'sim') delay spectrum under key
'vis_noise_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'skyvis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_res_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_res_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'skyvis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_net_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_net_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
Member functions:
__init__() Initialize an instance of class DelayPowerSpectrum
comoving_los_depth()
Compute comoving line-of-sight depth (Mpc/h) corresponding to
specified redshift and bandwidth for redshifted 21 cm line
comoving_transverse_distance()
Compute comoving transverse distance (Mpc/h) corresponding to
specified redshift for redshifted 21 cm line
comoving_los_distance()
Compute comoving line-of-sight distance (Mpc/h) corresponding
to specified redshift for redshifted 21 cm line
k_parallel()
Compute line-of-sight wavenumbers (h/Mpc) corresponding to
specified delays and redshift for redshifted 21 cm line
k_perp() Compute transverse wavenumbers (h/Mpc) corresponding to
specified baseline lengths and redshift for redshifted 21 cm
line assuming a mean wavelength (in m) for the relationship
between baseline lengths and spatial frequencies (u and v)
compute_power_spectrum()
Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the
delay spectrum in units of Jy Hz
compute_power_spectrum_allruns()
Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the
delay spectrum in units of Jy Hz from multiple runs of
visibilities
compute_individual_closure_phase_power_spectrum()
Compute delay power spectrum of closure phase in units of
K^2 (Mpc/h)^3 from the delay spectrum in units of Jy Hz where
the original visibility amplitudes of closure phase complex
exponents are assumed to be 1 Jy across the band
compute_averaged_closure_phase_power_spectrum()
Compute delay power spectrum of closure phase in units of
K^2 (Mpc/h)^3 from the delay spectrum in units of Jy Hz and
average over 'auto' and 'cross' modes, where the original
visibility amplitudes of closure phase complex exponents are
assumed to be 1 Jy across the band
----------------------------------------------------------------------------
"""
def __init__(self, dspec, cosmo=cosmo100):
    """
    ------------------------------------------------------------------------
    Initialize an instance of class DelayPowerSpectrum. Attributes
    initialized are: ds, cosmo, f, lags, cc_lags, bl, bl_length, df, f0,
    wl0, z, bw, kprll, kperp, horizon_kprll_limits, drz_los, rz_transverse,
    rz_los, jacobian1, jacobian2, Jy2K, K2Jy, dps,
    subband_delay_power_spectra, subband_delay_power_spectra_resampled

    Inputs:

    dspec    [instance of class DelaySpectrum] An instance of class
             DelaySpectrum that contains the information on delay spectra
             of simulated visibilities

    cosmo    [instance of a cosmology class in Astropy] An instance of
             class FLRW or default_cosmology of astropy cosmology module.
             Default value is set using the current cosmology but with
             H0=100 km/s/Mpc

    Raises:

    TypeError if dspec is not a DelaySpectrum instance or cosmo is not an
    Astropy cosmology
    ------------------------------------------------------------------------
    """

    # NOTE: the former "try: dspec / except NameError" guard was removed --
    # a bound function parameter can never raise NameError; Python raises
    # TypeError before the body runs if the argument is missing
    if not isinstance(dspec, DelaySpectrum):
        raise TypeError('Input dspec must be an instance of class DelaySpectrum')
    if not isinstance(cosmo, (CP.FLRW, CP.default_cosmology)):
        raise TypeError('Input cosmology must be a cosmology class defined in Astropy')
    self.cosmo = cosmo
    self.ds = dspec
    self.f = self.ds.f
    self.lags = self.ds.lags
    self.cc_lags = self.ds.cc_lags
    self.bl = self.ds.ia.baselines
    self.bl_length = self.ds.ia.baseline_lengths
    self.df = self.ds.df

    # Central frequency / wavelength of the band and the corresponding
    # redshift of the HI line
    self.f0 = self.f[self.f.size // 2]
    self.wl0 = FCNST.c / self.f0
    self.z = CNST.rest_freq_HI / self.f0 - 1
    self.bw = self.df * self.f.size

    # Map delays and baseline lengths to cosmological wavenumbers and
    # comoving distances at the central redshift
    self.kprll = self.k_parallel(self.lags, redshift=self.z, action='return') # in h/Mpc
    self.kperp = self.k_perp(self.bl_length, redshift=self.z, action='return') # in h/Mpc
    self.horizon_kprll_limits = self.k_parallel(self.ds.horizon_delay_limits, redshift=self.z, action='return') # in h/Mpc
    self.drz_los = self.comoving_los_depth(self.bw, self.z, action='return') # in Mpc/h
    self.rz_transverse = self.comoving_transverse_distance(self.z, action='return') # in Mpc/h
    self.rz_los = self.comoving_los_distance(self.z, action='return') # in Mpc/h

    # Jacobians for converting delay spectra (Jy Hz) to delay power
    # spectra (K^2 (Mpc/h)^3). jacobian1 uses the 3D beam volume
    # (Sr Hz) instead of the A_eff/wl^2/bw approximation
    omega_bw = self.beam3Dvol(freq_wts=self.ds.bp_wts[0,:,0])
    self.jacobian1 = 1 / omega_bw
    self.jacobian2 = self.rz_los**2 * self.drz_los / self.bw

    # Jy/Sr <-> K brightness-temperature conversion at central wavelength
    self.Jy2K = self.wl0**2 * CNST.Jy / (2*FCNST.k)
    self.K2Jy = 1 / self.Jy2K

    # Delay power spectrum holders, populated by compute_power_spectrum()
    self.dps = {key: None for key in ('skyvis', 'vis', 'noise',
                                      'cc_skyvis', 'cc_vis',
                                      'cc_skyvis_res', 'cc_vis_res',
                                      'cc_skyvis_net', 'cc_vis_net')}
    self.subband_delay_power_spectra = {}
    self.subband_delay_power_spectra_resampled = {}
############################################################################
def comoving_los_depth(self, bw, redshift, action=None):
    """
    ------------------------------------------------------------------------
    Compute the comoving line-of-sight depth (Mpc/h) spanned by a given
    bandwidth at a given redshift for the redshifted 21 cm line

    Inputs:

    bw       [scalar] bandwidth in Hz

    redshift [scalar] redshift

    action   [string] If None (default), store the computed comoving
             depth and the specified redshift as attributes of this
             instance (drz_los, z) and return nothing. Otherwise (e.g.
             'return'), return the computed depth without storing it

    Outputs:

    The comoving line-of-sight depth (Mpc/h) when action is not None
    ------------------------------------------------------------------------
    """

    # Depth = c * bw * (1+z)^2 / (nu_HI * H0 * E(z)); c is converted to
    # km/s so that with H0 = 100h km/s/Mpc the result comes out in Mpc/h
    depth_los = (FCNST.c/1e3) * bw * (1+redshift)**2 / CNST.rest_freq_HI / self.cosmo.H0.value / self.cosmo.efunc(redshift) # in Mpc/h
    if action is not None:
        return depth_los
    # Default path: cache result on the instance
    self.z = redshift
    self.drz_los = depth_los
    return
############################################################################
def comoving_transverse_distance(self, redshift, action=None):
    """
    ------------------------------------------------------------------------
    Compute the comoving transverse distance (Mpc/h) at a given redshift
    for the redshifted 21 cm line

    Inputs:

    redshift [scalar] redshift

    action   [string] If None (default), store the computed distance and
             the specified redshift as attributes of this instance
             (rz_transverse, z) and return nothing. Otherwise (e.g.
             'return'), return the computed distance without storing it

    Outputs:

    The comoving transverse distance (Mpc/h) when action is not None
    ------------------------------------------------------------------------
    """

    # Astropy returns a Quantity; convert to Mpc and strip the units.
    # With H0 = 100h km/s/Mpc in self.cosmo the value is effectively Mpc/h
    distance = self.cosmo.comoving_transverse_distance(redshift).to('Mpc').value # in Mpc/h
    if action is not None:
        return distance
    # Default path: cache result on the instance
    self.z = redshift
    self.rz_transverse = distance
    return
############################################################################
def comoving_los_distance(self, redshift, action=None):
    """
    ------------------------------------------------------------------------
    Compute the comoving line-of-sight distance (Mpc/h) at a given
    redshift for the redshifted 21 cm line

    Inputs:

    redshift [scalar] redshift

    action   [string] If None (default), store the computed distance and
             the specified redshift as attributes of this instance
             (rz_los, z) and return nothing. Otherwise (e.g. 'return'),
             return the computed distance without storing it

    Outputs:

    The comoving line-of-sight distance (Mpc/h) when action is not None
    ------------------------------------------------------------------------
    """

    # Astropy returns a Quantity; convert to Mpc and strip the units.
    # With H0 = 100h km/s/Mpc in self.cosmo the value is effectively Mpc/h
    distance = self.cosmo.comoving_distance(redshift).to('Mpc').value # in Mpc/h
    if action is not None:
        return distance
    # Default path: cache result on the instance
    self.z = redshift
    self.rz_los = distance
    return
############################################################################
def k_parallel(self, lags, redshift, action=None):
    """
    ------------------------------------------------------------------------
    Compute line-of-sight wavenumbers (h/Mpc) for the given delays and
    redshift for the redshifted 21 cm line

    Inputs:

    lags     [numpy array] geometric delays (in seconds) obtained as the
             Fourier conjugate variable of frequencies in the bandpass

    redshift [scalar] redshift

    action   [string] If None (default), store the computed wavenumbers
             and the specified redshift as attributes of this instance
             (kprll, z) and return nothing. Otherwise (e.g. 'return'),
             return the computed wavenumbers without storing them

    Outputs:

    Line-of-sight wavenumbers (h/Mpc), same size as input lags, when
    action is not None
    ------------------------------------------------------------------------
    """

    # dkprll_deta() supplies the cosmological Jacobian d(k_prll)/d(eta)
    # at this redshift for the adopted cosmology
    delay_to_kprll = dkprll_deta(redshift, cosmo=self.cosmo)
    kprll = delay_to_kprll * lags
    if action is not None:
        return kprll
    # Default path: cache result on the instance
    self.z = redshift
    self.kprll = kprll
    return
############################################################################
def k_perp(self, baseline_length, redshift, action=None):
    """
    ------------------------------------------------------------------------
    Compute transverse wavenumbers (h/Mpc) for the given baseline lengths
    and redshift for the redshifted 21 cm line, using the central
    wavelength to convert baseline lengths to spatial frequencies (u, v)

    Inputs:

    baseline_length
             [numpy array] baseline lengths (in m)

    redshift [scalar] redshift

    action   [string] If None (default), store the computed wavenumbers
             and the specified redshift as attributes of this instance
             (kperp, z) and return nothing. Otherwise (e.g. 'return'),
             return the computed wavenumbers without storing them

    Outputs:

    Transverse wavenumbers (h/Mpc) when action is not None
    ------------------------------------------------------------------------
    """

    # |u| = baseline length in wavelengths at the central wavelength;
    # k_perp = 2*pi*|u| / D_M(z), with D_M the transverse comoving distance
    uv_dist = baseline_length / self.wl0
    kperp = 2 * NP.pi * uv_dist / self.comoving_transverse_distance(redshift, action='return')
    if action is not None:
        return kperp
    # Default path: cache result on the instance
    self.z = redshift
    self.kperp = kperp
    return
############################################################################
def beam3Dvol(self, freq_wts=None, nside=32):
    """
    ------------------------------------------------------------------------
    Compute three-dimensional (transverse-LOS) volume of the beam in units
    of "Sr Hz".

    Inputs:

    freq_wts [numpy array] Frequency weights centered on different
             spectral windows or redshifts. Its shape is (nwin,nchan).
             nchan should match the number of spectral channels in the
             class attribute for frequency channels

    nside    [integer] HEALPix NSIDE parameter for determining and
             interpolating the beam. Default=32. It may be lowered
             internally to match the native resolution of an external
             beam file

    Output:

    omega_bw [numpy array] Integral of the square of the power pattern
             over transverse and spectral axes. Its shape is (nwin,)
    ------------------------------------------------------------------------
    """

    if self.ds.ia.simparms_file is not None:
        # A simulation parameters file is available: read the beam
        # configuration (external vs analytic, chromaticity) from it
        parms_file = open(self.ds.ia.simparms_file, 'r')
        parms = yaml.safe_load(parms_file)
        parms_file.close()
        # sky_nside = parms['fgparm']['nside']
        beam_info = parms['beam']
        use_external_beam = beam_info['use_external']
        beam_chromaticity = beam_info['chromatic']
        select_beam_freq = beam_info['select_freq']
        if select_beam_freq is None:
            # Fall back to the band's central frequency
            select_beam_freq = self.f0
        # HEALPix pixel directions at which the beam is evaluated
        theta, phi = HP.pix2ang(nside, NP.arange(HP.nside2npix(nside)))
        theta_phi = NP.hstack((theta.reshape(-1,1), phi.reshape(-1,1)))
        if use_external_beam:
            # Load the beam from an external file (FITS, HDF5 or UVBeam)
            beam_file = beam_info['file']
            if beam_info['filefmt'].lower() in ['hdf5', 'fits', 'uvbeam']:
                beam_filefmt = beam_info['filefmt'].lower()
            else:
                raise ValueError('Invalid beam file format specified')
            if beam_info['filepathtype'] == 'default':
                # Resolve relative to the packaged beam data directory
                beam_file = prisim_path+'data/beams/' + beam_file
            beam_pol = beam_info['pol']
            beam_id = beam_info['identifier']
            pbeam_spec_interp_method = beam_info['spec_interp']
            if beam_filefmt == 'fits':
                extbeam = fits.getdata(beam_file, extname='BEAM_{0}'.format(beam_pol))
                beam_freqs = fits.getdata(beam_file, extname='FREQS_{0}'.format(beam_pol))
                extbeam = extbeam.reshape(-1,beam_freqs.size) # npix x nfreqs
                prihdr = fits.getheader(beam_file, 0)
                beamunit = prihdr['GAINUNIT']
            elif beam_filefmt.lower() == 'hdf5':
                with h5py.File(beam_file, 'r') as fileobj:
                    # NOTE(review): Dataset.value is deprecated in
                    # h5py >= 2.9 -- behavior kept as-is here
                    extbeam = fileobj['gain_info'][beam_pol].value
                    extbeam = extbeam.T
                    beam_freqs = fileobj['spectral_info']['freqs'].value
                    beamunit = fileobj['header']['gainunit'].value
            elif beam_filefmt == 'uvbeam':
                if uvbeam_module_found:
                    uvbm = UVBeam()
                    uvbm.read_beamfits(beam_file)
                    axis_vec_ind = 0 # for power beam
                    spw_ind = 0 # spectral window index
                    if beam_pol.lower() in ['x', 'e']:
                        beam_pol_ind = 0
                    else:
                        beam_pol_ind = 1
                    extbeam = uvbm.data_array[axis_vec_ind,spw_ind,beam_pol_ind,:,:].T # npix x nfreqs
                    beam_freqs = uvbm.freq_array.ravel() # nfreqs (in Hz)
                else:
                    raise ImportError('uvbeam module not installed/found')
                # Normalize to unit peak if not already normalized
                if NP.abs(NP.abs(extbeam).max() - 1.0) > 1e-10:
                    extbeam /= NP.abs(extbeam).max()
                beamunit = ''
            else:
                raise ValueError('Specified external beam file format not currently supported')
            if beamunit.lower() == 'db':
                # Convert from dB to linear power gain
                extbeam = 10**(extbeam/10.0)
            beam_nside = HP.npix2nside(extbeam.shape[0])
            if beam_nside < nside:
                # Do not interpolate beyond the native resolution of the
                # external beam
                nside = beam_nside
            if beam_chromaticity:
                # 'fft' spectral interpolation drops the last channel --
                # presumably to keep uniform spacing; TODO confirm
                if pbeam_spec_interp_method == 'fft':
                    extbeam = extbeam[:,:-1]
                    beam_freqs = beam_freqs[:-1]
                # Interpolate log10(beam) onto the instrument's channels
                interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(extbeam), theta_phi=theta_phi, inloc_axis=beam_freqs, outloc_axis=self.f, axis=1, kind=pbeam_spec_interp_method, assume_sorted=True)
            else:
                # Achromatic beam: replicate the channel nearest to the
                # selected frequency across the whole band
                nearest_freq_ind = NP.argmin(NP.abs(beam_freqs - select_beam_freq))
                interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(NP.repeat(extbeam[:,nearest_freq_ind].reshape(-1,1), self.f.size, axis=1)), theta_phi=theta_phi, inloc_axis=self.f, outloc_axis=self.f, axis=1, assume_sorted=True)
            # Shift log-beam so each channel peaks at 0 (unity in linear),
            # clamping non-positive maxima to 0
            interp_logbeam_max = NP.nanmax(interp_logbeam, axis=0)
            interp_logbeam_max[interp_logbeam_max <= 0.0] = 0.0
            interp_logbeam_max = interp_logbeam_max.reshape(1,-1)
            interp_logbeam = interp_logbeam - interp_logbeam_max
            beam = 10**interp_logbeam
        else:
            # Generate the beam analytically from the telescope description
            alt = 90.0 - NP.degrees(theta)
            az = NP.degrees(phi)
            altaz = NP.hstack((alt.reshape(-1,1), az.reshape(-1,1)))
            if beam_chromaticity:
                beam = PB.primary_beam_generator(altaz, self.f, self.ds.ia.telescope, freq_scale='Hz', skyunits='altaz', east2ax1=0.0, pointing_info=None, pointing_center=None)
            else:
                # Single-frequency beam replicated across all channels
                beam = PB.primary_beam_generator(altaz, select_beam_freq, self.ds.ia.telescope, skyunits='altaz', pointing_info=None, pointing_center=None, freq_scale='Hz', east2ax1=0.0)
                beam = beam.reshape(-1,1) * NP.ones(self.f.size).reshape(1,-1)
    else:
        # No simulation parameters file: fall back to a chromatic analytic
        # beam generated from the telescope description
        theta, phi = HP.pix2ang(nside, NP.arange(HP.nside2npix(nside)))
        alt = 90.0 - NP.degrees(theta)
        az = NP.degrees(phi)
        altaz = NP.hstack((alt.reshape(-1,1), az.reshape(-1,1)))
        beam = PB.primary_beam_generator(altaz, self.f, self.ds.ia.telescope, freq_scale='Hz', skyunits='altaz', east2ax1=0.0, pointing_info=None, pointing_center=None)
    # The bare name resolves to the module-level beam3Dvol() function
    # (not this method), which integrates |beam|^2 over sky and band
    omega_bw = beam3Dvol(beam, self.f, freq_wts=freq_wts, hemisphere=True)
    return omega_bw
############################################################################
def compute_power_spectrum(self):
    """
    ------------------------------------------------------------------------
    Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the delay
    spectrum in units of Jy Hz.

    Populates:
    self.dps    [dictionary] full-band delay power spectra under keys
                'skyvis', 'vis', 'noise' and, when delay-CLEANed products
                exist, the 'cc_*' variants
    self.subband_delay_power_spectra
                [dictionary] per-subband power spectra, k-coordinates and
                conversion factors, keyed like self.ds.subband_delay_spectra
    self.subband_delay_power_spectra_resampled
                [dictionary] same quantities for the resampled subband
                delay spectra (reuses redshifts/factors from the section
                above)
    ------------------------------------------------------------------------
    """
    # Full band: P(tau) = |V(tau)|^2 * jacobian1 * jacobian2 * Jy2K^2,
    # with the three factors precomputed on the instance.
    self.dps = {}
    factor = self.jacobian1 * self.jacobian2 * self.Jy2K**2
    if self.ds.skyvis_lag is not None: self.dps['skyvis'] = NP.abs(self.ds.skyvis_lag)**2 * factor
    if self.ds.vis_lag is not None: self.dps['vis'] = NP.abs(self.ds.vis_lag)**2 * factor
    if self.ds.vis_noise_lag is not None: self.dps['noise'] = NP.abs(self.ds.vis_noise_lag)**2 * factor
    # Delay-CLEANed (cc_*) products exist only if CLEANing was performed
    if self.ds.cc_lags is not None:
        if self.ds.cc_skyvis_lag is not None: self.dps['cc_skyvis'] = NP.abs(self.ds.cc_skyvis_lag)**2 * factor
        if self.ds.cc_vis_lag is not None: self.dps['cc_vis'] = NP.abs(self.ds.cc_vis_lag)**2 * factor
        if self.ds.cc_skyvis_res_lag is not None: self.dps['cc_skyvis_res'] = NP.abs(self.ds.cc_skyvis_res_lag)**2 * factor
        if self.ds.cc_vis_res_lag is not None: self.dps['cc_vis_res'] = NP.abs(self.ds.cc_vis_res_lag)**2 * factor
        if self.ds.cc_skyvis_net_lag is not None: self.dps['cc_skyvis_net'] = NP.abs(self.ds.cc_skyvis_net_lag)**2 * factor
        if self.ds.cc_vis_net_lag is not None: self.dps['cc_vis_net'] = NP.abs(self.ds.cc_vis_net_lag)**2 * factor
    # Per-subband power spectra with per-redshift cosmological conversion
    if self.ds.subband_delay_spectra:
        for key in self.ds.subband_delay_spectra:
            self.subband_delay_power_spectra[key] = {}
            wl = FCNST.c / self.ds.subband_delay_spectra[key]['freq_center']  # wavelength (m), size n_win
            # Redshift of each subband center and its width from the HI rest frequency
            self.subband_delay_power_spectra[key]['z'] = CNST.rest_freq_HI / self.ds.subband_delay_spectra[key]['freq_center'] - 1
            self.subband_delay_power_spectra[key]['dz'] = CNST.rest_freq_HI / self.ds.subband_delay_spectra[key]['freq_center']**2 * self.ds.subband_delay_spectra[key]['bw_eff']
            kprll = NP.empty((self.ds.subband_delay_spectra[key]['freq_center'].size, self.ds.subband_delay_spectra[key]['lags'].size))  # n_win x nlags
            kperp = NP.empty((self.ds.subband_delay_spectra[key]['freq_center'].size, self.bl_length.size))  # n_win x n_bl
            horizon_kprll_limits = NP.empty((self.ds.n_acc, self.ds.subband_delay_spectra[key]['freq_center'].size, self.bl_length.size, 2))  # n_t x n_win x n_bl x 2
            for zind,z in enumerate(self.subband_delay_power_spectra[key]['z']):
                kprll[zind,:] = self.k_parallel(self.ds.subband_delay_spectra[key]['lags'], z, action='return')
                kperp[zind,:] = self.k_perp(self.bl_length, z, action='return')
                horizon_kprll_limits[:,zind,:,:] = self.k_parallel(self.ds.horizon_delay_limits, z, action='return')
            self.subband_delay_power_spectra[key]['kprll'] = kprll
            self.subband_delay_power_spectra[key]['kperp'] = kperp
            self.subband_delay_power_spectra[key]['horizon_kprll_limits'] = horizon_kprll_limits
            self.subband_delay_power_spectra[key]['rz_los'] = self.cosmo.comoving_distance(self.subband_delay_power_spectra[key]['z']).to('Mpc').value # in Mpc/h
            self.subband_delay_power_spectra[key]['rz_transverse'] = self.comoving_transverse_distance(self.subband_delay_power_spectra[key]['z'], action='return') # in Mpc/h
            self.subband_delay_power_spectra[key]['drz_los'] = self.comoving_los_depth(self.ds.subband_delay_spectra[key]['bw_eff'], self.subband_delay_power_spectra[key]['z'], action='return')
            # self.subband_delay_power_spectra[key]['jacobian1'] = NP.mean(self.ds.ia.A_eff) / wl**2 / self.ds.subband_delay_spectra[key]['bw_eff']
            # jacobian1 normalizes by the 3D beam-bandwidth volume
            omega_bw = self.beam3Dvol(freq_wts=self.ds.subband_delay_spectra[key]['freq_wts'])
            self.subband_delay_power_spectra[key]['jacobian1'] = 1 / omega_bw
            # self.subband_delay_power_spectra[key]['jacobian2'] = self.subband_delay_power_spectra[key]['rz_transverse']**2 * self.subband_delay_power_spectra[key]['drz_los'] / self.ds.subband_delay_spectra[key]['bw_eff']
            # jacobian2 converts (angle, frequency) volume to comoving (Mpc/h)^3;
            # NOTE(review): uses rz_los rather than rz_transverse (the alternative
            # is kept commented above) -- confirm against the estimator convention
            self.subband_delay_power_spectra[key]['jacobian2'] = self.subband_delay_power_spectra[key]['rz_los']**2 * self.subband_delay_power_spectra[key]['drz_los'] / self.ds.subband_delay_spectra[key]['bw_eff']
            self.subband_delay_power_spectra[key]['Jy2K'] = wl**2 * CNST.Jy / (2*FCNST.k)  # Rayleigh-Jeans Jy -> K conversion
            self.subband_delay_power_spectra[key]['factor'] = self.subband_delay_power_spectra[key]['jacobian1'] * self.subband_delay_power_spectra[key]['jacobian2'] * self.subband_delay_power_spectra[key]['Jy2K']**2
            conversion_factor = self.subband_delay_power_spectra[key]['factor'].reshape(1,-1,1,1)  # broadcast over n_win axis of n_bl x n_win x nlags x n_t
            self.subband_delay_power_spectra[key]['skyvis_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['skyvis_lag'])**2 * conversion_factor
            self.subband_delay_power_spectra[key]['vis_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_lag'])**2 * conversion_factor
            if key == 'cc':
                # CLEANed spectra carry residual/net products instead of a noise product
                self.subband_delay_power_spectra[key]['skyvis_res_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['skyvis_res_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra[key]['vis_res_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_res_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra[key]['skyvis_net_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['skyvis_net_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra[key]['vis_net_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_net_lag'])**2 * conversion_factor
            else:
                self.subband_delay_power_spectra[key]['vis_noise_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_noise_lag'])**2 * conversion_factor
    # Resampled subband power spectra: reuses the redshifts and conversion
    # factor computed in the section above for the matching key, so this
    # section assumes self.subband_delay_power_spectra[key] is populated.
    if self.ds.subband_delay_spectra_resampled:
        for key in self.ds.subband_delay_spectra_resampled:
            self.subband_delay_power_spectra_resampled[key] = {}
            kprll = NP.empty((self.ds.subband_delay_spectra_resampled[key]['freq_center'].size, self.ds.subband_delay_spectra_resampled[key]['lags'].size))
            kperp = NP.empty((self.ds.subband_delay_spectra_resampled[key]['freq_center'].size, self.bl_length.size))
            horizon_kprll_limits = NP.empty((self.ds.n_acc, self.ds.subband_delay_spectra_resampled[key]['freq_center'].size, self.bl_length.size, 2))
            for zind,z in enumerate(self.subband_delay_power_spectra[key]['z']):
                kprll[zind,:] = self.k_parallel(self.ds.subband_delay_spectra_resampled[key]['lags'], z, action='return')
                kperp[zind,:] = self.k_perp(self.bl_length, z, action='return')
                horizon_kprll_limits[:,zind,:,:] = self.k_parallel(self.ds.horizon_delay_limits, z, action='return')
            self.subband_delay_power_spectra_resampled[key]['kprll'] = kprll
            self.subband_delay_power_spectra_resampled[key]['kperp'] = kperp
            self.subband_delay_power_spectra_resampled[key]['horizon_kprll_limits'] = horizon_kprll_limits
            conversion_factor = self.subband_delay_power_spectra[key]['factor'].reshape(1,-1,1,1)
            self.subband_delay_power_spectra_resampled[key]['skyvis_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['skyvis_lag'])**2 * conversion_factor
            self.subband_delay_power_spectra_resampled[key]['vis_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_lag'])**2 * conversion_factor
            if key == 'cc':
                self.subband_delay_power_spectra_resampled[key]['skyvis_res_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['skyvis_res_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra_resampled[key]['vis_res_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_res_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra_resampled[key]['skyvis_net_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['skyvis_net_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra_resampled[key]['vis_net_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_net_lag'])**2 * conversion_factor
            else:
                self.subband_delay_power_spectra_resampled[key]['vis_noise_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_noise_lag'])**2 * conversion_factor
############################################################################
def compute_power_spectrum_allruns(self, dspec, subband=False):
    """
    ------------------------------------------------------------------------
    Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the delay
    spectrum in units of Jy Hz from multiple runs of visibilities

    Inputs:

    dspec   [dictionary] Delay spectrum information under the keys:
            'vislag1'   [numpy array] delay spectra of the first set of
                        visibilities, of shape
                        n_win x (n1 x n2 x ... n_runs dims) x n_bl x nlags
                        x n_t if subband is True, or
                        (n1 x n2 x ... n_runs dims) x n_bl x nlags x n_t
                        if subband is False. Always required.
            'vislag2'   [numpy array] delay spectra of the second set of
                        visibilities (optional). If absent, the value under
                        'vislag1' is used and an auto-delay power spectrum
                        is computed. If present, it must have the same
                        shape as 'vislag1' and a cross-delay power spectrum
                        is computed (real part doubled to account for the
                        conjugate pairing).
            'freq_center'
                        [scalar, list or numpy array] center frequencies
                        (Hz) of the n_win subbands. Required if subband=True
            'bw_eff'    [scalar, list or numpy array] effective bandwidths
                        (Hz) of the n_win subbands. Required if subband=True
            'freq_wts'  [numpy array] frequency weights (n_win x nchan)
                        applied during the subband delay transform.
                        Required if subband=True
            'lags'      accepted for backward compatibility but unused
                        (earlier versions computed k_parallel here and
                        discarded the result)

    subband [boolean] If False (default), the full band is used and only
            'vislag1' (and optionally 'vislag2') is required. If True, the
            delay power spectrum is estimated in the specified subbands.

    Output:

    Dictionary containing the delay power spectrum (K^2 (Mpc/h)^3) under
    key 'fullband' (subband=False) or 'subband' (subband=True), with the
    same shape as the input delay spectra. The input dictionary is not
    modified.
    ------------------------------------------------------------------------
    """
    if not isinstance(dspec, dict):
        raise TypeError('Input dspec must be a dictionary')
    if 'vislag1' not in dspec:
        raise KeyError('Key "vislag1" not found in input dspec')
    vislag1 = dspec['vislag1']
    if not isinstance(vislag1, NP.ndarray):
        raise TypeError('Value under key "vislag1" must be a numpy array')
    if 'vislag2' in dspec:
        # Cross power spectrum between two independent visibility sets
        mode = 'cross'
        vislag2 = dspec['vislag2']
        if not isinstance(vislag2, NP.ndarray):
            raise TypeError('Value under key "vislag2" must be a numpy array')
        if vislag1.shape != vislag2.shape:
            raise ValueError('Value under keys "vislag1" and "vislag2" must have same shape')
    else:
        # Auto power spectrum: correlate the first set with itself. Use a
        # local reference instead of writing back into the caller's dict.
        mode = 'auto'
        vislag2 = vislag1
    if not isinstance(subband, bool):
        raise TypeError('Input subband must be boolean')

    dps = {}
    if not subband:
        # Full-band conversion factor (scalar), precomputed on the instance.
        # NP.asarray guards against plain-float attributes; the builtin int
        # replaces the NP.int alias removed in NumPy >= 1.24.
        factor = NP.asarray(self.jacobian1 * self.jacobian2 * self.Jy2K**2)
        factor = factor.reshape((1,) * vislag1.ndim)  # broadcastable scalar
        key = 'fullband'
    else:
        freq_center = NP.asarray(dspec['freq_center']).reshape(-1)  # n_win
        bw_eff = NP.asarray(dspec['bw_eff']).reshape(-1)  # n_win
        wl = FCNST.c / freq_center  # wavelength (m), n_win
        redshift = CNST.rest_freq_HI / freq_center - 1  # n_win
        rz_los = self.cosmo.comoving_distance(redshift).to('Mpc').value  # in Mpc/h, n_win
        drz_los = self.comoving_los_depth(bw_eff, redshift, action='return')  # n_win
        omega_bw = self.beam3Dvol(freq_wts=NP.squeeze(dspec['freq_wts']))  # beam-bandwidth volume, n_win
        jacobian1 = 1 / omega_bw  # n_win
        # jacobian2 = rz_transverse**2 * drz_los / bw_eff  # alternative convention
        jacobian2 = rz_los**2 * drz_los / bw_eff  # n_win
        Jy2K = wl**2 * CNST.Jy / (2*FCNST.k)  # Rayleigh-Jeans Jy -> K, n_win
        factor = jacobian1 * jacobian2 * Jy2K**2  # n_win
        # Broadcast along the leading n_win axis of the delay spectra
        factor = factor.reshape((-1,) + (1,) * (vislag1.ndim - 1))
        key = 'subband'
    dps[key] = (vislag1 * vislag2.conj() * factor).real
    if mode == 'cross':
        # Each conjugate cross pair contributes twice
        dps[key] *= 2
    return dps
############################################################################
def compute_individual_closure_phase_power_spectrum(self, closure_phase_delay_spectra):
    """
    ------------------------------------------------------------------------
    Compute delay power spectrum of closure phase in units of Mpc/h from the
    delay spectrum in units of Hz

    Inputs:

    closure_phase_delay_spectra
        [dictionary] information about closure phase delay spectra of
        different frequency sub-bands (n_win in number) under these keys:
        'antenna_triplets'  [list of tuples] antenna ID triplets for which
                            closure phase delay spectra were computed
        'baseline_triplets' [numpy array] list of 3x3 arrays; in each 3x3
                            unit the rows are the three baseline vectors of
                            the triplet and the columns their x-, y- and
                            z-components. Same length as 'antenna_triplets'
        'freq_center'       [numpy array] center frequencies (Hz) of the
                            subbands, size n_win
        'bw_eff'            [numpy array] effective bandwidths (Hz) of the
                            subbands, size n_win
        'lags'              [numpy array] lags (s) of the resampled subband
                            delay spectra, size nlags
        'closure_phase_skyvis' / 'closure_phase_vis' / 'closure_phase_noise'
                            (each optional)
                            [numpy array] subband closure-phase delay
                            spectra (Hz) of noiseless / noisy / noise-only
                            visibilities, each of shape
                            n_triplets x n_win x nlags x n_t

    Output:

    Dictionary with the following keys and values:
    'z'       [numpy array] redshifts of the subband centers, size n_win
    'kprll'   [numpy array] k_parallel (h/Mpc), n_win x nlags
    'kperp'   [numpy array] k_perp (h/Mpc), n_win x n_triplets x 3
              (3 baselines per triplet)
    'horizon_kprll_limits'
              [numpy array] horizon limits on k_parallel (h/Mpc) of shape
              n_t x n_win x n_triplets x 3 x 2 (3 baselines, upper/lower)
    'closure_phase_skyvis' / 'closure_phase_vis' / 'closure_phase_noise'
              [numpy array] delay power spectra (Mpc/h) of the same shape
              as the corresponding input; returned only for the keys
              present in the input
    ------------------------------------------------------------------------
    """
    # NOTE(review): this guard is vestigial -- a missing positional argument
    # raises TypeError before the body runs, so the except never fires
    try:
        closure_phase_delay_spectra
    except NameError:
        raise NameError('Input closure_phase_delay_spectra must be provided')
    closure_phase_delay_power_spectra = {}
    wl = FCNST.c / closure_phase_delay_spectra['freq_center']  # wavelength (m), n_win; currently unused below
    z = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center'] - 1  # redshift, n_win
    dz = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center']**2 * closure_phase_delay_spectra['bw_eff']  # redshift width, n_win; currently unused below
    kprll = NP.empty((closure_phase_delay_spectra['freq_center'].size, closure_phase_delay_spectra['lags'].size))  # n_win x nlags
    kperp = NP.empty((closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3)) # n_win x n_triplets x 3, where 3 is for the three baselines involved
    horizon_kprll_limits = NP.empty((self.ds.n_acc, closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3, 2)) # n_t x n_win x n_triplets x 3 x 2, where 3 is for the three baselines involved
    for zind,redshift in enumerate(z):
        kprll[zind,:] = self.k_parallel(closure_phase_delay_spectra['lags'], redshift, action='return')
        for triplet_ind, ant_triplet in enumerate(closure_phase_delay_spectra['antenna_triplets']):
            # Lengths of the three baselines of this triplet
            bl_lengths = NP.sqrt(NP.sum(closure_phase_delay_spectra['baseline_triplets'][triplet_ind]**2, axis=1))
            kperp[zind,triplet_ind,:] = self.k_perp(bl_lengths, redshift, action='return')
            # Horizon delay = |b|/c; stack +/- limits along the last axis
            horizon_delay_limits = bl_lengths.reshape(1,-1,1) / FCNST.c # 1x3x1, where 1 phase center, 3 is for the three baselines involved in the triplet, 1 upper limit
            horizon_delay_limits = NP.concatenate((horizon_delay_limits, -horizon_delay_limits), axis=2) # 1x3x2, where 1 phase center, 3 is for the three baselines involved in the triplet, 2 limits (upper and lower)
            horizon_kprll_limits[:,zind,triplet_ind,:,:] = self.k_parallel(horizon_delay_limits, redshift, action='return') # 1 x n_win x n_triplets x 3 x 2, where 1 phase center, 3 is for the three baselines involved in the triplet, 2 limits (upper and lower)
    closure_phase_delay_power_spectra['z'] = z
    closure_phase_delay_power_spectra['kprll'] = kprll
    closure_phase_delay_power_spectra['kperp'] = kperp
    closure_phase_delay_power_spectra['horizon_kprll_limits'] = horizon_kprll_limits
    # rz_transverse = self.comoving_transverse_distance(closure_phase_delay_power_spectra['z'], action='return')
    drz_los = self.comoving_los_depth(closure_phase_delay_spectra['bw_eff'], closure_phase_delay_power_spectra['z'], action='return')  # comoving LOS depth, n_win
    # omega_bw = self.beam3Dvol(freq_wts=closure_phase_delay_spectra['freq_wts'])
    # jacobian1 = 1 / omega_bw
    # jacobian2 = rz_transverse**2 * drz_los / closure_phase_delay_spectra['bw_eff']
    # Jy2K = wl**2 * CNST.Jy / (2*FCNST.k)
    # Closure phases are dimensionless: no beam volume or Jy->K conversion,
    # only bandwidth normalization and line-of-sight depth (alternatives
    # kept commented above)
    jacobian1 = 1 / closure_phase_delay_spectra['bw_eff']
    jacobian2 = drz_los / closure_phase_delay_spectra['bw_eff']
    factor = jacobian1 * jacobian2  # n_win
    conversion_factor = factor.reshape(1,-1,1,1)  # broadcast over n_win axis of n_triplets x n_win x nlags x n_t
    for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
        if key in closure_phase_delay_spectra:
            closure_phase_delay_power_spectra[key] = NP.abs(closure_phase_delay_spectra[key])**2 * conversion_factor
    return closure_phase_delay_power_spectra
############################################################################
def compute_averaged_closure_phase_power_spectrum(self, closure_phase_delay_spectra):
    """
    ------------------------------------------------------------------------
    Compute delay power spectrum of closure phase in units of Mpc/h from the
    delay spectrum in units of Hz, averaged over 'auto' (same-run, diagonal)
    and 'cross' (run-pair, off-diagonal) terms

    Inputs:

    closure_phase_delay_spectra
        [dictionary] information about closure phase delay spectra of
        different frequency sub-bands (n_win in number) under these keys:
        'antenna_triplets'  [list of tuples] antenna ID triplets for which
                            closure phase delay spectra were computed
        'baseline_triplets' [numpy array] list of 3x3 arrays; in each 3x3
                            unit the rows are the three baseline vectors of
                            the triplet and the columns their x-, y- and
                            z-components. Same length as 'antenna_triplets'
        'freq_center'       [numpy array] center frequencies (Hz) of the
                            subbands, size n_win
        'bw_eff'            [numpy array] effective bandwidths (Hz) of the
                            subbands, size n_win
        'lags'              [numpy array] lags (s) of the resampled subband
                            delay spectra, size nlags
        'closure_phase_skyvis' / 'closure_phase_vis' / 'closure_phase_noise'
                            (each optional)
                            [numpy array] subband closure-phase delay
                            spectra (Hz) of noiseless / noisy / noise-only
                            visibilities; leading axis indexes the runs,
                            and the last three axes are n_win x nlags x n_t

    Output:

    Dictionary with the following keys and values:
    'z'       [numpy array] redshifts of the subband centers, size n_win
    'kprll'   [numpy array] k_parallel (h/Mpc), n_win x nlags
    'kperp'   [numpy array] k_perp (h/Mpc), n_win x n_triplets x 3
    'horizon_kprll_limits'
              [numpy array] horizon limits on k_parallel (h/Mpc) of shape
              n_t x n_win x n_triplets x 3 x 2 (3 baselines, upper/lower)
    'auto'    [dictionary] average of the diagonal (same-run) power terms
              for each optional closure-phase key present in the input,
              each of shape 1 x ... x n_win x nlags x n_t, in Mpc/h
    'cross'   [dictionary] average of the off-diagonal (run-pair) power
              terms for the same keys, same shapes, in Mpc/h. Requires at
              least 2 runs along the leading axis
    ------------------------------------------------------------------------
    """
    if not isinstance(closure_phase_delay_spectra, dict):
        raise TypeError('Input closure_phase_delay_spectra must be a dictionary')
    closure_phase_delay_power_spectra = {}
    z = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center'] - 1  # redshift, n_win
    kprll = NP.empty((closure_phase_delay_spectra['freq_center'].size, closure_phase_delay_spectra['lags'].size))  # n_win x nlags
    kperp = NP.empty((closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3))  # n_win x n_triplets x 3 baselines
    horizon_kprll_limits = NP.empty((self.ds.n_acc, closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3, 2))  # n_t x n_win x n_triplets x 3 x 2
    for zind, redshift in enumerate(z):
        kprll[zind,:] = self.k_parallel(closure_phase_delay_spectra['lags'], redshift, action='return')
        for triplet_ind, ant_triplet in enumerate(closure_phase_delay_spectra['antenna_triplets']):
            # Lengths of the three baselines of this triplet
            bl_lengths = NP.sqrt(NP.sum(closure_phase_delay_spectra['baseline_triplets'][triplet_ind]**2, axis=1))
            kperp[zind,triplet_ind,:] = self.k_perp(bl_lengths, redshift, action='return')
            # Horizon delay = |b|/c; stack +/- limits along the last axis
            horizon_delay_limits = bl_lengths.reshape(1,-1,1) / FCNST.c  # 1 x 3 x 1
            horizon_delay_limits = NP.concatenate((horizon_delay_limits, -horizon_delay_limits), axis=2)  # 1 x 3 x 2
            horizon_kprll_limits[:,zind,triplet_ind,:,:] = self.k_parallel(horizon_delay_limits, redshift, action='return')
    closure_phase_delay_power_spectra['z'] = z
    closure_phase_delay_power_spectra['kprll'] = kprll
    closure_phase_delay_power_spectra['kperp'] = kperp
    closure_phase_delay_power_spectra['horizon_kprll_limits'] = horizon_kprll_limits
    drz_los = self.comoving_los_depth(closure_phase_delay_spectra['bw_eff'], z, action='return')  # comoving LOS depth, n_win
    # Closure phases are dimensionless: no beam volume or Jy->K conversion,
    # only bandwidth normalization and line-of-sight depth
    jacobian1 = 1 / closure_phase_delay_spectra['bw_eff']
    jacobian2 = drz_los / closure_phase_delay_spectra['bw_eff']
    factor = jacobian1 * jacobian2  # n_win
    closure_phase_delay_power_spectra['auto'] = {}
    closure_phase_delay_power_spectra['cross'] = {}
    for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
        if key not in closure_phase_delay_spectra:
            continue
        cpdspec = closure_phase_delay_spectra[key]
        # Reshape factor to broadcast along the n_win axis (axis -3 of
        # ... x n_win x nlags x n_t). Previously this reshape was done in a
        # separate loop whose last iteration's value was silently reused
        # for every key; compute it per key instead.
        bcast_shape = NP.ones(cpdspec.ndim, dtype=int)
        bcast_shape[-3] = -1
        conversion_factor = factor.reshape(tuple(bcast_shape))
        nruns = cpdspec.shape[0]
        # 'auto': mean of same-run (diagonal) power terms
        auto_ps = NP.mean(NP.abs(cpdspec)**2, axis=0, keepdims=True) * conversion_factor
        closure_phase_delay_power_spectra['auto'][key] = auto_ps
        # 'cross': mean over the nruns*(nruns-1) off-diagonal pairs, using
        # |sum_i x_i|^2 = sum_i |x_i|^2 + sum_{i != j} x_i x_j^*
        # NOTE(review): requires nruns >= 2, as in the original
        closure_phase_delay_power_spectra['cross'][key] = 1.0 / (nruns*(nruns-1)) * (conversion_factor * NP.abs(NP.sum(cpdspec, axis=0, keepdims=True))**2 - nruns * auto_ps)
    return closure_phase_delay_power_spectra
############################################################################
| 58.368618
| 341
| 0.572577
| 32,845
| 265,227
| 4.491247
| 0.035713
| 0.030261
| 0.028851
| 0.00983
| 0.812406
| 0.781771
| 0.735166
| 0.70222
| 0.669071
| 0.646443
| 0
| 0.009138
| 0.326611
| 265,227
| 4,543
| 342
| 58.381466
| 0.81681
| 0.510174
| 0
| 0.391626
| 0
| 0.004926
| 0.162502
| 0.010738
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01601
| false
| 0.006158
| 0.015394
| 0.000616
| 0.050493
| 0.025246
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
41d68dfe8dbeed9c7637dbf333fcd1bbebbd7e7b
| 121
|
py
|
Python
|
rusty/asgi/app.py
|
justanotherbyte/Rusty
|
0132c44a99ebc36f31c70482b19161196f41bc5e
|
[
"MIT"
] | 1
|
2021-09-03T13:03:16.000Z
|
2021-09-03T13:03:16.000Z
|
rusty/asgi/app.py
|
justanotherbyte/Rusty
|
0132c44a99ebc36f31c70482b19161196f41bc5e
|
[
"MIT"
] | null | null | null |
rusty/asgi/app.py
|
justanotherbyte/Rusty
|
0132c44a99ebc36f31c70482b19161196f41bc5e
|
[
"MIT"
] | 1
|
2021-12-24T12:33:09.000Z
|
2021-12-24T12:33:09.000Z
|
from starlette.applications import Starlette
class RustyMain(Starlette):
"""
Inheritance for asgi spec.
"""
| 17.285714
| 44
| 0.702479
| 12
| 121
| 7.083333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206612
| 121
| 7
| 45
| 17.285714
| 0.885417
| 0.214876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
41db9cbfbd544f0784150a7a2453e75cfdec8d7e
| 40
|
py
|
Python
|
tests/__init__.py
|
Alviner/async_lock
|
ffa42cb845c4567c997a2a654cda1e31b28097d7
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
Alviner/async_lock
|
ffa42cb845c4567c997a2a654cda1e31b28097d7
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
Alviner/async_lock
|
ffa42cb845c4567c997a2a654cda1e31b28097d7
|
[
"MIT"
] | 1
|
2021-09-30T18:31:16.000Z
|
2021-09-30T18:31:16.000Z
|
"""Unit test package for async_lock."""
| 20
| 39
| 0.7
| 6
| 40
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 40
| 1
| 40
| 40
| 0.771429
| 0.825
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
41f14f8dd9e49c71f29b0a0c4fbd22fe713f65d8
| 194
|
py
|
Python
|
reactEx/jobs/admin.py
|
IsDon/isdon-JobShifts
|
f4cbed32b6f24754153a77f7d47baa8895dbf3a3
|
[
"MIT"
] | 2
|
2017-04-25T10:44:55.000Z
|
2020-08-06T12:48:22.000Z
|
reactEx/jobs/admin.py
|
IsDon/isdon-JobShifts
|
f4cbed32b6f24754153a77f7d47baa8895dbf3a3
|
[
"MIT"
] | null | null | null |
reactEx/jobs/admin.py
|
IsDon/isdon-JobShifts
|
f4cbed32b6f24754153a77f7d47baa8895dbf3a3
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Job, WorkShift, Position
# Register your models here.
admin.site.register(Job)
admin.site.register(WorkShift)
admin.site.register(Position)
| 24.25
| 44
| 0.809278
| 27
| 194
| 5.814815
| 0.481481
| 0.171975
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097938
| 194
| 8
| 45
| 24.25
| 0.897143
| 0.134021
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
41fe5df4e3e91688763c979a535d64bdd5e27c6c
| 9,448
|
py
|
Python
|
test/test_card.py
|
furgerf/pyjass
|
a4270d2a93ec65fc5101e4595c3c0a1361c5ffbe
|
[
"Apache-2.0"
] | null | null | null |
test/test_card.py
|
furgerf/pyjass
|
a4270d2a93ec65fc5101e4595c3c0a1361c5ffbe
|
[
"Apache-2.0"
] | 6
|
2020-01-28T22:35:11.000Z
|
2022-02-10T00:06:37.000Z
|
test/test_card.py
|
furgerf/pyjass
|
a4270d2a93ec65fc5101e4595c3c0a1361c5ffbe
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from config import Config
from unittest import TestCase
from card import Card
from encoding import Encoding
from game_type import GameType
from parameterized import parameterized
class CardTest(TestCase):
# pylint: disable=line-too-long
def setUp(self):
self.cards_suit_1 = [Card(0, 1), Card(0, 2)]
self.cards_suit_2 = [Card(1, 1), Card(1, 2)]
@parameterized.expand([
[GameType.OBENABE],
[GameType.TRUMP_HEARTS],
[GameType.TRUMP_CLUBS],
[GameType.TRUMP_DIAMONDS],
[GameType.TRUMP_SPADES],
])
def test_has_worse_value_than_obenabe_trump(self, game_type):
for card in self.cards_suit_1 + self.cards_suit_2:
card.set_game_type(game_type)
self.assertFalse(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_1[0]))
self.assertTrue(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_2[0]))
self.assertTrue(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_2[1]))
self.assertFalse(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_1[0]))
self.assertFalse(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_2[1]))
def test_has_worse_value_than_unnenufe(self):
for card in self.cards_suit_1 + self.cards_suit_2:
card.set_game_type(GameType.UNNENUFE)
self.assertFalse(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_1[0]))
self.assertFalse(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[0].has_worse_value_than(self.cards_suit_2[1]))
self.assertTrue(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_1[0]))
self.assertFalse(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_1[1]))
self.assertTrue(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[1].has_worse_value_than(self.cards_suit_2[1]))
def test_is_beaten_by_obenabe(self):
for card in self.cards_suit_1 + self.cards_suit_2:
card.set_game_type(GameType.OBENABE)
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_1[0]))
self.assertTrue(self.cards_suit_1[0].is_beaten_by(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_2[1]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_1[0]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_2[1]))
def test_is_beaten_by_unnenufe(self):
for card in self.cards_suit_1 + self.cards_suit_2:
card.set_game_type(GameType.UNNENUFE)
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_1[0]))
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[0].is_beaten_by(self.cards_suit_2[1]))
self.assertTrue(self.cards_suit_1[1].is_beaten_by(self.cards_suit_1[0]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_1[1]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_2[0]))
self.assertFalse(self.cards_suit_1[1].is_beaten_by(self.cards_suit_2[1]))
def test_is_beaten_by_trump(self):
non_trump_1 = Card(Card.SPADES, 1)
non_trump_2 = Card(Card.CLUBS, 2)
normal_trump_1 = Card(Card.HEARTS, 0)
normal_trump_2 = Card(Card.HEARTS, 8)
nell = Card(Card.HEARTS, 3)
buur = Card(Card.HEARTS, 5)
cards = [non_trump_1, non_trump_2, normal_trump_1, normal_trump_2, nell, buur]
for card in cards:
card.set_game_type(GameType.TRUMP_HEARTS)
self.assertFalse(non_trump_1.is_trump)
self.assertFalse(non_trump_2.is_trump)
self.assertTrue(normal_trump_1.is_trump)
self.assertTrue(normal_trump_2.is_trump)
self.assertTrue(nell.is_trump)
self.assertTrue(nell.is_nell)
self.assertTrue(buur.is_trump)
self.assertTrue(buur.is_buur)
# buur beats everyone
self.assertFalse(buur.is_beaten_by(non_trump_1))
self.assertFalse(buur.is_beaten_by(normal_trump_1))
self.assertFalse(buur.is_beaten_by(nell))
self.assertTrue(non_trump_1.is_beaten_by(buur))
self.assertTrue(normal_trump_1.is_beaten_by(buur))
self.assertTrue(nell.is_beaten_by(buur))
# nell beats all non-buurs
self.assertFalse(nell.is_beaten_by(non_trump_1))
self.assertFalse(nell.is_beaten_by(normal_trump_1))
self.assertTrue(nell.is_beaten_by(buur))
self.assertTrue(non_trump_1.is_beaten_by(nell))
self.assertTrue(normal_trump_1.is_beaten_by(nell))
self.assertFalse(buur.is_beaten_by(nell))
# normal trump beats smaller trump and non-trumps
self.assertFalse(normal_trump_2.is_beaten_by(non_trump_1))
self.assertFalse(normal_trump_2.is_beaten_by(normal_trump_1))
self.assertTrue(normal_trump_2.is_beaten_by(nell))
self.assertTrue(normal_trump_2.is_beaten_by(buur))
self.assertTrue(non_trump_1.is_beaten_by(normal_trump_2))
self.assertTrue(normal_trump_1.is_beaten_by(normal_trump_2))
self.assertFalse(nell.is_beaten_by(normal_trump_2))
self.assertFalse(buur.is_beaten_by(normal_trump_2))
# non-trumps are normal and always lose against trumps
self.assertFalse(non_trump_1.is_beaten_by(non_trump_2))
self.assertFalse(non_trump_2.is_beaten_by(non_trump_1))
self.assertTrue(non_trump_1.is_beaten_by(normal_trump_1))
self.assertTrue(non_trump_1.is_beaten_by(nell))
self.assertTrue(non_trump_1.is_beaten_by(buur))
def test_card_index_by_suit(self):
Config.ENCODING = Encoding("better", [1, 2, 3, 4], 5, 10, 50, 0, 0, order_value=True, card_index_by_suit=True)
cards = [
Card(Card.SPADES, 0), Card(Card.SPADES, Card.VALUE_NELL), Card(Card.SPADES, Card.VALUE_BUUR), Card(Card.SPADES, 8),
Card(Card.HEARTS, 0), Card(Card.HEARTS, Card.VALUE_NELL), Card(Card.HEARTS, Card.VALUE_BUUR), Card(Card.HEARTS, 8),
Card(Card.DIAMONDS, 0), Card(Card.DIAMONDS, Card.VALUE_NELL), Card(Card.DIAMONDS, Card.VALUE_BUUR), Card(Card.DIAMONDS, 8),
Card(Card.CLUBS, 0), Card(Card.CLUBS, Card.VALUE_NELL), Card(Card.CLUBS, Card.VALUE_BUUR), Card(Card.CLUBS, 8)
]
for card in cards:
card.set_game_type(GameType.TRUMP_DIAMONDS)
# SPADES: not trump, doesn't get reshuffled
self.assertEqual(0, cards[0].card_index)
self.assertEqual(3, cards[1].card_index)
self.assertEqual(5, cards[2].card_index)
self.assertEqual(8, cards[3].card_index)
# HEARTS: not trump, doesn't get reshuffled
self.assertEqual(0+9, cards[4].card_index)
self.assertEqual(3+9, cards[5].card_index)
self.assertEqual(5+9, cards[6].card_index)
self.assertEqual(8+9, cards[7].card_index)
# DIAMONDS: trump, gets reshuffled
self.assertEqual(0+18, cards[8].card_index)
self.assertEqual(7+18, cards[9].card_index)
self.assertEqual(8+18, cards[10].card_index)
self.assertEqual(6+18, cards[11].card_index)
# CLUBS: not trump, doesn't get reshuffled
self.assertEqual(0+27, cards[12].card_index)
self.assertEqual(3+27, cards[13].card_index)
self.assertEqual(5+27, cards[14].card_index)
self.assertEqual(8+27, cards[15].card_index)
def test_card_index_by_value(self):
Config.ENCODING = Encoding("better", [1, 2, 3, 4], 5, 10, 50, 0, 0, order_value=True, card_index_by_suit=False)
cards = [
Card(Card.SPADES, 0), Card(Card.SPADES, Card.VALUE_NELL), Card(Card.SPADES, Card.VALUE_BUUR), Card(Card.SPADES, 8),
Card(Card.HEARTS, 0), Card(Card.HEARTS, Card.VALUE_NELL), Card(Card.HEARTS, Card.VALUE_BUUR), Card(Card.HEARTS, 8),
Card(Card.DIAMONDS, 0), Card(Card.DIAMONDS, Card.VALUE_NELL), Card(Card.DIAMONDS, Card.VALUE_BUUR), Card(Card.DIAMONDS, 8),
Card(Card.CLUBS, 0), Card(Card.CLUBS, Card.VALUE_NELL), Card(Card.CLUBS, Card.VALUE_BUUR), Card(Card.CLUBS, 8)
]
for card in cards:
card.set_game_type(GameType.TRUMP_DIAMONDS)
# SPADES: not trump, doesn't get reshuffled
self.assertEqual(0, cards[0].card_index)
self.assertEqual(12, cards[1].card_index)
self.assertEqual(20, cards[2].card_index)
self.assertEqual(32, cards[3].card_index)
# HEARTS: not trump, doesn't get reshuffled
self.assertEqual(1, cards[4].card_index)
self.assertEqual(13, cards[5].card_index)
self.assertEqual(21, cards[6].card_index)
self.assertEqual(33, cards[7].card_index)
# DIAMONDS: trump, gets reshuffled
self.assertEqual(2, cards[8].card_index)
self.assertEqual(14+16, cards[9].card_index)
self.assertEqual(22+12, cards[10].card_index)
self.assertEqual(34-8, cards[11].card_index)
# CLUBS: not trump, doesn't get reshuffled
self.assertEqual(3, cards[12].card_index)
self.assertEqual(15, cards[13].card_index)
self.assertEqual(23, cards[14].card_index)
self.assertEqual(35, cards[15].card_index)
| 46.313725
| 131
| 0.744814
| 1,580
| 9,448
| 4.151899
| 0.066456
| 0.101524
| 0.146646
| 0.11311
| 0.860366
| 0.848628
| 0.701067
| 0.692073
| 0.622866
| 0.609604
| 0
| 0.041546
| 0.123624
| 9,448
| 203
| 132
| 46.541872
| 0.750725
| 0.05652
| 0
| 0.326923
| 0
| 0
| 0.001348
| 0
| 0
| 0
| 0
| 0
| 0.621795
| 1
| 0.051282
| false
| 0
| 0.038462
| 0
| 0.096154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5104fb8d997531a0318e39179723387b2f922d0f
| 157
|
py
|
Python
|
LABWORK1/api/admin.py
|
maxovic/summerpractice2019
|
0b61ca6302f74618a62bad60615c47f29fa531cb
|
[
"MIT"
] | null | null | null |
LABWORK1/api/admin.py
|
maxovic/summerpractice2019
|
0b61ca6302f74618a62bad60615c47f29fa531cb
|
[
"MIT"
] | null | null | null |
LABWORK1/api/admin.py
|
maxovic/summerpractice2019
|
0b61ca6302f74618a62bad60615c47f29fa531cb
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import TaskList, Task
# Register your models here.
admin.site.register(TaskList)
admin.site.register(Task)
| 15.7
| 34
| 0.789809
| 22
| 157
| 5.636364
| 0.545455
| 0.145161
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127389
| 157
| 9
| 35
| 17.444444
| 0.905109
| 0.165605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
51370945cde16a4afa413cf4a793854fbac1fb3a
| 60
|
py
|
Python
|
api/models/__init__.py
|
biancarosa/neural-networks-api
|
2066a24c7f5af818d14a8cab5cb243ac84a9b3f5
|
[
"CC-BY-4.0"
] | 1
|
2019-04-26T17:56:57.000Z
|
2019-04-26T17:56:57.000Z
|
api/models/__init__.py
|
biancarosa/neural-networks-api
|
2066a24c7f5af818d14a8cab5cb243ac84a9b3f5
|
[
"CC-BY-4.0"
] | null | null | null |
api/models/__init__.py
|
biancarosa/neural-networks-api
|
2066a24c7f5af818d14a8cab5cb243ac84a9b3f5
|
[
"CC-BY-4.0"
] | 1
|
2019-04-26T17:57:00.000Z
|
2019-04-26T17:57:00.000Z
|
from api.models.classifier_network import ClassifierNetwork
| 30
| 59
| 0.9
| 7
| 60
| 7.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 60
| 1
| 60
| 60
| 0.946429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5139d41609e1e1347a992528a31aa63d70a3c0c4
| 419
|
py
|
Python
|
console.py
|
sjbecque/tetris-python
|
76d43f332f4299b9acc676dbc13e1e05ea0939ad
|
[
"MIT"
] | null | null | null |
console.py
|
sjbecque/tetris-python
|
76d43f332f4299b9acc676dbc13e1e05ea0939ad
|
[
"MIT"
] | null | null | null |
console.py
|
sjbecque/tetris-python
|
76d43f332f4299b9acc676dbc13e1e05ea0939ad
|
[
"MIT"
] | null | null | null |
# simple console test environment
from tetris.src.engine import Engine
from tetris.src.game import Game
from tetris.src.tetromino_factory import TetrominoFactory
from tetris.src.cube import Cube
from tetris.src.cube_sets.cube_set import CubeSet
from tetris.src.cube_sets.stones import Stones
from tetris.src.cube_sets.tetromino import Tetromino
e = Engine(True, True)
g = Game()
f = TetrominoFactory()
t = f.produce()
| 29.928571
| 57
| 0.811456
| 65
| 419
| 5.153846
| 0.369231
| 0.208955
| 0.271642
| 0.202985
| 0.18806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112172
| 419
| 14
| 58
| 29.928571
| 0.900538
| 0.073986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.636364
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
514a5f5ebb7b85377e2b35edc36c224183b541ed
| 176
|
py
|
Python
|
scripts/item/consume_2433183.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
scripts/item/consume_2433183.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
scripts/item/consume_2433183.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
# Super Spooky Damage Skin
success = sm.addDamageSkin(2433183)
if success:
sm.chat("The Super Spooky Damage Skin has been added to your account's damage skin collection.")
| 35.2
| 100
| 0.767045
| 27
| 176
| 5
| 0.703704
| 0.222222
| 0.251852
| 0.311111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047297
| 0.159091
| 176
| 4
| 101
| 44
| 0.864865
| 0.136364
| 0
| 0
| 0
| 0
| 0.566667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
514b68f62058220ab48544f1a1635fbff41092bc
| 45
|
py
|
Python
|
examples/hf_transformers/custom/__init__.py
|
AhmedHussKhalifa/torchdistill
|
071089765f95aa09da9573039ac2bd54f47cea1e
|
[
"MIT"
] | 576
|
2020-11-26T03:20:50.000Z
|
2022-03-31T16:42:49.000Z
|
examples/hf_transformers/custom/__init__.py
|
AhmedHussKhalifa/torchdistill
|
071089765f95aa09da9573039ac2bd54f47cea1e
|
[
"MIT"
] | 24
|
2020-12-02T12:16:44.000Z
|
2022-02-17T16:14:49.000Z
|
examples/hf_transformers/custom/__init__.py
|
AhmedHussKhalifa/torchdistill
|
071089765f95aa09da9573039ac2bd54f47cea1e
|
[
"MIT"
] | 60
|
2020-11-26T03:27:04.000Z
|
2022-03-30T09:49:00.000Z
|
from custom import forward_proc, loss, optim
| 22.5
| 44
| 0.822222
| 7
| 45
| 5.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 45
| 1
| 45
| 45
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8506c3b6b6c9cd6c3d942e50789fdc2e1d1bcca3
| 92
|
py
|
Python
|
myProject/tests/testMotors.py
|
thatguy1234510/NuvuDuckyBotCodebase
|
1a2d46d990bee7495ca60dff58039bb1c4374357
|
[
"MIT"
] | 2
|
2019-07-23T20:05:57.000Z
|
2019-07-23T20:18:38.000Z
|
myProject/tests/testMotors.py
|
theloni-monk/NuvuDuckyBotCodebase
|
1a2d46d990bee7495ca60dff58039bb1c4374357
|
[
"MIT"
] | null | null | null |
myProject/tests/testMotors.py
|
theloni-monk/NuvuDuckyBotCodebase
|
1a2d46d990bee7495ca60dff58039bb1c4374357
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append("..")
import CORE.motor
#WRITEME: just have it drive in a square
| 18.4
| 40
| 0.73913
| 16
| 92
| 4.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141304
| 92
| 4
| 41
| 23
| 0.860759
| 0.423913
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
517e993506eea7ff6d8b92b937db4303a7139397
| 163
|
py
|
Python
|
Super_users/admin.py
|
amirhRahimi1993/info
|
29e3e356e37d37415c4fb708185c9448f36d33ca
|
[
"Apache-2.0"
] | null | null | null |
Super_users/admin.py
|
amirhRahimi1993/info
|
29e3e356e37d37415c4fb708185c9448f36d33ca
|
[
"Apache-2.0"
] | null | null | null |
Super_users/admin.py
|
amirhRahimi1993/info
|
29e3e356e37d37415c4fb708185c9448f36d33ca
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import collector , Dr_info
admin.site.register(Dr_info)
admin.site.register(collector)
# Register your models here.
| 23.285714
| 39
| 0.809816
| 24
| 163
| 5.416667
| 0.541667
| 0.092308
| 0.169231
| 0.230769
| 0.353846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110429
| 163
| 6
| 40
| 27.166667
| 0.896552
| 0.159509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
518873d3c92b94ec5fbfcd4935f34e15dd42d818
| 199
|
py
|
Python
|
source/__init__.py
|
jameswind/neuralCT
|
969829459570c808cceafec665931eb411fff5e2
|
[
"Apache-2.0"
] | 27
|
2019-10-01T22:59:26.000Z
|
2020-12-10T14:07:33.000Z
|
source/__init__.py
|
jameswind/neuralCT
|
969829459570c808cceafec665931eb411fff5e2
|
[
"Apache-2.0"
] | 1
|
2020-03-08T12:11:35.000Z
|
2020-03-09T08:58:30.000Z
|
source/__init__.py
|
jameswind/neuralCT
|
969829459570c808cceafec665931eb411fff5e2
|
[
"Apache-2.0"
] | 4
|
2019-10-02T08:13:39.000Z
|
2021-04-02T14:50:26.000Z
|
from .gaussian import Gaussian
from .multivariateGaussian import MultivariateGaussian
from .ringLike import Ring2d,Ring2dNoMomentum
from .harmonicChain import HarmonicChain
from .source import Source
| 39.8
| 54
| 0.874372
| 21
| 199
| 8.285714
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011111
| 0.095477
| 199
| 5
| 55
| 39.8
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
519b56c3e75ca144edfa8cde14b23897845869f6
| 641
|
py
|
Python
|
src/data_augmentation/scripts/move.py
|
duccl/cardiomyopathy-monograph
|
89056ece724dea3443d01552eb6dd1c0d59b949b
|
[
"MIT"
] | 3
|
2021-08-18T22:54:10.000Z
|
2021-09-29T12:47:58.000Z
|
src/data_augmentation/scripts/move.py
|
duccl/cardiomyopathy-monograph
|
89056ece724dea3443d01552eb6dd1c0d59b949b
|
[
"MIT"
] | null | null | null |
src/data_augmentation/scripts/move.py
|
duccl/cardiomyopathy-monograph
|
89056ece724dea3443d01552eb6dd1c0d59b949b
|
[
"MIT"
] | null | null | null |
import cv2
import numpy as np
def vertically(image: np.ndarray, value: int) -> np.ndarray:
print('Moving image vertically')
(height, width) = image.shape[:2]
translation_matrix = np.float32([[1, 0, 1], [0, 1, value]])
translated_image = cv2.warpAffine(image, translation_matrix, (width, height))
return translated_image
def horizontally(image: np.ndarray, value: int) -> np.ndarray:
print('Moving image horizontally')
(height, width) = image.shape[:2]
translation_matrix = np.float32([[1, 0, value], [0, 1, 1]])
translated_image = cv2.warpAffine(image, translation_matrix, (width, height))
return translated_image
| 29.136364
| 79
| 0.711388
| 86
| 641
| 5.209302
| 0.302326
| 0.080357
| 0.0625
| 0.084821
| 0.799107
| 0.799107
| 0.799107
| 0.799107
| 0.799107
| 0.799107
| 0
| 0.034672
| 0.145086
| 641
| 21
| 80
| 30.52381
| 0.782847
| 0
| 0
| 0.428571
| 0
| 0
| 0.074883
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.428571
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
51a907f90f09cdc014342f4d552aa48efc25499e
| 62
|
py
|
Python
|
cluster/__init__.py
|
jevandezande/cluster
|
6207a2622f2d36558853d50f079ef916d84a3e18
|
[
"MIT"
] | null | null | null |
cluster/__init__.py
|
jevandezande/cluster
|
6207a2622f2d36558853d50f079ef916d84a3e18
|
[
"MIT"
] | null | null | null |
cluster/__init__.py
|
jevandezande/cluster
|
6207a2622f2d36558853d50f079ef916d84a3e18
|
[
"MIT"
] | null | null | null |
from .cluster import Cluster
from .cmolecule import CMolecule
| 20.666667
| 32
| 0.83871
| 8
| 62
| 6.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 62
| 2
| 33
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
51cf569c7e2d7c6246b6f94d1011fa19c95f5de2
| 38
|
py
|
Python
|
pyc64/__main__.py
|
jepebe/c64
|
89d884b03f4e05019143f1be4b46fd9b7e890ad2
|
[
"BSD-2-Clause"
] | 1
|
2020-12-11T14:20:20.000Z
|
2020-12-11T14:20:20.000Z
|
pyc64/__main__.py
|
jepebe/c64
|
89d884b03f4e05019143f1be4b46fd9b7e890ad2
|
[
"BSD-2-Clause"
] | null | null | null |
pyc64/__main__.py
|
jepebe/c64
|
89d884b03f4e05019143f1be4b46fd9b7e890ad2
|
[
"BSD-2-Clause"
] | null | null | null |
import c64
if __name__ == '__main__':
| 12.666667
| 26
| 0.710526
| 5
| 38
| 3.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0.157895
| 38
| 2
| 27
| 19
| 0.53125
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.