hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6d793f7eed23a8e32819461276be9643de946e28
| 54
|
py
|
Python
|
acmicpc/3053/3053.py
|
love-adela/algorithm
|
4ccd02173c96f8369962f1fd4e5166a221690fa2
|
[
"MIT"
] | 3
|
2019-03-09T05:19:23.000Z
|
2019-04-06T09:26:36.000Z
|
acmicpc/3053/3053.py
|
love-adela/algorithm
|
4ccd02173c96f8369962f1fd4e5166a221690fa2
|
[
"MIT"
] | 1
|
2020-02-23T10:38:04.000Z
|
2020-02-23T10:38:04.000Z
|
acmicpc/3053/3053.py
|
love-adela/algorithm
|
4ccd02173c96f8369962f1fd4e5166a221690fa2
|
[
"MIT"
] | 1
|
2019-05-22T13:47:53.000Z
|
2019-05-22T13:47:53.000Z
|
n=int(input())**2
print(n*3.14159265359)
print(n*2.0)
| 13.5
| 22
| 0.666667
| 12
| 54
| 3
| 0.666667
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.055556
| 54
| 3
| 23
| 18
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
6d8309d0525efb522da2ea48d66f0b90afb3437e
| 83
|
py
|
Python
|
exe.curso em video/Aula 11.py
|
Lorenzo-Lopes/Python-Estudo
|
7ee623ce29b6a0e9fac48189fbd9c641be84d418
|
[
"MIT"
] | null | null | null |
exe.curso em video/Aula 11.py
|
Lorenzo-Lopes/Python-Estudo
|
7ee623ce29b6a0e9fac48189fbd9c641be84d418
|
[
"MIT"
] | null | null | null |
exe.curso em video/Aula 11.py
|
Lorenzo-Lopes/Python-Estudo
|
7ee623ce29b6a0e9fac48189fbd9c641be84d418
|
[
"MIT"
] | null | null | null |
print('oi')
n = int(input('\033[4;34;43mteste\033[1;31;47m /033[0;0;0m'))
print(n)
| 20.75
| 61
| 0.626506
| 19
| 83
| 2.736842
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0.072289
| 83
| 3
| 62
| 27.666667
| 0.38961
| 0
| 0
| 0
| 0
| 0.333333
| 0.542169
| 0.373494
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
6d848008106f98d262699428afcc5f1eeb492c0f
| 100
|
py
|
Python
|
src/main.py
|
StephenGemin/discord-bot
|
b85254b0f1fdf5e1415d2ee026601e7557c73856
|
[
"MIT"
] | null | null | null |
src/main.py
|
StephenGemin/discord-bot
|
b85254b0f1fdf5e1415d2ee026601e7557c73856
|
[
"MIT"
] | null | null | null |
src/main.py
|
StephenGemin/discord-bot
|
b85254b0f1fdf5e1415d2ee026601e7557c73856
|
[
"MIT"
] | null | null | null |
from discord_bot.view import start_discord_bot
if __name__ == "__main__":
start_discord_bot()
| 16.666667
| 46
| 0.77
| 14
| 100
| 4.571429
| 0.642857
| 0.46875
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 100
| 5
| 47
| 20
| 0.752941
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
099b402200da1c4ee1d94cd464ea8b0137f56001
| 62
|
py
|
Python
|
scrapers/lords/__init__.py
|
spudmind/spud
|
86e44bca4efd3cd6358467e1511048698a45edbc
|
[
"MIT"
] | 2
|
2015-04-11T12:22:41.000Z
|
2016-08-18T11:12:06.000Z
|
scrapers/lords/__init__.py
|
spudmind/spud
|
86e44bca4efd3cd6358467e1511048698a45edbc
|
[
"MIT"
] | 84
|
2015-01-22T14:33:49.000Z
|
2015-04-01T23:15:29.000Z
|
scrapers/lords/__init__.py
|
spudmind/spud
|
86e44bca4efd3cd6358467e1511048698a45edbc
|
[
"MIT"
] | 1
|
2015-04-16T03:10:39.000Z
|
2015-04-16T03:10:39.000Z
|
from fetch_lords import fetch
from scrape_lords import scrape
| 20.666667
| 31
| 0.870968
| 10
| 62
| 5.2
| 0.5
| 0.423077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 62
| 2
| 32
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
61eda79cd22fed4633d1aff056fe430f646f69a4
| 14,712
|
py
|
Python
|
difi/tests/test_metrics.py
|
moeyensj/difi
|
4108ee93f35030174eb456b9e5a8a2e9cbbd04a0
|
[
"BSD-3-Clause"
] | 1
|
2019-02-14T20:10:44.000Z
|
2019-02-14T20:10:44.000Z
|
difi/tests/test_metrics.py
|
moeyensj/difi
|
4108ee93f35030174eb456b9e5a8a2e9cbbd04a0
|
[
"BSD-3-Clause"
] | 22
|
2019-02-06T22:22:20.000Z
|
2021-05-12T17:13:21.000Z
|
difi/tests/test_metrics.py
|
moeyensj/difi
|
4108ee93f35030174eb456b9e5a8a2e9cbbd04a0
|
[
"BSD-3-Clause"
] | 1
|
2020-10-05T05:02:26.000Z
|
2020-10-05T05:02:26.000Z
|
import pytest
import numpy as np
import pandas as pd
from pandas.testing import assert_frame_equal
from ..metrics import calcFindableMinObs
from ..metrics import calcFindableNightlyLinkages
from .create_test_data import createTestDataSet
MIN_OBS = range(5, 10)
def test_calcFindableMinObs():
### Test calcFindableMinObs against the test data set
column_mapping = {
"truth" : "truth",
"obs_id" : "obs_id",
}
for min_obs in MIN_OBS:
# Generate test data set
observations_test, all_truths_test, linkage_members_test, all_linkages_test, summary_test = createTestDataSet(
min_obs,
5,
20)
findable_observations = calcFindableMinObs(observations_test, min_obs=min_obs, column_mapping=column_mapping)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[observations_test["truth"] == truth]["obs_id"].values)
# Make sure all objects with not findable are not included in the findable_observations dataframe
not_findable_truths_test = all_truths_test[all_truths_test["findable"] == 0]["truth"].values
assert len(findable_observations[findable_observations[column_mapping["truth"]].isin(not_findable_truths_test)]) == 0
return
def test_calcFindableNightlyLinkages():
### Test calcFindableNightlyLinkages against the test data set
column_mapping = {
"truth" : "truth",
"obs_id" : "obs_id",
"time" : "time",
"night" : "night",
}
# Generate test data set
observations_test, all_truths_test, linkage_members_test, all_linkages_test, summary_test = createTestDataSet(
5,
5,
20)
# For every single truth in blue, their observations are seperated by a half day
for truth in observations_test[observations_test["class"] == "blue"]["truth"].unique():
mask = (observations_test["truth"] == truth)
observations_test.loc[mask, "time"] = np.arange(0, len(observations_test[mask])/2, 0.5)
# For every single truth in red, their observations are seperated by a quarter day
for truth in observations_test[observations_test["class"] == "red"]["truth"].unique():
mask = (observations_test["truth"] == truth)
observations_test.loc[mask, "time"] = np.arange(0, len(observations_test[mask])/4, 0.25)
# Observation times for greens are selected at random from the available ones in blues and greens
observations_test.loc[observations_test["class"] == "green", "time"] = np.random.choice(
observations_test[~observations_test["time"].isna()]["time"].values,
len(observations_test[observations_test["class"] == "green"]),
replace=True)
# Lets add a night column which is simply the floor of the observation time
observations_test["night"] = np.floor(observations_test["time"]).astype(int)
# With a maximum separation of 0.25 only reds should be findable
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.25,
min_linkage_nights=1,
column_mapping=column_mapping
)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[observations_test["truth"] == truth]["obs_id"].values)
# Make sure that only reds were found
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array(["red"]))
# With a maximum separation of 0.5 reds and blues should be findable
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.5,
min_linkage_nights=1,
column_mapping=column_mapping
)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[observations_test["truth"] == truth]["obs_id"].values)
# Make sure that only reds and blues were found
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array(["red", "blue"]))
# With a minimum linkage length of 1, everything should be findable
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=1,
max_obs_separation=0.5,
min_linkage_nights=1,
column_mapping=column_mapping
)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[observations_test["truth"] == truth]["obs_id"].values)
# Make sure that all reds, blues, and greens were found
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array(["red", "blue", "green"]))
# With a minimum linkage length of 100, nothing should be findable
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=100,
max_obs_separation=0.5,
min_linkage_nights=1,
column_mapping=column_mapping
)
assert len(findable_observations) == 0
### These next few tests focus on red05 which has the following observations:
# obs_id truth class time night
# obs00000 red05 red 0.00 0
# obs00008 red05 red 0.25 0
# obs00013 red05 red 0.50 0
# obs00024 red05 red 0.75 0
# obs00049 red05 red 1.00 1
# obs00051 red05 red 1.25 1
# obs00057 red05 red 1.50 1
# obs00070 red05 red 1.75 1
# obs00085 red05 red 2.00 2
# obs00096 red05 red 2.25 2
# Lets set min_linkage nights to 3 with a maximum separation of 0.25, only red05 should be findable
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.25,
min_linkage_nights=3,
column_mapping=column_mapping
)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[observations_test["truth"] == truth]["obs_id"].values)
# Make sure that only red05 should be findable
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array(["red"]))
np.testing.assert_array_equal(findable_observations["truth"].values, np.array(["red05"]))
# Keep min_linkage nights to 3 with a maximum separation of 0.25, set the last of red05's observations to be outside the time separation
# resulting in only two viable tracklet nights, it should no longer be findable
observations_test.loc[observations_test["obs_id"] == "obs00096", "time"] = 2.26
# obs_id truth class time night findable
# obs00000 red05 red 0.00 0 Y
# obs00008 red05 red 0.25 0 Y
# obs00013 red05 red 0.50 0 Y
# obs00024 red05 red 0.75 0 Y
# obs00049 red05 red 1.00 1 Y
# obs00051 red05 red 1.25 1 Y
# obs00057 red05 red 1.50 1 Y
# obs00070 red05 red 1.75 1 Y
# obs00085 red05 red 2.00 2 N
# obs00096 red05 red 2.26 2 N
# red05 findable : N
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.25,
min_linkage_nights=3,
column_mapping=column_mapping
)
# Red05 should no longer be findable
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array([]))
# Set the observation back to its original time
observations_test.loc[observations_test["obs_id"] == "obs00096", "time"] = 2.25
# Keep min_linkage nights to 3 with a maximum separation of 0.25, set the two of the observations on night 1 to not be
# findable, red05 should still be findable with the remaining observations but those unfindable observations should not
# be returned as findable observations
observations_test.loc[observations_test["obs_id"] == "obs00057", "time"] = 1.51
observations_test.loc[observations_test["obs_id"] == "obs00070", "time"] = 1.77
# This observation needs to be shifted so that it is more than 0.25 from the previous exposure time
# so we dont count a linkage across nights
observations_test.loc[observations_test["obs_id"] == "obs00085", "time"] = 2.10
# obs_id truth class time night findable
# obs00000 red05 red 0.00 0 Y
# obs00008 red05 red 0.25 0 Y
# obs00013 red05 red 0.50 0 Y
# obs00024 red05 red 0.75 0 Y
# obs00049 red05 red 1.00 1 Y
# obs00051 red05 red 1.25 1 Y
# obs00057 red05 red 1.51 1 N
# obs00070 red05 red 1.76 1 N
# obs00085 red05 red 2.10 2 Y
# obs00096 red05 red 2.25 2 Y
# red05 findable : Y
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.25,
min_linkage_nights=3,
column_mapping=column_mapping
)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[(observations_test["truth"] == truth) & (~observations_test["obs_id"].isin(["obs00057", "obs00070"]))]["obs_id"].values)
# Make sure that only red05 should be findable
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array(["red"]))
np.testing.assert_array_equal(findable_observations["truth"].values, np.array(["red05"]))
# Set the observations back to their previous values
observations_test.loc[observations_test["obs_id"] == "obs00057", "time"] = 1.50
observations_test.loc[observations_test["obs_id"] == "obs00070", "time"] = 1.75
observations_test.loc[observations_test["obs_id"] == "obs00085", "time"] = 2.00
# Keep min_linkage nights to 3 with a maximum separation of 0.25, remove some of red05's observations
# so that there are only two observations on each night -- it should still be the only object findable
observations_test = observations_test[~observations_test["obs_id"].isin(["obs00000", "obs00008", "obs00057", "obs00070"])]
# obs_id truth class time night findable
# obs00013 red05 red 0.50 0 Y
# obs00024 red05 red 0.75 0 Y
# obs00049 red05 red 1.00 1 Y
# obs00070 red05 red 1.75 1 Y
# obs00085 red05 red 2.00 2 Y
# obs00096 red05 red 2.25 2 Y
# red05 findable : Y
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.25,
min_linkage_nights=3,
column_mapping=column_mapping
)
for truth in findable_observations[column_mapping["truth"]].unique():
# Make sure all observations are correctly identified as findable
obs_ids = findable_observations[findable_observations[column_mapping["truth"]].isin([truth])]["obs_ids"].values[0]
np.testing.assert_array_equal(obs_ids, observations_test[observations_test["truth"] == truth]["obs_id"].values)
# Make sure that only red05 should be findable
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array(["red"]))
np.testing.assert_array_equal(findable_observations["truth"].values, np.array(["red05"]))
# Keep min_linkage nights to 3 with a maximum separation of 0.25, set one of red05's observations to be outside the time
# separation for a linkage -- it now should not be findable
observations_test.loc[observations_test["obs_id"] == "obs00096", "time"] = 2.26
# obs_id truth class time night findable
# obs00013 red05 red 0.50 0 Y
# obs00024 red05 red 0.75 0 Y
# obs00049 red05 red 1.00 1 Y
# obs00070 red05 red 1.75 1 Y
# obs00085 red05 red 2.00 2 N
# obs00096 red05 red 2.26 2 N
# red05 findable : N
findable_observations = calcFindableNightlyLinkages(
observations_test,
linkage_min_obs=2,
max_obs_separation=0.25,
min_linkage_nights=3,
column_mapping=column_mapping
)
# Red05 should no longer be findable
classes_found = observations_test[observations_test["truth"].isin(findable_observations[column_mapping["truth"]].values)]["class"].unique()
np.testing.assert_array_equal(classes_found, np.array([]))
return
| 49.04
| 185
| 0.688146
| 1,921
| 14,712
| 5.078605
| 0.100469
| 0.134481
| 0.046125
| 0.077798
| 0.818573
| 0.797765
| 0.758303
| 0.758303
| 0.74836
| 0.73811
| 0
| 0.06111
| 0.214723
| 14,712
| 300
| 186
| 49.04
| 0.783346
| 0.321438
| 0
| 0.679245
| 0
| 0
| 0.077406
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 1
| 0.012579
| false
| 0
| 0.044025
| 0
| 0.069182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
11027a164698ede72d76475d8fdffe56acba203c
| 182
|
py
|
Python
|
quran/usecase/image/find_image.py
|
octabytes/quran
|
974d351cf5e6a12a28a5ac9f29c8d2753ae6dd86
|
[
"Apache-2.0"
] | null | null | null |
quran/usecase/image/find_image.py
|
octabytes/quran
|
974d351cf5e6a12a28a5ac9f29c8d2753ae6dd86
|
[
"Apache-2.0"
] | null | null | null |
quran/usecase/image/find_image.py
|
octabytes/quran
|
974d351cf5e6a12a28a5ac9f29c8d2753ae6dd86
|
[
"Apache-2.0"
] | null | null | null |
class FindImage:
def __init__(self, image_repo):
self.image_repo = image_repo
def by_ayah_id(self, ayah_id):
return self.image_repo.find_by_ayah_id(ayah_id)
| 26
| 55
| 0.708791
| 29
| 182
| 3.931034
| 0.413793
| 0.315789
| 0.342105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208791
| 182
| 7
| 55
| 26
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
1141e514f4e00f71b3246547bc4756e21bd24313
| 28
|
py
|
Python
|
_lib/godot/bindings/tools.py
|
WilliamTambellini/godopy
|
7b4142ddf7acafa66e1b2b201afa5fa37a4c7f4e
|
[
"MIT"
] | 30
|
2020-02-09T22:30:06.000Z
|
2022-01-26T04:23:09.000Z
|
_lib/godot/bindings/tools.py
|
WilliamTambellini/godopy
|
7b4142ddf7acafa66e1b2b201afa5fa37a4c7f4e
|
[
"MIT"
] | 1
|
2020-10-12T04:12:52.000Z
|
2020-12-19T07:07:51.000Z
|
_lib/godot/bindings/tools.py
|
WilliamTambellini/godopy
|
7b4142ddf7acafa66e1b2b201afa5fa37a4c7f4e
|
[
"MIT"
] | 5
|
2020-02-10T02:49:13.000Z
|
2021-01-25T18:18:16.000Z
|
from .python.tools import *
| 14
| 27
| 0.75
| 4
| 28
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
114d5755227d7f7b11bfb088254c63c839d57ab7
| 123
|
py
|
Python
|
Section10_Facade/Practice/Generator.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | 1
|
2020-10-20T07:41:51.000Z
|
2020-10-20T07:41:51.000Z
|
Section10_Facade/Practice/Generator.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
Section10_Facade/Practice/Generator.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
from random import randint
class Generator:
def generate(self, count):
return [randint(1,9) for x in range(count)]
| 17.571429
| 47
| 0.715447
| 19
| 123
| 4.631579
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.186992
| 123
| 6
| 48
| 20.5
| 0.86
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3a2c4c2999dcc761298d4f451a078adf13eba7a0
| 149
|
py
|
Python
|
pygem/tests/test_basics.py
|
lilianschuster/PyGEM
|
c805d09960f937fe6e35cdd1587f9089d4bec6b8
|
[
"MIT"
] | 25
|
2019-06-12T21:08:24.000Z
|
2022-03-01T08:05:14.000Z
|
pygem/tests/test_basics.py
|
lilianschuster/PyGEM
|
c805d09960f937fe6e35cdd1587f9089d4bec6b8
|
[
"MIT"
] | 2
|
2020-04-23T14:08:00.000Z
|
2020-06-04T13:52:44.000Z
|
pygem/tests/test_basics.py
|
lilianschuster/PyGEM
|
c805d09960f937fe6e35cdd1587f9089d4bec6b8
|
[
"MIT"
] | 24
|
2019-06-12T19:48:40.000Z
|
2022-02-16T03:42:53.000Z
|
import pygem
def test_version_string():
# simple test to check that the verion number is available
assert type(pygem.__version__ ) == str
| 18.625
| 62
| 0.731544
| 21
| 149
| 4.904762
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208054
| 149
| 7
| 63
| 21.285714
| 0.872881
| 0.375839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e90fdc92d0651cb3d0b6aeb5b5752a662b2453ba
| 13,225
|
py
|
Python
|
model_dropout.py
|
soufiomario/keras-cnn
|
6ba72bf89dfc746d8a30634219160843b16c5fb8
|
[
"CC0-1.0"
] | 5
|
2020-03-18T09:33:17.000Z
|
2022-03-20T15:19:18.000Z
|
model_dropout.py
|
soufiomario/keras-cnn
|
6ba72bf89dfc746d8a30634219160843b16c5fb8
|
[
"CC0-1.0"
] | 1
|
2021-09-07T14:47:07.000Z
|
2021-09-07T14:47:44.000Z
|
model_dropout.py
|
soufiomario/keras-cnn
|
6ba72bf89dfc746d8a30634219160843b16c5fb8
|
[
"CC0-1.0"
] | 11
|
2020-02-14T04:15:51.000Z
|
2021-12-11T12:31:38.000Z
|
import keras
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
from keras.constraints import max_norm
# Model configuration
img_width, img_height = 32, 32
batch_size = 250
no_epochs = 55
no_classes = 10
validation_split = 0.2
verbosity = 1
max_norm_value = 2.0
# Load CIFAR10 dataset
(input_train, target_train), (input_test, target_test) = cifar10.load_data()
# Reshape data based on channels first / channels last strategy.
# This is dependent on whether you use TF, Theano or CNTK as backend.
# Source: https://github.com/keras-team/keras/blob/master/examples/mnist_cnn.py
if K.image_data_format() == 'channels_first':
input_train = input_train.reshape(input_train.shape[0],3, img_width, img_height)
input_test = input_test.reshape(input_test.shape[0], 3, img_width, img_height)
input_shape = (3, img_width, img_height)
else:
input_train = input_train.reshape(input_train.shape[0], img_width, img_height, 3)
input_test = input_test.reshape(input_test.shape[0], img_width, img_height, 3)
input_shape = (img_width , img_height, 3)
# Parse numbers as floats
input_train = input_train.astype('float32')
input_test = input_test.astype('float32')
# Normalize data
input_train = input_train / 255
input_test = input_test / 255
# Convert target vectors to categorical targets
target_train = keras.utils.to_categorical(target_train, no_classes)
target_test = keras.utils.to_categorical(target_test, no_classes)
# Create the model
model = Sequential()
model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.50))
model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.50))
model.add(Flatten())
model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
model.add(Dense(no_classes, activation='softmax'))
# Compile the model
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adam(),
metrics=['accuracy'])
# Fit data to model
model.fit(input_train, target_train,
batch_size=batch_size,
epochs=no_epochs,
verbose=verbosity,
validation_split=validation_split
)
# Generate generalization metrics
score = model.evaluate(input_test, target_test, verbose=0)
print(f'Test loss: {score[0]} / Test accuracy: {score[1]}')
# ============
# Test loss: 0.8185625041961669 / Test accuracy: 0.7193999886512756
# ============
# model = Sequential()
# model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ============
# Test loss: 1.3634590747833253 / Test accuracy: 0.5906000137329102
# ============
# model = Sequential()
# model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(Dropout(0.50))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', kernel_initializer='he_uniform'))
# model.add(Dropout(0.50))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_initializer='he_uniform'))
# model.add(Dropout(0.50))
# model.add(Dense(no_classes, activation='softmax'))
# ============
# Test loss: 0.8021318348884583 / Test accuracy: 0.7243000268936157
# ============
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ============
# Test loss: 1.0652880083084106 / Test accuracy: 0.623199999332428
# ============
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ============
# Test loss: 0.7692169213294983 / Test accuracy: 0.7314000129699707
# ============
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(2.0), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(2.0), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(2.0), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# Test loss: 0.8035776728630066 / Test accuracy: 0.7233999967575073
# maxnorm=1.0
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# Test loss: 0.8003060577392578 / Test accuracy: 0.7250000238418579
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# Test loss: 0.7669966766357422 / Test accuracy: 0.7365999817848206
# maxnorm = 2.5
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
#Test loss: 2.3026647621154783 / Test accuracy: 0.10000000149011612
# maxnorm = 2.5
# lr 10e-2 && decay linear
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
#Test loss: 2.302865937805176 / Test accuracy: 0.10000000149011612
#maxnorm = 2.5
# lre0 && decay linear
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ==================
# Test loss: Test loss: 0.9980722076416015 / Test accuracy: 0.6534000039100647
# SGD, momentum 0.99, Nesterov false, LR 10e-2, LR decay linear
# maxnorm=2.5
# ==================
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ==================
# Test loss: Test loss: 0.965770835018158 / Test accuracy: 0.6678000092506409
# SGD, momentum 0.99, Nesterov true, LR 10e-2, LR decay linear
# maxnorm=2.5
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ==================
# Test loss: Test loss: 1.0010871562957764 / Test accuracy: 0.6502000093460083
# SGD, momentum 0.99, Nesterov true, default LR settings
# MAXNORM=2.5
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
# ==================
# Test loss: Test loss: 0.9282757438659668 / Test accuracy: 0.6773999929428101
# SGD, momentum 0.99, Nesterov true, default LR settings
# MAXNORM=2.0
# model = Sequential()
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', input_shape=input_shape, kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Conv2D(64, kernel_size=(3, 3), kernel_constraint=max_norm(max_norm_value), activation='relu', kernel_initializer='he_uniform'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Dropout(0.50))
# model.add(Flatten())
# model.add(Dense(256, activation='relu', kernel_constraint=max_norm(max_norm_value), kernel_initializer='he_uniform'))
# model.add(Dense(no_classes, activation='softmax'))
| 49.163569
| 164
| 0.733837
| 1,891
| 13,225
| 4.927552
| 0.088842
| 0.115905
| 0.089719
| 0.122773
| 0.807684
| 0.795235
| 0.79223
| 0.783859
| 0.775059
| 0.757244
| 0
| 0.078243
| 0.092552
| 13,225
| 269
| 165
| 49.163569
| 0.698192
| 0.785633
| 0
| 0.08
| 0
| 0
| 0.050758
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.14
| 0
| 0.14
| 0.02
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e927b0dfc553dadb3b4e6922e4c7c716d582e644
| 1,417
|
py
|
Python
|
tests/test_a0095uniquebinarysearchtreesii.py
|
nirofang/pyleet
|
600d58ad97028c9a14148af4ef468683a011a515
|
[
"MIT"
] | 3
|
2019-11-06T13:10:38.000Z
|
2021-11-17T07:29:54.000Z
|
tests/test_a0095uniquebinarysearchtreesii.py
|
nirofang/pyleet
|
600d58ad97028c9a14148af4ef468683a011a515
|
[
"MIT"
] | 1
|
2020-12-17T22:18:05.000Z
|
2020-12-17T22:18:05.000Z
|
tests/test_a0095uniquebinarysearchtreesii.py
|
nirofang/pyleet
|
600d58ad97028c9a14148af4ef468683a011a515
|
[
"MIT"
] | 1
|
2019-11-06T13:10:45.000Z
|
2019-11-06T13:10:45.000Z
|
from solutions.a0095uniquebinarysearchtreesii import Solution
from utils.tree.TreeNode import TreeNode
import json
solution = Solution()
# def test_generateTrees1():
# n = 3
# expect = sorted([TreeNode.integerListToString(nums) for nums in [
# [1, None, 3, 2],
# [3, 2, None, 1],
# [3, 1, None, None, 2],
# [2, 1, 3],
# [1, None, 2, None, 3]
# ]])
# actual = solution.generateTrees(n)
# assert len(actual) == len(expect)
# actual = sorted([TreeNode.integerListToString(nums) for nums in [TreeNode.treeToList(node) for node in actual]])
# assert actual == expect
def test_generateTrees2():
n = 0
expect = []
actual = solution.generateTrees(n)
assert len(actual) == len(expect)
assert actual == expect
def test_generateTrees3():
n = 1
expect = sorted([TreeNode.integerListToString(nums) for nums in [[1]]])
actual = solution.generateTrees(n)
assert len(actual) == len(expect)
actual = sorted([TreeNode.integerListToString(nums) for nums in [TreeNode.treeToList(node) for node in actual]])
assert actual == expect
def test_generateTrees4():
n = 2
expect = sorted([TreeNode.integerListToString(nums) for nums in [[2, 1], [1, None, 2]]])
actual = solution.generateTrees(n)
assert len(actual) == len(expect)
actual = sorted([TreeNode.integerListToString(nums) for nums in [TreeNode.treeToList(node) for node in actual]])
assert actual == expect
| 29.520833
| 116
| 0.685251
| 180
| 1,417
| 5.372222
| 0.183333
| 0.086867
| 0.204757
| 0.229576
| 0.750776
| 0.724922
| 0.724922
| 0.724922
| 0.671148
| 0.507756
| 0
| 0.027444
| 0.177135
| 1,417
| 47
| 117
| 30.148936
| 0.801887
| 0.304869
| 0
| 0.458333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e94363fca85ee7931d530cd96b67366486d585e0
| 216
|
py
|
Python
|
src/common/new_user_creation.py
|
gruyaume/my-blockchain
|
283f5ef0c8c09eff0478dfead3950c720cda2882
|
[
"Apache-2.0"
] | 4
|
2021-11-14T17:16:03.000Z
|
2022-03-17T21:01:42.000Z
|
src/common/new_user_creation.py
|
gruyaume/my-blockchain
|
283f5ef0c8c09eff0478dfead3950c720cda2882
|
[
"Apache-2.0"
] | null | null | null |
src/common/new_user_creation.py
|
gruyaume/my-blockchain
|
283f5ef0c8c09eff0478dfead3950c720cda2882
|
[
"Apache-2.0"
] | 5
|
2021-07-30T14:27:37.000Z
|
2021-12-15T12:08:46.000Z
|
from common.owner import Owner
owner = Owner()
print(f"private key: {owner.private_key.export_key(format='DER')}")
print(f"public key hash: {owner.public_key_hash}")
print(f"public key hex: {owner.public_key_hex}")
| 30.857143
| 67
| 0.75463
| 36
| 216
| 4.361111
| 0.388889
| 0.229299
| 0.152866
| 0.191083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087963
| 216
| 6
| 68
| 36
| 0.796954
| 0
| 0
| 0
| 0
| 0
| 0.625
| 0.412037
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.6
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
3a6858d2ab63ec58a60118a8aa3ec8637f6fb6b9
| 102
|
py
|
Python
|
core/crawler/crawl.py
|
abdallah-elsharif/WRock
|
7cfd4bf29e932bf0048ee357c16cf6c021e7fb81
|
[
"MIT"
] | 14
|
2022-03-13T19:51:24.000Z
|
2022-03-18T07:36:39.000Z
|
core/crawler/crawl.py
|
abdallah-elsharif/WRock
|
7cfd4bf29e932bf0048ee357c16cf6c021e7fb81
|
[
"MIT"
] | null | null | null |
core/crawler/crawl.py
|
abdallah-elsharif/WRock
|
7cfd4bf29e932bf0048ee357c16cf6c021e7fb81
|
[
"MIT"
] | 3
|
2022-03-14T05:58:06.000Z
|
2022-03-14T11:46:47.000Z
|
from core.crawler.crawler import WebCrawler
def crawl(config):
return WebCrawler(config).Start()
| 20.4
| 43
| 0.77451
| 13
| 102
| 6.076923
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 102
| 5
| 44
| 20.4
| 0.88764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
3a92f8edda04a6262981d6d6424b66f06b3a8120
| 157
|
py
|
Python
|
src/frontend/controllers/kitchen.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | 1
|
2018-09-11T19:32:25.000Z
|
2018-09-11T19:32:25.000Z
|
src/frontend/controllers/kitchen.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | null | null | null |
src/frontend/controllers/kitchen.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | null | null | null |
from flask import render_template
from .. import app
@app.route('/kitchen/', methods=['GET'])
def render_kitchen_view():
return render_template('')
| 14.272727
| 40
| 0.707006
| 20
| 157
| 5.35
| 0.65
| 0.261682
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146497
| 157
| 10
| 41
| 15.7
| 0.798507
| 0
| 0
| 0
| 0
| 0
| 0.077419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
3ac71ae0feb98dfe47ceb8f67c4588c0520d9ece
| 230
|
py
|
Python
|
wazimap_ng/points/admin/__init__.py
|
arghyaiitb/wazimap-ng
|
2a77860526d865b8fd0c22a2204f121fdb3b28a0
|
[
"Apache-2.0"
] | 11
|
2019-12-31T20:27:22.000Z
|
2022-03-10T03:55:38.000Z
|
wazimap_ng/points/admin/__init__.py
|
arghyaiitb/wazimap-ng
|
2a77860526d865b8fd0c22a2204f121fdb3b28a0
|
[
"Apache-2.0"
] | 164
|
2020-02-06T15:02:22.000Z
|
2022-03-30T22:42:00.000Z
|
wazimap_ng/points/admin/__init__.py
|
arghyaiitb/wazimap-ng
|
2a77860526d865b8fd0c22a2204f121fdb3b28a0
|
[
"Apache-2.0"
] | 16
|
2020-01-03T20:30:24.000Z
|
2022-01-11T11:05:15.000Z
|
from .theme_admin import ThemeAdmin
from .location_admin import LocationAdmin
from .profilecategory_admin import ProfileCategoryAdmin
from .category_admin import CategoryAdmin
from .coordinate_file_admin import CoordinateFileAdmin
| 46
| 55
| 0.895652
| 26
| 230
| 7.692308
| 0.538462
| 0.275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082609
| 230
| 5
| 56
| 46
| 0.947867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3af33daab91cbdd24da8da2beb3ea99484b254b8
| 23
|
py
|
Python
|
PyColored/__init__.py
|
MrHedryX/PyColours
|
b1363f7354ad938343cf8953ebffe0479aa7a4f6
|
[
"MIT"
] | null | null | null |
PyColored/__init__.py
|
MrHedryX/PyColours
|
b1363f7354ad938343cf8953ebffe0479aa7a4f6
|
[
"MIT"
] | null | null | null |
PyColored/__init__.py
|
MrHedryX/PyColours
|
b1363f7354ad938343cf8953ebffe0479aa7a4f6
|
[
"MIT"
] | null | null | null |
from .colours import *
| 11.5
| 22
| 0.73913
| 3
| 23
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aaf1c6fe8814afe42f0c78715a2b16a28f4063c4
| 169
|
py
|
Python
|
ggpy/cruft/autocode/Event.py
|
hobson/ggpy
|
4e6e6e876c3a4294cd711647051da2d9c1836b60
|
[
"MIT"
] | 1
|
2015-01-26T19:07:45.000Z
|
2015-01-26T19:07:45.000Z
|
ggpy/cruft/autocode/Event.py
|
hobson/ggpy
|
4e6e6e876c3a4294cd711647051da2d9c1836b60
|
[
"MIT"
] | null | null | null |
ggpy/cruft/autocode/Event.py
|
hobson/ggpy
|
4e6e6e876c3a4294cd711647051da2d9c1836b60
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
""" generated source for module Event """
# package: org.ggp.base.util.observer
class Event(object):
""" generated source for class Event """
| 24.142857
| 44
| 0.692308
| 23
| 169
| 5.086957
| 0.73913
| 0.25641
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 169
| 6
| 45
| 28.166667
| 0.818182
| 0.739645
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c92db475a620b89f1b7440b904315aef1e24579d
| 93
|
py
|
Python
|
data/__init__.py
|
ssydasheng/Neural-Kernel-Network
|
2b1540f20445e05705769cfd5808d2810eac8a4f
|
[
"MIT"
] | 67
|
2018-07-03T14:01:08.000Z
|
2021-11-08T10:40:55.000Z
|
data/__init__.py
|
ssydasheng/Neural-Kernel-Network
|
2b1540f20445e05705769cfd5808d2810eac8a4f
|
[
"MIT"
] | 1
|
2019-10-13T12:33:42.000Z
|
2019-10-15T07:14:51.000Z
|
data/__init__.py
|
ssydasheng/Neural-Kernel-Network
|
2b1540f20445e05705769cfd5808d2810eac8a4f
|
[
"MIT"
] | 8
|
2018-07-04T18:57:40.000Z
|
2020-07-31T11:14:11.000Z
|
from .register import *
from .hparams import *
from .data import *
from .timeSeries import *
| 18.6
| 25
| 0.741935
| 12
| 93
| 5.75
| 0.5
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172043
| 93
| 4
| 26
| 23.25
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a3415feadfd677e09cc64c4cb3f96e2fa0a9a1ea
| 160
|
py
|
Python
|
fuzzyfloat/types.py
|
keystonetowersystems/fuzzyfloat
|
551f324180d1b107149e3d3d3b8076f6397cdfb3
|
[
"MIT"
] | null | null | null |
fuzzyfloat/types.py
|
keystonetowersystems/fuzzyfloat
|
551f324180d1b107149e3d3d3b8076f6397cdfb3
|
[
"MIT"
] | null | null | null |
fuzzyfloat/types.py
|
keystonetowersystems/fuzzyfloat
|
551f324180d1b107149e3d3d3b8076f6397cdfb3
|
[
"MIT"
] | null | null | null |
from .meta import FuzzyFloatMeta
class rel_fp(metaclass=FuzzyFloatMeta):
pass
class abs_fp(metaclass=FuzzyFloatMeta, rel_tol=0.0, atol=1e-07):
pass
| 16
| 64
| 0.75625
| 23
| 160
| 5.130435
| 0.652174
| 0.186441
| 0.423729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036765
| 0.15
| 160
| 9
| 65
| 17.777778
| 0.830882
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
6e6f48fa31ca0cb89aadc6b7b4522a4dc7f65a98
| 251
|
py
|
Python
|
sdk/lusid_configuration/api/__init__.py
|
finbourne/lusid-configuration-sdk-python-preview
|
f9ed2bc71042ce1b6f85baf62c9f6603150000dc
|
[
"MIT"
] | null | null | null |
sdk/lusid_configuration/api/__init__.py
|
finbourne/lusid-configuration-sdk-python-preview
|
f9ed2bc71042ce1b6f85baf62c9f6603150000dc
|
[
"MIT"
] | null | null | null |
sdk/lusid_configuration/api/__init__.py
|
finbourne/lusid-configuration-sdk-python-preview
|
f9ed2bc71042ce1b6f85baf62c9f6603150000dc
|
[
"MIT"
] | 1
|
2021-12-09T18:53:23.000Z
|
2021-12-09T18:53:23.000Z
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from lusid_configuration.api.application_metadata_api import ApplicationMetadataApi
from lusid_configuration.api.configuration_sets_api import ConfigurationSetsApi
| 31.375
| 83
| 0.880478
| 30
| 251
| 7
| 0.566667
| 0.085714
| 0.209524
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004386
| 0.091633
| 251
| 7
| 84
| 35.857143
| 0.916667
| 0.163347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6eb6377c66738d2e3e66614c58f3bc58e7ec6530
| 1,353
|
py
|
Python
|
app/core/forms.py
|
mihalw28/book_manager
|
553397a4c25e5f90a02bd794722f77b423c346e5
|
[
"MIT"
] | 1
|
2020-04-22T18:05:14.000Z
|
2020-04-22T18:05:14.000Z
|
app/core/forms.py
|
mihalw28/book_manager
|
553397a4c25e5f90a02bd794722f77b423c346e5
|
[
"MIT"
] | null | null | null |
app/core/forms.py
|
mihalw28/book_manager
|
553397a4c25e5f90a02bd794722f77b423c346e5
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import IntegerField, StringField, SubmitField, TextAreaField
from wtforms.validators import DataRequired, InputRequired, ValidationError
class AddBookForm(FlaskForm):
title = StringField("Title", validators=[DataRequired()])
author = StringField("Author", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
pages_no = IntegerField("Number of pages", validators=[InputRequired()])
submit = SubmitField("Add to library")
def validate_pages_no(self, pages_no):
"""Method validates if number of pages is a positive one."""
if pages_no.data < 1:
raise ValidationError("A book cannot have less than 1 page.")
class UpdateBookForm(FlaskForm):
title = StringField("Title", validators=[DataRequired()])
author = StringField("Author", validators=[DataRequired()])
description = TextAreaField("Description", validators=[DataRequired()])
pages_no = IntegerField("Number of pages", validators=[InputRequired()])
submit = SubmitField("Update book")
def validate_pages_no(self, pages_no):
"""Method validates if number of pages is a positive one."""
if pages_no.data < 1:
raise ValidationError("A book cannot have less than 1 page.")
| 45.1
| 77
| 0.700665
| 144
| 1,353
| 6.506944
| 0.333333
| 0.059765
| 0.055496
| 0.064034
| 0.770544
| 0.770544
| 0.770544
| 0.770544
| 0.770544
| 0.770544
| 0
| 0.00365
| 0.189948
| 1,353
| 29
| 78
| 46.655172
| 0.851277
| 0.080562
| 0
| 0.666667
| 0
| 0
| 0.142027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.142857
| 0
| 0.809524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
42fe7e20aa1719e1564e1e8a7b24964f711a720d
| 48,427
|
py
|
Python
|
heat_cfntools/tests/test_cfn_helper.py
|
citrix-openstack-build/heat-cfntools
|
8ef88a6e864fa49cdc6dda9b1c701a29e0557253
|
[
"Apache-2.0"
] | null | null | null |
heat_cfntools/tests/test_cfn_helper.py
|
citrix-openstack-build/heat-cfntools
|
8ef88a6e864fa49cdc6dda9b1c701a29e0557253
|
[
"Apache-2.0"
] | null | null | null |
heat_cfntools/tests/test_cfn_helper.py
|
citrix-openstack-build/heat-cfntools
|
8ef88a6e864fa49cdc6dda9b1c701a29e0557253
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import boto.cloudformation as cfn
import fixtures
import json
from mox3 import mox
import os
import subprocess
import tempfile
import testtools
import testtools.matchers as ttm
from heat_cfntools.cfntools import cfn_helper
class FakePOpen():
def __init__(self, stdout='', stderr='', returncode=0):
self.returncode = returncode
self.stdout = stdout
self.stderr = stderr
def communicate(self):
return (self.stdout, self.stderr)
def wait(self):
pass
class MockPopenTestCase(testtools.TestCase):
def mock_cmd_run(self, command, cwd=None, env=None):
return subprocess.Popen(
command, cwd=cwd, env=env, stderr=-1, stdout=-1)
def mock_unorder_cmd_run(self, command, cwd=None, env=None):
return subprocess.Popen(
command, cwd=cwd, env=env, stderr=-1, stdout=-1).InAnyOrder()
def setUp(self):
super(MockPopenTestCase, self).setUp()
self.m = mox.Mox()
self.m.StubOutWithMock(subprocess, 'Popen')
self.addCleanup(self.m.UnsetStubs)
class TestCommandRunner(MockPopenTestCase):
def test_command_runner(self):
self.mock_cmd_run(['su', 'root', '-c', '/bin/command1']).AndReturn(
FakePOpen('All good'))
self.mock_cmd_run(['su', 'root', '-c', '/bin/command2']).AndReturn(
FakePOpen('Doing something', 'error', -1))
self.m.ReplayAll()
cmd2 = cfn_helper.CommandRunner('/bin/command2')
cmd1 = cfn_helper.CommandRunner('/bin/command1', cmd2)
cmd1.run('root')
self.assertEqual(
'CommandRunner:\n\tcommand: /bin/command1\n\tstdout: All good',
str(cmd1))
self.assertEqual(
'CommandRunner:\n\tcommand: /bin/command2\n\tstatus: -1\n'
'\tstdout: Doing something\n\tstderr: error',
str(cmd2))
self.m.VerifyAll()
class TestPackages(MockPopenTestCase):
def test_yum_install(self):
install_list = []
for pack in ('httpd', 'wordpress', 'mysql-server'):
self.mock_unorder_cmd_run(
['su', 'root', '-c', 'rpm -q %s' % pack]) \
.AndReturn(FakePOpen(returncode=1))
self.mock_unorder_cmd_run(
['su', 'root', '-c',
'yum -y --showduplicates list available %s' % pack]) \
.AndReturn(FakePOpen(returncode=0))
install_list.append(pack)
# This mock call corresponding to 'su root -c yum -y install .*'
# But there is no way to ignore the order of the parameters, so only
# check the return value.
self.mock_cmd_run(mox.IgnoreArg()).AndReturn(FakePOpen(
returncode=0))
self.m.ReplayAll()
packages = {
"yum": {
"mysql-server": [],
"httpd": [],
"wordpress": []
}
}
cfn_helper.PackagesHandler(packages).apply_packages()
self.m.VerifyAll()
def test_zypper_install(self):
install_list = []
for pack in ('httpd', 'wordpress', 'mysql-server'):
self.mock_unorder_cmd_run(
['su', 'root', '-c', 'rpm -q %s' % pack]) \
.AndReturn(FakePOpen(returncode=1))
self.mock_unorder_cmd_run(
['su', 'root', '-c',
'zypper -n --no-refresh search %s' % pack]) \
.AndReturn(FakePOpen(returncode=0))
install_list.append(pack)
# This mock call corresponding to 'su root -c zypper -n install .*'
# But there is no way to ignore the order of the parameters, so only
# check the return value.
self.mock_cmd_run(mox.IgnoreArg()).AndReturn(FakePOpen(
returncode=0))
self.m.ReplayAll()
packages = {
"zypper": {
"mysql-server": [],
"httpd": [],
"wordpress": []
}
}
cfn_helper.PackagesHandler(packages).apply_packages()
self.m.VerifyAll()
def test_apt_install(self):
# This mock call corresponding to
# 'DEBIAN_FRONTEND=noninteractive su root -c apt-get -y install .*'
# But there is no way to ignore the order of the parameters, so only
# check the return value.
self.mock_cmd_run(mox.IgnoreArg()).AndReturn(FakePOpen(
returncode=0))
self.m.ReplayAll()
packages = {
"apt": {
"mysql-server": [],
"httpd": [],
"wordpress": []
}
}
cfn_helper.PackagesHandler(packages).apply_packages()
self.m.VerifyAll()
class TestServicesHandler(MockPopenTestCase):
def test_services_handler_systemd(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(True)
# apply_services
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl enable httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen(returncode=-1))
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl start httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl enable mysqld.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status mysqld.service']
).AndReturn(FakePOpen(returncode=-1))
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl start mysqld.service']
).AndReturn(FakePOpen())
# monitor_services not running
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen(returncode=-1))
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl start httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/services_restarted']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status mysqld.service']
).AndReturn(FakePOpen(returncode=-1))
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl start mysqld.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/services_restarted']
).AndReturn(FakePOpen())
# monitor_services running
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status mysqld.service']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"systemd": {
"mysqld": {"enabled": "true", "ensureRunning": "true"},
"httpd": {"enabled": "true", "ensureRunning": "true"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
# services not running
sh.monitor_services()
# services running
sh.monitor_services()
self.m.VerifyAll()
def test_services_handler_systemd_disabled(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(True)
# apply_services
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl disable httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl stop httpd.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl disable mysqld.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl status mysqld.service']
).AndReturn(FakePOpen())
self.mock_unorder_cmd_run(
['su', 'root', '-c', '/bin/systemctl stop mysqld.service']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"systemd": {
"mysqld": {"enabled": "false", "ensureRunning": "false"},
"httpd": {"enabled": "false", "ensureRunning": "false"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
self.m.VerifyAll()
def test_services_handler_sysv_service_chkconfig(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(False)
os.path.exists('/sbin/service').MultipleTimes().AndReturn(True)
os.path.exists('/sbin/chkconfig').MultipleTimes().AndReturn(True)
# apply_services
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/chkconfig httpd on']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd status']
).AndReturn(FakePOpen(returncode=-1))
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd start']
).AndReturn(FakePOpen())
# monitor_services not running
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd status']
).AndReturn(FakePOpen(returncode=-1))
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd start']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/bin/services_restarted']
).AndReturn(FakePOpen())
# monitor_services running
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd status']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"sysvinit": {
"httpd": {"enabled": "true", "ensureRunning": "true"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
# services not running
sh.monitor_services()
# services running
sh.monitor_services()
self.m.VerifyAll()
def test_services_handler_sysv_disabled_service_chkconfig(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(False)
os.path.exists('/sbin/service').MultipleTimes().AndReturn(True)
os.path.exists('/sbin/chkconfig').MultipleTimes().AndReturn(True)
# apply_services
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/chkconfig httpd off']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd status']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/sbin/service httpd stop']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"sysvinit": {
"httpd": {"enabled": "false", "ensureRunning": "false"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
self.m.VerifyAll()
def test_services_handler_sysv_systemctl(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(True)
# apply_services
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl enable httpd.service']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen(returncode=-1))
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl start httpd.service']
).AndReturn(FakePOpen())
# monitor_services not running
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen(returncode=-1))
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl start httpd.service']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/bin/services_restarted']
).AndReturn(FakePOpen())
# monitor_services running
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"sysvinit": {
"httpd": {"enabled": "true", "ensureRunning": "true"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
# services not running
sh.monitor_services()
# services running
sh.monitor_services()
self.m.VerifyAll()
def test_services_handler_sysv_disabled_systemctl(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(True)
# apply_services
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl disable httpd.service']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl status httpd.service']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/bin/systemctl stop httpd.service']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"sysvinit": {
"httpd": {"enabled": "false", "ensureRunning": "false"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
self.m.VerifyAll()
def test_services_handler_sysv_service_updaterc(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(False)
os.path.exists('/sbin/service').MultipleTimes().AndReturn(False)
os.path.exists('/sbin/chkconfig').MultipleTimes().AndReturn(False)
# apply_services
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/update-rc.d httpd enable']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd status']
).AndReturn(FakePOpen(returncode=-1))
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd start']
).AndReturn(FakePOpen())
# monitor_services not running
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd status']
).AndReturn(FakePOpen(returncode=-1))
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd start']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/bin/services_restarted']
).AndReturn(FakePOpen())
# monitor_services running
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd status']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"sysvinit": {
"httpd": {"enabled": "true", "ensureRunning": "true"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
# services not running
sh.monitor_services()
# services running
sh.monitor_services()
self.m.VerifyAll()
def test_services_handler_sysv_disabled_service_updaterc(self):
self.m.StubOutWithMock(os.path, 'exists')
os.path.exists('/bin/systemctl').MultipleTimes().AndReturn(False)
os.path.exists('/sbin/service').MultipleTimes().AndReturn(False)
os.path.exists('/sbin/chkconfig').MultipleTimes().AndReturn(False)
# apply_services
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/update-rc.d httpd disable']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd status']
).AndReturn(FakePOpen())
self.mock_cmd_run(
['su', 'root', '-c', '/usr/sbin/service httpd stop']
).AndReturn(FakePOpen())
self.m.ReplayAll()
services = {
"sysvinit": {
"httpd": {"enabled": "false", "ensureRunning": "false"}
}
}
hooks = [
cfn_helper.Hook(
'hook1',
'service.restarted',
'Resources.resource1.Metadata',
'root',
'/bin/services_restarted')
]
sh = cfn_helper.ServicesHandler(services, 'resource1', hooks)
sh.apply_services()
self.m.VerifyAll()
class TestHupConfig(MockPopenTestCase):
    """Tests for cfn_helper.HupConfig parsing and hook execution.

    Fix: NamedTemporaryFile defaults to binary mode, so writing str data
    fails on Python 3.  Open the temp files with an explicit mode='w',
    consistent with the other tests in this file.
    """

    def test_load_main_section(self):
        """Parse the [main] section, apply defaults, validate credentials."""
        fcreds = tempfile.NamedTemporaryFile(mode='w')
        fcreds.write('AWSAccessKeyId=foo\nAWSSecretKey=bar\n')
        fcreds.flush()

        main_conf = tempfile.NamedTemporaryFile(mode='w')
        main_conf.write('''[main]
stack=teststack
credential-file=%s''' % fcreds.name)
        main_conf.flush()
        mainconfig = cfn_helper.HupConfig([open(main_conf.name)])
        # region and interval fall back to their defaults (nova / 10).
        self.assertEqual(
            '{stack: teststack, credential_file: %s, '
            'region: nova, interval:10}' % fcreds.name,
            str(mainconfig))
        main_conf.close()

        main_conf = tempfile.NamedTemporaryFile(mode='w')
        main_conf.write('''[main]
stack=teststack
region=region1
credential-file=%s-invalid
interval=120''' % fcreds.name)
        main_conf.flush()
        # A nonexistent credentials file must be rejected.
        e = self.assertRaises(Exception, cfn_helper.HupConfig,
                              [open(main_conf.name)])
        self.assertIn('invalid credentials file', str(e))
        fcreds.close()

    def test_hup_config(self):
        """Load hooks from config and fire each one exactly once."""
        # Expectations are recorded in resource-name order, matching the
        # sorted order the hooks are triggered in below.
        self.mock_cmd_run(
            ['su', 'root', '-c', '/bin/cfn-http-restarted']).AndReturn(
                FakePOpen('All good'))
        self.mock_cmd_run(['su', 'root', '-c', '/bin/hook1']).AndReturn(
            FakePOpen('All good'))
        self.mock_cmd_run(['su', 'root', '-c', '/bin/hook2']).AndReturn(
            FakePOpen('All good'))
        self.mock_cmd_run(['su', 'root', '-c', '/bin/hook3']).AndReturn(
            FakePOpen('All good'))
        self.m.ReplayAll()

        hooks_conf = tempfile.NamedTemporaryFile(mode='w')

        def write_hook_conf(f, name, triggers, path, action):
            # One INI section per hook, always run as root.
            f.write(
                '[%s]\ntriggers=%s\npath=%s\naction=%s\nrunas=root\n\n' % (
                    name, triggers, path, action))

        write_hook_conf(
            hooks_conf,
            'hook2',
            'service2.restarted',
            'Resources.resource2.Metadata',
            '/bin/hook2')
        write_hook_conf(
            hooks_conf,
            'hook1',
            'service1.restarted',
            'Resources.resource1.Metadata',
            '/bin/hook1')
        write_hook_conf(
            hooks_conf,
            'hook3',
            'service3.restarted',
            'Resources.resource3.Metadata',
            '/bin/hook3')
        write_hook_conf(
            hooks_conf,
            'cfn-http-restarted',
            'service.restarted',
            'Resources.resource.Metadata',
            '/bin/cfn-http-restarted')
        hooks_conf.flush()

        fcreds = tempfile.NamedTemporaryFile(mode='w')
        fcreds.write('AWSAccessKeyId=foo\nAWSSecretKey=bar\n')
        fcreds.flush()

        main_conf = tempfile.NamedTemporaryFile(mode='w')
        main_conf.write('''[main]
stack=teststack
credential-file=%s
region=region1
interval=120''' % fcreds.name)
        main_conf.flush()

        mainconfig = cfn_helper.HupConfig([
            open(main_conf.name),
            open(hooks_conf.name)])

        unique_resources = mainconfig.unique_resources_get()
        self.assertThat([
            'resource',
            'resource1',
            'resource2',
            'resource3',
        ], ttm.Equals(sorted(unique_resources)))

        hooks = sorted(mainconfig.hooks,
                       key=lambda hook: hook.resource_name_get())
        self.assertEqual(4, len(hooks))
        self.assertEqual(
            '{cfn-http-restarted, service.restarted,'
            ' Resources.resource.Metadata, root, /bin/cfn-http-restarted}',
            str(hooks[0]))
        self.assertEqual(
            '{hook1, service1.restarted, Resources.resource1.Metadata,'
            ' root, /bin/hook1}', str(hooks[1]))
        self.assertEqual(
            '{hook2, service2.restarted, Resources.resource2.Metadata,'
            ' root, /bin/hook2}', str(hooks[2]))
        self.assertEqual(
            '{hook3, service3.restarted, Resources.resource3.Metadata,'
            ' root, /bin/hook3}', str(hooks[3]))

        for hook in hooks:
            hook.event(hook.triggers, None, hook.resource_name_get())

        hooks_conf.close()
        fcreds.close()
        main_conf.close()
        self.m.VerifyAll()
class TestCfnHelper(testtools.TestCase):
    """Tests for small cfn_helper utility functions."""

    def _check_metadata_content(self, content, value):
        """Write *content* to a temp file and assert the parsed port.

        Fix: NamedTemporaryFile defaults to binary mode; an explicit
        mode='w' is required to write str data on Python 3 (consistent
        with test_parse_creds_file below).
        """
        with tempfile.NamedTemporaryFile(mode='w') as metadata_info:
            metadata_info.write(content)
            metadata_info.flush()
            port = cfn_helper.metadata_server_port(metadata_info.name)
            self.assertEqual(value, port)

    def test_metadata_server_port(self):
        self._check_metadata_content("http://172.20.42.42:8000\n", 8000)

    def test_metadata_server_port_https(self):
        self._check_metadata_content("https://abc.foo.bar:6969\n", 6969)

    def test_metadata_server_port_noport(self):
        self._check_metadata_content("http://172.20.42.42\n", None)

    def test_metadata_server_port_justip(self):
        self._check_metadata_content("172.20.42.42", None)

    def test_metadata_server_port_weird(self):
        self._check_metadata_content("::::", None)
        self._check_metadata_content("beforecolons:aftercolons", None)

    def test_metadata_server_port_emptyfile(self):
        self._check_metadata_content("\n", None)
        self._check_metadata_content("", None)

    def test_metadata_server_nofile(self):
        random_filename = self.getUniqueString()
        # assertIsNone is clearer than assertEqual(None, ...).
        self.assertIsNone(cfn_helper.metadata_server_port(random_filename))

    def test_to_boolean(self):
        """Accepted truthy and falsy spellings for to_boolean()."""
        self.assertTrue(cfn_helper.to_boolean(True))
        self.assertTrue(cfn_helper.to_boolean('true'))
        self.assertTrue(cfn_helper.to_boolean('yes'))
        self.assertTrue(cfn_helper.to_boolean('1'))
        self.assertTrue(cfn_helper.to_boolean(1))

        self.assertFalse(cfn_helper.to_boolean(False))
        self.assertFalse(cfn_helper.to_boolean('false'))
        self.assertFalse(cfn_helper.to_boolean('no'))
        self.assertFalse(cfn_helper.to_boolean('0'))
        self.assertFalse(cfn_helper.to_boolean(0))
        self.assertFalse(cfn_helper.to_boolean(None))
        # Unknown strings are treated as False.
        self.assertFalse(cfn_helper.to_boolean('fingle'))

    def test_parse_creds_file(self):
        """Credential files parse with or without spaces around '='."""
        def parse_creds_test(file_contents, creds_match):
            with tempfile.NamedTemporaryFile(mode='w') as fcreds:
                fcreds.write(file_contents)
                fcreds.flush()
                creds = cfn_helper.parse_creds_file(fcreds.name)
                self.assertThat(creds_match, ttm.Equals(creds))
        parse_creds_test(
            'AWSAccessKeyId=foo\nAWSSecretKey=bar\n',
            {'AWSAccessKeyId': 'foo', 'AWSSecretKey': 'bar'}
        )
        parse_creds_test(
            'AWSAccessKeyId =foo\nAWSSecretKey= bar\n',
            {'AWSAccessKeyId': 'foo', 'AWSSecretKey': 'bar'}
        )
        parse_creds_test(
            'AWSAccessKeyId = foo\nAWSSecretKey = bar\n',
            {'AWSAccessKeyId': 'foo', 'AWSSecretKey': 'bar'}
        )
class TestMetadataRetrieve(testtools.TestCase):
    """Tests for cfn_helper.Metadata retrieval, caching and display."""

    def setUp(self):
        super(TestMetadataRetrieve, self).setUp()
        # Fresh temp dir per test; last_metadata is the local cache file.
        self.tdir = self.useFixture(fixtures.TempDir())
        self.last_file = os.path.join(self.tdir.path, 'last_metadata')

    def test_metadata_retrieve_files(self):
        """Metadata read from a default file is cached to last_path."""
        md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
            "/tmp/foo": {"content": "bar"}}}}}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        with tempfile.NamedTemporaryFile(mode='w+') as default_file:
            default_file.write(md_str)
            default_file.flush()
            self.assertThat(default_file.name, ttm.FileContains(md_str))

            self.assertTrue(
                md.retrieve(default_path=default_file.name,
                            last_path=self.last_file))
            # Retrieval writes the cache file.
            self.assertThat(self.last_file, ttm.FileContains(md_str))
            self.assertThat(md_data, ttm.Equals(md._metadata))

            # A second retrieval (cache now present) yields the same data.
            md = cfn_helper.Metadata('teststack', None)
            self.assertTrue(md.retrieve(default_path=default_file.name,
                                        last_path=self.last_file))
            self.assertThat(md_data, ttm.Equals(md._metadata))

    def test_metadata_retrieve_none(self):
        """retrieve() fails cleanly when no metadata source exists."""
        md = cfn_helper.Metadata('teststack', None)
        default_file = os.path.join(self.tdir.path, 'default_file')

        self.assertFalse(md.retrieve(default_path=default_file,
                                     last_path=self.last_file))
        self.assertIsNone(md._metadata)

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display()
        fake_stdout.flush()
        # Nothing is printed when no metadata was retrieved.
        self.assertEqual(displayed.getDetails()['stdout'].as_text(), "")

    def test_metadata_retrieve_passed(self):
        """Metadata passed directly via meta_str is stored and displayed."""
        md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
            "/tmp/foo": {"content": "bar"}}}}}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(md.retrieve(meta_str=md_data,
                                    last_path=self.last_file))
        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertEqual(md_str, str(md))

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display()
        fake_stdout.flush()
        # display() dumps the full metadata as JSON.
        self.assertEqual(displayed.getDetails()['stdout'].as_text(),
                         "{\"AWS::CloudFormation::Init\": {\"config\": {"
                         "\"files\": {\"/tmp/foo\": {\"content\": \"bar\"}"
                         "}}}}\n")

    def test_metadata_retrieve_by_key_passed(self):
        """display(key) prints only the selected sub-tree."""
        md_data = {"foo": {"bar": {"fred.1": "abcd"}}}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(md.retrieve(meta_str=md_data,
                                    last_path=self.last_file))
        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertEqual(md_str, str(md))

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display("foo")
        fake_stdout.flush()
        self.assertEqual(displayed.getDetails()['stdout'].as_text(),
                         "{\"bar\": {\"fred.1\": \"abcd\"}}\n")

    def test_metadata_retrieve_by_nested_key_passed(self):
        """Quoted key segments allow dots inside a single key."""
        md_data = {"foo": {"bar": {"fred.1": "abcd"}}}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(md.retrieve(meta_str=md_data,
                                    last_path=self.last_file))
        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertEqual(md_str, str(md))

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display("foo.bar.'fred.1'")
        fake_stdout.flush()
        self.assertEqual(displayed.getDetails()['stdout'].as_text(),
                         '"abcd"\n')

    def test_metadata_retrieve_key_none(self):
        """An unknown top-level key prints nothing."""
        md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
            "/tmp/foo": {"content": "bar"}}}}}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(md.retrieve(meta_str=md_data,
                                    last_path=self.last_file))
        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertEqual(md_str, str(md))

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display("no_key")
        fake_stdout.flush()
        self.assertEqual(displayed.getDetails()['stdout'].as_text(), "")

    def test_metadata_retrieve_by_nested_key_none(self):
        """An unknown nested key prints nothing."""
        md_data = {"foo": {"bar": {"fred.1": "abcd"}}}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(md.retrieve(meta_str=md_data,
                                    last_path=self.last_file))
        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertEqual(md_str, str(md))

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display("foo.fred")
        fake_stdout.flush()
        self.assertEqual(displayed.getDetails()['stdout'].as_text(), "")

    def test_metadata_retrieve_by_nested_key_none_with_matching_string(self):
        """A key path must not descend into string values."""
        md_data = {"foo": "bar"}
        md_str = json.dumps(md_data)

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(md.retrieve(meta_str=md_data,
                                    last_path=self.last_file))
        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertEqual(md_str, str(md))

        displayed = self.useFixture(fixtures.StringStream('stdout'))
        fake_stdout = displayed.stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', fake_stdout))
        md.display("foo.bar")
        fake_stdout.flush()
        self.assertEqual(displayed.getDetails()['stdout'].as_text(), "")

    def test_metadata_creates_cache(self):
        """retrieve() creates the cache dir/file with restrictive perms."""
        temp_home = tempfile.mkdtemp()

        def cleanup_temp_home(thome):
            os.unlink(os.path.join(thome, 'cache', 'last_metadata'))
            os.rmdir(os.path.join(thome, 'cache'))
            os.rmdir(os.path.join(thome))

        self.addCleanup(cleanup_temp_home, temp_home)

        last_path = os.path.join(temp_home, 'cache', 'last_metadata')
        md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
            "/tmp/foo": {"content": "bar"}}}}}
        md_str = json.dumps(md_data)
        md = cfn_helper.Metadata('teststack', None)

        self.assertFalse(os.path.exists(last_path),
                         "last_metadata file already exists")
        self.assertTrue(md.retrieve(meta_str=md_str, last_path=last_path))
        self.assertTrue(os.path.exists(last_path),
                        "last_metadata file should exist")
        # Ensure created dirs and file have right perms
        self.assertTrue(os.stat(last_path).st_mode & 0o600 == 0o600)
        self.assertTrue(
            os.stat(os.path.dirname(last_path)).st_mode & 0o700 == 0o700)

    def test_is_valid_metadata(self):
        """_is_valid_metadata() unwraps the AWS::CloudFormation::Init key."""
        md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
            "/tmp/foo": {"content": "bar"}}}}}

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(
            md.retrieve(meta_str=md_data, last_path=self.last_file))

        self.assertThat(md_data, ttm.Equals(md._metadata))
        self.assertTrue(md._is_valid_metadata())
        # Validation replaces _metadata with the unwrapped inner dict.
        self.assertThat(
            md_data['AWS::CloudFormation::Init'], ttm.Equals(md._metadata))

    def test_remote_metadata(self):
        """Metadata is fetched via describe_stack_resource (keys or file)."""
        md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
            "/tmp/foo": {"content": "bar"}}}}}

        m = mox.Mox()
        m.StubOutWithMock(
            cfn.CloudFormationConnection, 'describe_stack_resource')
        cfn.CloudFormationConnection.describe_stack_resource(
            'teststack', None).MultipleTimes().AndReturn({
                'DescribeStackResourceResponse': {
                    'DescribeStackResourceResult': {
                        'StackResourceDetail': {'Metadata': md_data}}}})
        m.ReplayAll()

        try:
            # Credentials supplied directly as keyword arguments.
            md = cfn_helper.Metadata(
                'teststack',
                None,
                access_key='foo',
                secret_key='bar')
            self.assertTrue(md.retrieve(last_path=self.last_file))
            self.assertThat(md_data, ttm.Equals(md._metadata))

            # Credentials supplied via a credentials file.
            with tempfile.NamedTemporaryFile(mode='w') as fcreds:
                fcreds.write('AWSAccessKeyId=foo\nAWSSecretKey=bar\n')
                fcreds.flush()
                md = cfn_helper.Metadata(
                    'teststack', None, credentials_file=fcreds.name)
                self.assertTrue(md.retrieve(last_path=self.last_file))
            self.assertThat(md_data, ttm.Equals(md._metadata))

            m.VerifyAll()
        finally:
            m.UnsetStubs()

    def test_nova_meta_with_cache(self):
        """get_nova_meta() reads the cache file when it already exists."""
        meta_in = {"uuid": "f9431d18-d971-434d-9044-5b38f5b4646f",
                   "availability_zone": "nova",
                   "hostname": "as-wikidatabase-4ykioj3lgi57.novalocal",
                   "launch_index": 0,
                   "meta": {},
                   "public_keys": {"heat_key": "ssh-rsa etc...\n"},
                   "name": "as-WikiDatabase-4ykioj3lgi57"}
        md_str = json.dumps(meta_in)

        md = cfn_helper.Metadata('teststack', None)
        with tempfile.NamedTemporaryFile(mode='w+') as default_file:
            default_file.write(md_str)
            default_file.flush()
            self.assertThat(default_file.name, ttm.FileContains(md_str))
            meta_out = md.get_nova_meta(cache_path=default_file.name)

            self.assertEqual(meta_in, meta_out)

    def test_nova_meta_curl(self):
        """get_nova_meta() curls the metadata service into the cache."""
        url = 'http://169.254.169.254/openstack/2012-08-10/meta_data.json'
        temp_home = tempfile.mkdtemp()
        cache_path = os.path.join(temp_home, 'meta_data.json')

        def cleanup_temp_home(thome):
            os.unlink(cache_path)
            os.rmdir(thome)

        self.m = mox.Mox()
        self.addCleanup(self.m.UnsetStubs)
        self.addCleanup(cleanup_temp_home, temp_home)

        meta_in = {"uuid": "f9431d18-d971-434d-9044-5b38f5b4646f",
                   "availability_zone": "nova",
                   "hostname": "as-wikidatabase-4ykioj3lgi57.novalocal",
                   "launch_index": 0,
                   "meta": {"freddy": "is hungry"},
                   "public_keys": {"heat_key": "ssh-rsa etc...\n"},
                   "name": "as-WikiDatabase-4ykioj3lgi57"}
        md_str = json.dumps(meta_in)

        def write_cache_file(*params, **kwargs):
            # Side effect standing in for the real curl download.
            with open(cache_path, 'w+') as cache_file:
                cache_file.write(md_str)
                cache_file.flush()
                self.assertThat(cache_file.name, ttm.FileContains(md_str))

        self.m.StubOutWithMock(subprocess, 'Popen')
        subprocess.Popen(['su', 'root', '-c',
                          'curl -o %s %s' % (cache_path, url)],
                         cwd=None, env=None, stderr=-1, stdout=-1)\
            .WithSideEffects(write_cache_file)\
            .AndReturn(FakePOpen('Downloaded', '', 0))
        self.m.ReplayAll()

        md = cfn_helper.Metadata('teststack', None)
        meta_out = md.get_nova_meta(cache_path=cache_path)
        self.assertEqual(meta_in, meta_out)
        self.m.VerifyAll()

    def test_nova_meta_curl_corrupt(self):
        """A corrupt (non-JSON) download yields None."""
        url = 'http://169.254.169.254/openstack/2012-08-10/meta_data.json'
        temp_home = tempfile.mkdtemp()
        cache_path = os.path.join(temp_home, 'meta_data.json')

        def cleanup_temp_home(thome):
            os.unlink(cache_path)
            os.rmdir(thome)

        self.m = mox.Mox()
        self.addCleanup(self.m.UnsetStubs)
        self.addCleanup(cleanup_temp_home, temp_home)

        md_str = "this { is not really json"

        def write_cache_file(*params, **kwargs):
            with open(cache_path, 'w+') as cache_file:
                cache_file.write(md_str)
                cache_file.flush()
                self.assertThat(cache_file.name, ttm.FileContains(md_str))

        self.m.StubOutWithMock(subprocess, 'Popen')
        subprocess.Popen(['su', 'root', '-c',
                          'curl -o %s %s' % (cache_path, url)],
                         cwd=None, env=None, stderr=-1, stdout=-1)\
            .WithSideEffects(write_cache_file)\
            .AndReturn(FakePOpen('Downloaded', '', 0))
        self.m.ReplayAll()

        md = cfn_helper.Metadata('teststack', None)
        meta_out = md.get_nova_meta(cache_path=cache_path)
        self.assertEqual(None, meta_out)
        self.m.VerifyAll()

    def test_nova_meta_curl_failed(self):
        """A failed curl (non-zero exit) yields None."""
        url = 'http://169.254.169.254/openstack/2012-08-10/meta_data.json'
        temp_home = tempfile.mkdtemp()
        cache_path = os.path.join(temp_home, 'meta_data.json')

        def cleanup_temp_home(thome):
            os.rmdir(thome)

        self.m = mox.Mox()
        self.addCleanup(self.m.UnsetStubs)
        self.addCleanup(cleanup_temp_home, temp_home)

        self.m.StubOutWithMock(subprocess, 'Popen')
        subprocess.Popen(['su', 'root', '-c',
                          'curl -o %s %s' % (cache_path, url)],
                         cwd=None, env=None, stderr=-1, stdout=-1)\
            .AndReturn(FakePOpen('Failed', '', 1))
        self.m.ReplayAll()

        md = cfn_helper.Metadata('teststack', None)
        meta_out = md.get_nova_meta(cache_path=cache_path)
        self.assertEqual(None, meta_out)
        self.m.VerifyAll()

    def test_get_tags(self):
        """get_tags() returns nova 'meta' plus the InstanceId."""
        self.m = mox.Mox()
        self.addCleanup(self.m.UnsetStubs)
        fake_tags = {'foo': 'fee',
                     'apple': 'red'}
        md_data = {"uuid": "f9431d18-d971-434d-9044-5b38f5b4646f",
                   "availability_zone": "nova",
                   "hostname": "as-wikidatabase-4ykioj3lgi57.novalocal",
                   "launch_index": 0,
                   "meta": fake_tags,
                   "public_keys": {"heat_key": "ssh-rsa etc...\n"},
                   "name": "as-WikiDatabase-4ykioj3lgi57"}
        # NOTE: tags_expect aliases fake_tags, so this also mutates md_data.
        tags_expect = fake_tags
        tags_expect['InstanceId'] = md_data['uuid']

        md = cfn_helper.Metadata('teststack', None)
        self.m.StubOutWithMock(md, 'get_nova_meta')
        md.get_nova_meta().AndReturn(md_data)
        self.m.ReplayAll()

        tags = md.get_tags()
        self.assertEqual(tags_expect, tags)
        self.m.VerifyAll()

    def test_get_instance_id(self):
        """get_instance_id() returns the nova metadata uuid."""
        self.m = mox.Mox()
        self.addCleanup(self.m.UnsetStubs)
        uuid = "f9431d18-d971-434d-9044-5b38f5b4646f"
        md_data = {"uuid": uuid,
                   "availability_zone": "nova",
                   "hostname": "as-wikidatabase-4ykioj3lgi57.novalocal",
                   "launch_index": 0,
                   "public_keys": {"heat_key": "ssh-rsa etc...\n"},
                   "name": "as-WikiDatabase-4ykioj3lgi57"}

        md = cfn_helper.Metadata('teststack', None)
        self.m.StubOutWithMock(md, 'get_nova_meta')
        md.get_nova_meta().AndReturn(md_data)
        self.m.ReplayAll()
        self.assertEqual(md.get_instance_id(), uuid)
        self.m.VerifyAll()
class TestCfnInit(MockPopenTestCase):
    """Tests for Metadata.cfn_init() configuration application."""

    def setUp(self):
        super(TestCfnInit, self).setUp()
        # Fresh temp dir per test; last_metadata is the local cache file.
        self.tdir = self.useFixture(fixtures.TempDir())
        self.last_file = os.path.join(self.tdir.path, 'last_metadata')

    def test_cfn_init(self):
        """cfn_init() writes files declared in the metadata."""
        with tempfile.NamedTemporaryFile(mode='w+') as foo_file:
            md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
                foo_file.name: {"content": "bar"}}}}}

            md = cfn_helper.Metadata('teststack', None)
            self.assertTrue(
                md.retrieve(meta_str=md_data, last_path=self.last_file))
            md.cfn_init()
            self.assertThat(foo_file.name, ttm.FileContains('bar'))

    def test_cfn_init_with_ignore_errors_false(self):
        """A failing command with ignoreErrors=false raises."""
        self.mock_cmd_run(['su', 'root', '-c', '/bin/command1']).AndReturn(
            FakePOpen('Doing something', 'error', -1))
        self.m.ReplayAll()

        md_data = {"AWS::CloudFormation::Init": {"config": {"commands": {
            "00_foo": {"command": "/bin/command1",
                       "ignoreErrors": "false"}}}}}

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(
            md.retrieve(meta_str=md_data, last_path=self.last_file))
        self.assertRaises(cfn_helper.CommandsHandlerRunError, md.cfn_init)

    def test_cfn_init_with_ignore_errors_true(self):
        """A failing command with ignoreErrors=true lets later ones run."""
        self.mock_cmd_run(['su', 'root', '-c', '/bin/command1']).AndReturn(
            FakePOpen('Doing something', 'error', -1))
        self.mock_cmd_run(['su', 'root', '-c', '/bin/command2']).AndReturn(
            FakePOpen('All good'))
        self.m.ReplayAll()

        md_data = {"AWS::CloudFormation::Init": {"config": {"commands": {
            "00_foo": {"command": "/bin/command1",
                       "ignoreErrors": "true"},
            "01_bar": {"command": "/bin/command2",
                       "ignoreErrors": "false"}
        }}}}

        md = cfn_helper.Metadata('teststack', None)
        self.assertTrue(
            md.retrieve(meta_str=md_data, last_path=self.last_file))
        md.cfn_init()
class TestSourcesHandler(MockPopenTestCase):
    """Tests for cfn_helper.SourcesHandler download/unpack commands."""

    def test_apply_sources_empty(self):
        # No sources configured: nothing runs, nothing raises.
        sh = cfn_helper.SourcesHandler({})
        sh.apply_sources()

    def _test_apply_sources(self, url, end_file):
        """Expect a single curl|gunzip|tar pipeline fetching *url*."""
        dest = tempfile.mkdtemp()
        self.addCleanup(os.rmdir, dest)
        sources = {dest: url}
        td = os.path.dirname(end_file)
        self.m.StubOutWithMock(tempfile, 'mkdtemp')
        tempfile.mkdtemp().AndReturn(td)
        er = "mkdir -p '%s'; cd '%s'; curl -s '%s' | gunzip | tar -xvf -"
        cmd = ['su', 'root', '-c',
               er % (dest, dest, url)]
        self.mock_cmd_run(cmd).AndReturn(FakePOpen('Curl good'))
        self.m.ReplayAll()
        sh = cfn_helper.SourcesHandler(sources)
        sh.apply_sources()

    def test_apply_sources_github(self):
        """GitHub tarball URLs are fetched like any gzip tarball."""
        url = "https://github.com/NoSuchProject/tarball/NoSuchTarball"
        td = tempfile.mkdtemp()
        self.addCleanup(os.rmdir, td)
        end_file = '%s/NoSuchProject-NoSuchTarball.tar.gz' % td
        self._test_apply_sources(url, end_file)

    def test_apply_sources_general(self):
        """Plain .tar.gz URLs are fetched and unpacked."""
        url = "https://website.no.existe/a/b/c/file.tar.gz"
        td = tempfile.mkdtemp()
        self.addCleanup(os.rmdir, td)
        end_file = '%s/file.tar.gz' % td
        self._test_apply_sources(url, end_file)

    def test_apply_source_cmd(self):
        """_apply_source_cmd picks the unpack pipeline per URL extension."""
        sh = cfn_helper.SourcesHandler({})
        er = "mkdir -p '%s'; cd '%s'; curl -s '%s' | %s | tar -xvf -"
        dest = '/tmp'
        # test tgz
        url = 'http://www.example.com/a.tgz'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "gunzip"), cmd)
        # test tar.gz
        url = 'http://www.example.com/a.tar.gz'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "gunzip"), cmd)
        # test github - tarball 1
        url = 'https://github.com/openstack/heat-cfntools/tarball/master'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "gunzip"), cmd)
        # test github - tarball 2
        url = 'https://github.com/openstack/heat-cfntools/tarball/master/'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "gunzip"), cmd)
        # test tbz2
        url = 'http://www.example.com/a.tbz2'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "bunzip2"), cmd)
        # test tar.bz2
        url = 'http://www.example.com/a.tar.bz2'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "bunzip2"), cmd)
        # test zip: downloaded to a temp dir first, then unzipped
        er = "mkdir -p '%s'; cd '%s'; curl -s -o '%s' '%s' && unzip -o '%s'"
        url = 'http://www.example.com/a.zip'
        d = "/tmp/tmp2I0yNK"
        tmp = "%s/a.zip" % d
        self.m.StubOutWithMock(tempfile, 'mkdtemp')
        tempfile.mkdtemp().AndReturn(d)
        self.m.ReplayAll()
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, tmp, url, tmp), cmd)
        # test gz: single decompressed file, no tar
        er = "mkdir -p '%s'; cd '%s'; curl -s '%s' | %s > '%s'"
        url = 'http://www.example.com/a.sh.gz'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "gunzip", "a.sh"), cmd)
        # test bz2
        url = 'http://www.example.com/a.sh.bz2'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual(er % (dest, dest, url, "bunzip2", "a.sh"), cmd)
        # test other: unknown extensions produce no command
        url = 'http://www.example.com/a.sh'
        cmd = sh._apply_source_cmd(dest, url)
        self.assertEqual("", cmd)
| 37.080398
| 77
| 0.574803
| 5,261
| 48,427
| 5.110815
| 0.089717
| 0.01562
| 0.018224
| 0.028117
| 0.819213
| 0.786968
| 0.74762
| 0.72683
| 0.708346
| 0.695701
| 0
| 0.011802
| 0.282611
| 48,427
| 1,305
| 78
| 37.108812
| 0.762154
| 0.036385
| 0
| 0.671815
| 0
| 0.007722
| 0.190949
| 0.0427
| 0
| 0
| 0
| 0
| 0.092664
| 1
| 0.062741
| false
| 0.003861
| 0.009653
| 0.002896
| 0.084942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6e12bf3c6a96788f08fd718e490c142c0c3be633
| 1,259
|
py
|
Python
|
ffprobe/FFVideoFrame.py
|
ifij775/ffprobe-python
|
73868263717825b08345446f2dc90bc8011a7624
|
[
"MIT"
] | null | null | null |
ffprobe/FFVideoFrame.py
|
ifij775/ffprobe-python
|
73868263717825b08345446f2dc90bc8011a7624
|
[
"MIT"
] | null | null | null |
ffprobe/FFVideoFrame.py
|
ifij775/ffprobe-python
|
73868263717825b08345446f2dc90bc8011a7624
|
[
"MIT"
] | null | null | null |
from ffprobe.FFFrame import FFFrame
class FFVideoFrame(FFFrame):
    """A single video frame parsed from ffprobe output.

    The raw ffprobe fields live in ``self._data`` (string values); the
    accessors below convert to int/bool/tuple where appropriate.
    """

    def _flag(self, key):
        # ffprobe encodes booleans as the strings '0' / '1'.
        return self._data[key] == '1'

    def width(self):
        """Frame width in pixels."""
        return int(self._data['width'])

    def height(self):
        """Frame height in pixels."""
        return int(self._data['height'])

    def frame_size(self):
        """(width, height) tuple in pixels."""
        return (self.width(), self.height())

    def pixel_format(self):
        """Pixel format name, e.g. 'yuv420p'."""
        return self._data['pix_fmt']

    def sample_aspect_ratio(self):
        return self._data['sample_aspect_ratio']

    def pict_type(self):
        """Picture type reported by ffprobe (e.g. I/P/B)."""
        return self._data['pict_type']

    def coded_picture_number(self):
        return int(self._data['coded_picture_number'])

    def display_picture_number(self):
        return int(self._data['display_picture_number'])

    def interlaced(self):
        """True when the frame is interlaced."""
        return self._flag('interlaced_frame')

    def top_field_first(self):
        """True when the top field is first (interlaced content)."""
        return self._flag('top_field_first')

    def repeat_pict(self):
        """True when the frame is flagged for repetition."""
        return self._flag('repeat_pict')

    def color_range(self):
        return self._data['color_range']

    def color_space(self):
        return self._data['color_space']

    def color_transfer(self):
        return self._data['color_transfer']

    def color_primaries(self):
        return self._data['color_primaries']
| 34.972222
| 56
| 0.661636
| 162
| 1,259
| 4.839506
| 0.253086
| 0.204082
| 0.214286
| 0.252551
| 0.257653
| 0.086735
| 0.086735
| 0
| 0
| 0
| 0
| 0.003015
| 0.20969
| 1,259
| 35
| 57
| 35.971429
| 0.784925
| 0
| 0
| 0
| 0
| 0
| 0.158062
| 0.017474
| 0
| 0
| 0
| 0
| 0
| 1
| 0.470588
| false
| 0
| 0.029412
| 0.470588
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
282e93476958c14847d0387442847c1084f198d0
| 10,691
|
py
|
Python
|
astm/tests/test_client.py
|
Iskander1b/python-astm
|
606a77407e59c2f2dd12d65a7b2d2e3c141ad8d9
|
[
"BSD-3-Clause"
] | 38
|
2015-06-11T06:43:02.000Z
|
2022-03-01T18:21:07.000Z
|
astm/tests/test_client.py
|
Iskander1b/python-astm
|
606a77407e59c2f2dd12d65a7b2d2e3c141ad8d9
|
[
"BSD-3-Clause"
] | 7
|
2016-08-12T10:16:34.000Z
|
2021-02-11T15:43:34.000Z
|
astm/tests/test_client.py
|
Iskander1b/python-astm
|
606a77407e59c2f2dd12d65a7b2d2e3c141ad8d9
|
[
"BSD-3-Clause"
] | 39
|
2015-08-10T16:49:33.000Z
|
2021-12-26T10:27:07.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
import unittest
from astm import codec
from astm import constants
from astm.exceptions import NotAccepted
from astm.client import Client
from astm.tests.utils import DummyMixIn
class DummyClient(DummyMixIn, Client):
    """astm.client.Client with real networking stubbed out for tests."""

    def __init__(self, *args, **kwargs):
        super(DummyClient, self).__init__(*args, **kwargs)
        # Disable timeout handling during tests.
        self.timeout = None

    def create_socket(self, family, type):
        """No-op: tests never open a real socket."""
        pass

    def connect(self, address):
        """No-op: tests never connect anywhere."""
        pass
class emitter(object):
    """Scripted record source: replays *args* and records sent values."""

    def __init__(self, *args):
        self.outbox = list(args)  # records still to be emitted
        self.pos = 0              # index of the next record
        self.inbox = []           # values received via send()

    def __iter__(self):
        return self

    def next(self):
        """Return the next scripted record, or raise StopIteration."""
        try:
            item = self.outbox[self.pos]
        except IndexError:
            raise StopIteration
        self.pos += 1
        return item

    __next__ = next

    def send(self, value):
        """Record the incoming *value*, then emit the next record."""
        self.inbox.append(value)
        return self.next()

    def put(self, record):
        """Queue *record* at the tail of the outbox."""
        self.outbox.append(record)
def simple_emitter():
    """Yield a minimal ASTM session: header record, then terminator."""
    for record in (['H'], ['L']):
        yield record
class ClientTestCase(unittest.TestCase):
def test_open_connection(self):
client = DummyClient(simple_emitter)
client.handle_connect()
self.assertEqual(client.outbox[0], constants.ENQ)
def test_fail_on_enq(self):
client = DummyClient(emitter)
self.assertRaises(NotAccepted, client.on_enq)
def test_fail_on_eot(self):
client = DummyClient(emitter)
self.assertRaises(NotAccepted, client.on_eot)
def test_fail_on_message(self):
client = DummyClient(emitter)
self.assertRaises(NotAccepted, client.on_message)
    def test_callback_on_sent_failure(self):
        """A NAKed record surfaces as a False send result in the emitter."""
        def emitter():
            yield ['H']
            assert not (yield ['P'])  # 'P' gets NAKed below
            yield ['L']
        client = DummyClient(emitter)
        client.handle_connect()
        client.on_ack()
        client.on_ack()
        client.on_nak()
    def test_emitter_may_send_new_record_after_nak_response(self):
        """After a NAK the emitter may substitute a different record."""
        def emitter():
            yield ['H']
            assert (yield ['P'])
            ok = yield ['O']
            if not ok:
                yield ['R']  # replacement record after the NAK
            yield ['L']
        client = DummyClient(emitter)
        client.handle_connect()
        client.on_ack()
        client.on_ack()
        client.on_ack()
        client.on_nak()
        # The replacement 'R' record is the last frame sent.
        self.assertEqual(client.outbox[-1][2:3], b'R')
    def test_empty_emitter(self):
        """An emitter with no records sends ENQ, then terminates with EOT."""
        def emitter():
            if False:
                yield
        client = DummyClient(emitter)
        client.handle_connect()
        self.assertEqual(client.outbox[-1], constants.ENQ)
        client.on_ack()
        self.assertEqual(client.outbox[-2], constants.EOT)
        self.assertEqual(client.outbox[-1], None)
    def test_early_yield(self):
        """The first emitted record must be a header record."""
        def emitter():
            yield ['P']  # not a header: triggers the assertion below
            if False:
                yield ['H']
            yield ['L']
        client = DummyClient(emitter)
        client.handle_connect()
        self.assertRaises(AssertionError, client.on_ack)
    def test_late_ack(self):
        """Extra ACKs after termination keep the connection closed."""
        def emitter():
            if False:
                yield ['H']
                yield ['L']
        client = DummyClient(emitter)
        client.handle_connect()
        self.assertEqual(client.outbox[-1], constants.ENQ)
        client.on_ack()
        self.assertEqual(client.outbox[-2], constants.EOT)
        self.assertEqual(client.outbox[-1], None)
        client.on_ack()
        # A late ACK must not restart the session.
        self.assertEqual(client.outbox[-1], None)
    def test_dummy_usage(self):
        """Happy-path session: frames go out numbered 1H, 2P, 3O, 4L."""
        def emitter():
            yield ['H']
            ok = yield ['P']
            assert ok
            ok = yield ['O']
            assert ok
            yield ['L']
        client = DummyClient(emitter)
        client.handle_connect()
        self.assertEqual(client.outbox[-1], constants.ENQ)
        client.on_ack()
        self.assertEqual(client.outbox[-1][1:3], b'1H')
        client.on_ack()
        self.assertEqual(client.outbox[-1][1:3], b'2P')
        client.on_ack()
        self.assertEqual(client.outbox[-1][1:3], b'3O')
        client.on_ack()
        self.assertEqual(client.outbox[-1][1:3], b'4L')
        client.on_ack()
        # Session ends with EOT; a new ENQ is emitted right away.
        self.assertEqual(client.outbox[-2], constants.EOT)
        self.assertEqual(client.outbox[-1], constants.ENQ)
        client.on_ack()
        # The emitter is exhausted, so the follow-up session terminates.
        self.assertEqual(client.outbox[-2], constants.EOT)
        self.assertEqual(client.outbox[-1], None)
def test_reject_header(self):
def emitter():
assert (yield ['H'])
yield ['P']
yield ['O']
yield ['L']
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
self.assertRaises(AssertionError, client.on_nak)
def test_nak_callback(self):
def emitter():
yield ['H']
assert not (yield ['P'])
yield ['L']
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
client.on_ack()
client.on_nak()
client.on_ack()
def test_emit_after_nak(self):
def emitter():
yield ['H']
assert not (yield ['P'])
yield ['O']
yield ['L']
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
client.on_ack()
client.on_nak()
client.on_ack()
def test_terminate_on_exception_after_nake(self):
def emitter():
yield ['H']
assert (yield ['P'])
yield ['O']
yield ['L']
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
client.on_ack()
self.assertRaises(AssertionError, client.on_nak)
self.assertEqual(client.outbox[-2], constants.EOT)
self.assertEqual(client.outbox[-1], None)
def test_messages_workflow(self):
def emitter():
yield ['H']
yield ['C']
yield ['P']
yield ['O']
yield ['O']
yield ['P']
yield ['C']
yield ['O']
yield ['O']
yield ['C']
yield ['R']
yield ['C']
yield ['R']
yield ['R']
yield ['L']
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
while client.outbox[-1] is not None:
client.on_ack()
def test_session_in_loop(self):
def emitter():
for i in range(2):
yield ['H']
yield ['P']
yield ['O']
yield ['L']
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
while client.outbox[-1] is not None:
client.on_ack()
self.assertEqual(list(client.outbox),
[b'\x05',
b'\x021H\r\x0389\r\n',
b'\x022P\r\x0392\r\n',
b'\x023O\r\x0392\r\n',
b'\x024L\r\x0390\r\n',
b'\x04',
b'\x05',
b'\x021H\r\x0389\r\n',
b'\x022P\r\x0392\r\n',
b'\x023O\r\x0392\r\n',
b'\x024L\r\x0390\r\n',
b'\x04',
b'\x05',
b'\x04',
None])
def test_reject_terminator(self):
def emitter():
assert (yield ['H'])
assert (yield ['P'])
assert (yield ['O'])
assert (yield ['L'])
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
client.on_ack()
client.on_ack()
client.on_ack()
self.assertEqual(client.outbox[-1][1:3], b'4L')
self.assertRaises(AssertionError, client.on_nak)
self.assertEqual(client.outbox[-2], constants.EOT)
self.assertEqual(client.outbox[-1], None)
def test_timeout_handler(self):
def emitter():
assert (yield ['H'])
assert (yield ['P'])
assert (yield ['O'])
assert (yield ['L'])
client = DummyClient(emitter)
client.handle_connect()
client.on_ack()
client.on_timeout()
self.assertEqual(client.outbox[-2], constants.EOT)
self.assertEqual(client.outbox[-1], None)
def test_chunked_response(self):
def emitter():
assert (yield ['H', 'foo', 'bar'])
assert (yield ['L', 'bar', 'baz'])
client = DummyClient(emitter, chunk_size=12)
client.handle_connect()
client.on_ack()
self.assertTrue(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x021H|foo\x1750\r\n')
client.on_ack()
self.assertFalse(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x022|bar\r\x03F3\r\n')
client.on_ack()
self.assertTrue(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x023L|bar\x1747\r\n')
client.on_ack()
self.assertFalse(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x024|baz\r\x03FD\r\n')
client.on_ack()
self.assertEqual(client.outbox[-1], constants.ENQ)
client.on_ack()
self.assertEqual(client.outbox[-1], None)
def test_bulk_mode(self):
def emitter():
assert (yield ['H', 'foo', 'bar'])
assert (yield ['L', 'bar', 'baz'])
client = DummyClient(emitter, chunk_size=12, bulk_mode=True)
client.handle_connect()
client.on_ack()
self.assertTrue(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x021H|foo\x1750\r\n')
client.on_ack()
self.assertTrue(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x022|bar\r\x1707\r\n')
client.on_ack()
self.assertTrue(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x023L|bar\x1747\r\n')
client.on_ack()
self.assertFalse(codec.is_chunked_message(client.outbox[-1]))
self.assertEqual(client.outbox[-1], b'\x024|baz\r\x03FD\r\n')
client.on_ack()
self.assertEqual(client.outbox[-1], constants.ENQ)
client.on_ack()
self.assertEqual(client.outbox[-1], None)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 30.286119
| 69
| 0.55542
| 1,251
| 10,691
| 4.603517
| 0.138289
| 0.077791
| 0.085952
| 0.173468
| 0.76489
| 0.730682
| 0.717659
| 0.712971
| 0.693697
| 0.653238
| 0
| 0.026376
| 0.308484
| 10,691
| 352
| 70
| 30.372159
| 0.752604
| 0.018614
| 0
| 0.745033
| 0
| 0
| 0.041973
| 0.008013
| 0
| 0
| 0
| 0
| 0.241722
| 1
| 0.149007
| false
| 0.006623
| 0.019868
| 0.003311
| 0.192053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9544ea04693dd462ba5326ad18ba1eb6b2058f6a
| 35,662
|
py
|
Python
|
tests/test_query.py
|
Yiling-J/pharos
|
a6dd80bd7c0475d78e6490735e3e5fd6eebc28c5
|
[
"BSD-3-Clause"
] | 1
|
2021-12-03T16:28:41.000Z
|
2021-12-03T16:28:41.000Z
|
tests/test_query.py
|
Yiling-J/pharos
|
a6dd80bd7c0475d78e6490735e3e5fd6eebc28c5
|
[
"BSD-3-Clause"
] | 14
|
2021-04-08T14:16:27.000Z
|
2021-05-24T15:15:11.000Z
|
tests/test_query.py
|
Yiling-J/pharos
|
a6dd80bd7c0475d78e6490735e3e5fd6eebc28c5
|
[
"BSD-3-Clause"
] | null | null | null |
import yaml
from unittest import TestCase, mock
from jinja2 import PackageLoader, Environment, FileSystemLoader
from kubernetes.dynamic import exceptions as api_exceptions
from pharos import models, fields, exceptions, lookups, backend, jinja
from pharos.jinja import to_yaml
from pharos.backend import TemplateBackend
class BaseCase(TestCase):
    """Shared fixture: a mocked pharos client wired to a mocked dynamic client."""

    def setUp(self):
        # Stand-in for the kubernetes dynamic client; tests assert on its calls.
        self.dynamic_client = mock.Mock()
        self.client = mock.Mock()
        # Enable chunked (paginated) list calls with a page size of 100.
        self.client.settings.enable_chunk = True
        self.client.settings.chunk_size = 100
        # Templates are loaded from the tests package itself.
        self.client.settings.jinja_loader = PackageLoader("tests", "./")
        self.client.settings.template_engine = "pharos.jinja.JinjaEngine"
        self.client.dynamic_client = self.dynamic_client
class DeploymentTestCase(BaseCase):
    """Query, pagination and CRUD behaviour of the Deployment model."""

    def test_no_client(self):
        """Evaluating a queryset without ``.using(client)`` must fail."""
        with self.assertRaises(exceptions.ClientNotSet):
            len(models.Deployment.objects.all())

    def test_chunk_iterator(self):
        """Pagination follows continue tokens until the END sentinel."""
        mock_response = mock.Mock()
        response_lambda = lambda token: {
            "metadata": {"continue": token},
            "items": [
                {
                    "id": token,
                    "metadata": {
                        "ownerReferences": [{"kind": "Apple", "uid": "123"}],
                        "name": "test",
                    },
                }
            ],
        }
        # should call 6 times, and get END signal, so 7 won't be called
        mock_response.to_dict.side_effect = [
            response_lambda(f"{i}") for i in [1, 2, 3, 4, 5, "END", 7]
        ]
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        query = models.Deployment.objects.using(self.client).all()
        self.assertEqual(len(query), 6)
        # Each page request passes the previous page's continue token.
        expected_call = [
            mock.call.get(_continue=None, limit=100),
            mock.call.get(_continue="1", limit=100),
            mock.call.get(_continue="2", limit=100),
            mock.call.get(_continue="3", limit=100),
            mock.call.get(_continue="4", limit=100),
            mock.call.get(_continue="5", limit=100),
        ]
        self.assertEqual(
            self.dynamic_client.resources.get.return_value.method_calls, expected_call
        )

    def test_limit_with_iterator(self):
        """``limit(n)`` stops paginating once n items were fetched."""
        mock_response = mock.Mock()
        response_lambda = lambda token: {
            "metadata": {"continue": token},
            "items": [
                {
                    "id": token,
                    "metadata": {
                        "ownerReferences": [{"kind": "Apple", "uid": "123"}],
                        "name": "test",
                    },
                }
            ],
        }
        # should call 3 times only
        mock_response.to_dict.side_effect = [
            response_lambda(f"{i}") for i in [1, 2, 3, 4, 5, "END", 7]
        ]
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        query = models.Deployment.objects.using(self.client).limit(3)
        self.assertEqual(len(query), 3)
        expected_call = [
            mock.call.get(_continue=None, limit=100),
            mock.call.get(_continue="1", limit=100),
            mock.call.get(_continue="2", limit=100),
        ]
        self.assertEqual(
            self.dynamic_client.resources.get.return_value.method_calls, expected_call
        )

    def test_deployment_query_basic(self):
        """Filter combinations map onto the expected API query parameters."""
        test_cases = [
            {
                "query": models.Deployment.objects.using(self.client).all(),
                "api_call": {},
            },
            {
                "query": models.Deployment.objects.using(self.client).filter(
                    name="apple"
                ),
                "api_call": {
                    "name": "apple",
                },
            },
            {
                "query": models.Deployment.objects.using(self.client).filter(
                    name="apple", namespace="orange"
                ),
                "api_call": {
                    "name": "apple",
                    "namespace": "orange",
                },
            },
            {
                "query": models.Deployment.objects.using(self.client)
                .filter(name="apple")
                .filter(namespace="orange"),
                "api_call": {
                    "name": "apple",
                    "namespace": "orange",
                },
            },
            {
                "query": models.Deployment.objects.using(self.client).filter(
                    selector="app in (a)"
                ),
                "api_call": {
                    "label_selector": "app in (a)",
                },
            },
            {
                # Chained selectors are comma-joined into one expression.
                "query": models.Deployment.objects.using(self.client)
                .filter(selector="app in (a)")
                .filter(selector="app=b"),
                "api_call": {
                    "label_selector": "app in (a),app=b",
                },
            },
            {
                "query": models.Deployment.objects.using(self.client).filter(
                    field_selector="name=foo"
                ),
                "api_call": {
                    "field_selector": "name=foo",
                },
            },
            {
                "query": models.Deployment.objects.using(self.client)
                .filter(field_selector="name=foo")
                .filter(field_selector="type=bar"),
                "api_call": {
                    "field_selector": "name=foo,type=bar",
                },
            },
        ]
        self.dynamic_client.resources.get.return_value.get.return_value.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": ["test"],
        }
        for case in test_cases:
            with self.subTest(case=case):
                # len() forces queryset evaluation and hence the API call.
                len(case["query"])
                self.assertEqual(
                    self.dynamic_client.resources.method_calls,
                    [mock.call.get(api_version="v1", kind="Deployment")],
                )
                self.assertEqual(
                    self.dynamic_client.resources.get.return_value.method_calls,
                    [mock.call.get(**case["api_call"], _continue=None, limit=100)],
                )
                self.dynamic_client.reset_mock()
        models.Deployment.objects.using(self.client).get(
            name="apple", namespace="orange"
        )
        self.assertEqual(
            self.dynamic_client.resources.get.return_value.method_calls,
            [
                mock.call.get(
                    name="apple", namespace="orange", _continue=None, limit=100
                )
            ],
        )

    def test_owner(self):
        """Owner filters match on both ownerReference kind and uid."""
        mock_data = {"kind": "Apple", "metadata": {"uid": "123"}}
        mock_owner = models.Deployment(client=None, k8s_object=mock_data)
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [
                {
                    "id": 1,
                    "metadata": {
                        "ownerReferences": [{"kind": "Apple", "uid": "123"}],
                        "name": "test",
                    },
                },
                {
                    # Wrong kind ("Appl") — must not match.
                    "id": 2,
                    "metadata": {"ownerReferences": [{"kind": "Appl", "uid": "124"}]},
                },
                {
                    # Wrong uid — must not match mock_owner.
                    "id": 3,
                    "metadata": {"ownerReferences": [{"kind": "Apple", "uid": "125"}]},
                },
                {"id": 4, "metadata": {"ownerReferences": [{"kind": "Apple"}]}},
                {
                    "id": 6,
                    "metadata": {"ownerReferences": [{"kind": "Apple", "uid": "123"}]},
                },
            ],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        query = models.Deployment.objects.using(self.client).filter(owner=mock_owner)
        self.assertEqual(len(query), 2)
        mock_owner2 = models.Deployment(
            client=None, k8s_object={"kind": "Apple", "metadata": {"uid": "124"}}
        )
        # NOTE(review): uid "124" pairs with kind "Appl" in item 2; matching
        # appears to consider kind and uid independently — confirm intended.
        query = models.Deployment.objects.using(self.client).filter(
            owner__in=[mock_owner, mock_owner2]
        )
        self.assertEqual(len(query), 3)
        deployment = query[0]
        self.assertEqual(deployment.name, "test")

    def test_deployment_pods(self):
        """Deployment.pods resolves pods through intermediate ReplicaSets."""
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {"uid": "123"},
                "spec": {"selector": {"matchLabels": {"app": "test"}}},
            },
        )
        mock_rs_response = mock.Mock()
        mock_rs_response.to_dict.return_value = {
            "metadata": {},
            "items": [
                {
                    "id": 1,
                    "metadata": {
                        "ownerReferences": [{"kind": "ReplicaSet", "uid": "123"}],
                        "uid": "234",
                    },
                },
                {
                    "id": 2,
                    "metadata": {
                        "ownerReferences": [{"kind": "ReplicaSet", "uid": "124"}],
                        "uid": "235",
                    },
                },
                {
                    "id": 3,
                    "metadata": {
                        "ownerReferences": [{"kind": "ReplicaSet", "uid": "123"}],
                        "uid": "236",
                    },
                },
            ],
        }
        mock_pod_response = mock.Mock()
        mock_pod_response.to_dict.return_value = {
            "metadata": {},
            "items": [
                {
                    "id": 1,
                    "metadata": {
                        "ownerReferences": [{"kind": "ReplicaSet", "uid": "234"}]
                    },
                },
                {
                    "id": 2,
                    "metadata": {
                        "ownerReferences": [{"kind": "ReplicaSet", "uid": "235"}]
                    },
                },
                {"id": 4, "metadata": {"ownerReferences": [{"kind": "ReplicaSet"}]}},
            ],
        }
        # pod come first because owner filter is POST operator
        self.dynamic_client.resources.get.return_value.get.side_effect = [
            mock_pod_response,
            mock_rs_response,
        ]
        self.assertEqual(len(deployment.pods.all()), 1)

    def test_refresh(self):
        """``refresh()`` re-reads the object and updates its attributes."""
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {"uid": "123", "name": "foo"},
                "spec": {"selector": {"matchLabels": {"app": "test"}}},
            },
        )
        self.assertEqual(deployment.name, "foo")
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {"metadata": {"name": "bar"}}
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        deployment.refresh()
        self.assertEqual(deployment.name, "bar")

    def test_delete(self):
        """Deleting removes both the resource and its pharos Variable."""
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {
                    "name": "nginx-deployment",
                    "annotations": {
                        "deployment.kubernetes.io/revision": "1",
                        "pharos.py/template": "test.yaml",
                        "pharos.py/variable": "deployment-nginx-deployment-default",
                    },
                    # NOTE(review): "spec" is nested inside "metadata" here —
                    # looks unintentional but harmless for this test; confirm.
                    "spec": {"selector": {"matchLabels": {"app": "test"}}},
                }
            },
        )
        mock_response = {
            "metadata": {
                "name": "nginx-deployment",
                "namespace": "default",
                "annotations": {
                    "deployment.kubernetes.io/revision": "1",
                    "pharos.py/template": "test.yaml",
                    "pharos.py/variable": "deployment-nginx-deployment-default",
                },
            },
            "json": {"label_name": "foo"},
        }
        self.dynamic_client.resources.get.return_value.get.return_value.to_dict.return_value = (
            mock_response
        )
        deployment.delete()
        self.assertSequenceEqual(
            self.dynamic_client.resources.method_calls,
            [
                mock.call.get(api_version="v1", kind="Deployment"),
                mock.call.get(api_version="pharos.py/v1", kind="Variable"),
                mock.call.get(api_version="v1", kind="Deployment"),
            ],
        )
        self.assertSequenceEqual(
            self.dynamic_client.resources.get.return_value.method_calls,
            [
                mock.call.get(name="nginx-deployment", namespace="default"),
                mock.call.delete("deployment-nginx-deployment-default", None),
                mock.call.delete("nginx-deployment", "default"),
            ],
        )

    def test_create_deployment_wrong_resource(self):
        """Creating via the wrong model class raises ResourceNotMatch."""
        mock_response = {
            "metadata": {
                "name": "foobar",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            }
        }
        self.dynamic_client.resources.get.return_value.create.return_value.to_dict.return_value = (
            mock_response
        )
        with self.assertRaises(exceptions.ResourceNotMatch):
            models.Service.objects.using(self.client).create(
                "test.yaml", {"label_name": "foo"}
            )
class ServicePodsTestCase(BaseCase):
    """Service.pods should list pods filtered by the service's selector."""

    def test_service_pods(self):
        """The selector dict is flattened into a label_selector expression."""
        service = models.Service(
            client=self.client,
            k8s_object={
                "metadata": {"uid": "123"},
                "spec": {"selector": {"foo": "bar"}},
            },
        )
        mock_rs_response = mock.Mock()
        mock_rs_response.to_dict.return_value = {}
        self.dynamic_client.resources.get.return_value.get.return_value = (
            mock_rs_response
        )
        # len() forces evaluation; we only care about the resulting API call.
        len(service.pods.all())
        self.assertEqual(
            self.dynamic_client.resources.get.return_value.method_calls,
            [mock.call.get(_continue=None, label_selector="foo=bar", limit=100, namespace=None)],
        )
class CustomLookup(lookups.Lookup):
    """POST-stage lookup that matches every object (exercises add_lookup)."""
    name = "foo"  # usable in filters as ``field__foo=...``
    type = lookups.Lookup.POST  # applied client-side, after the API call

    def validate(self, obj, data):
        # Always match; tests only verify the lookup machinery is invoked.
        return True
# Register the custom lookup so ``task__foo=...`` filters resolve.
fields.JsonPathField.add_lookup(CustomLookup)
class CustomModel(models.Model):
    """Model with JSONPath-mapped fields, for field/lookup tests."""
    # Fields resolve into the raw k8s object via their JSONPath expressions.
    id = fields.JsonPathField(path="id")
    task = fields.JsonPathField(path="job.task")

    class Meta:
        api_version = "v1"
        kind = "CustomModel"
class CustomModelTestCase(BaseCase):
    """Filtering behaviour of JSONPath fields and registered lookups."""

    def test_custom_model(self):
        """Field values resolve from the raw object; name/namespace built in."""
        mock_data = {
            "kind": "CustomModel",
            "job": {"task": "task1"},
            "metadata": {"name": "custom", "namespace": "default"},
        }
        mock_obj = CustomModel(client=None, k8s_object=mock_data)
        self.assertEqual(mock_obj.task, "task1")
        self.assertEqual(mock_obj.name, "custom")
        self.assertEqual(mock_obj.namespace, "default")

    def test_custom_filed_filter(self):
        """Exact-match and ``__in`` filters on a JSONPath field."""
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [
                {"id": 1, "job": {"task": "task1"}},
                {"id": 2, "job": {"task": "task2"}},
                {"id": 3, "job": {"task": "task3"}},
            ],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        queryset = CustomModel.objects.using(self.client).filter(task="task3")
        self.assertEqual(len(queryset), 1)
        self.assertEqual(queryset[0].task, "task3")
        queryset = CustomModel.objects.using(self.client).filter(
            task__in=["task1", "task3"]
        )
        self.assertEqual(len(queryset), 2)
        self.assertEqual(queryset[0].task, "task1")
        self.assertEqual(queryset[1].task, "task3")

    def test_custom_lookup(self):
        """The registered ``foo`` lookup accepts everything."""
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [{"id": 1, "job": {"task": "task1"}}],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        queryset = CustomModel.objects.using(self.client).filter(task__foo="task3")
        self.assertEqual(len(queryset), 1)

    def test_contains(self):
        """``__contains`` does substring matching on string values."""
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [
                {"id": 1, "job": {"task": "foo"}},
                {"id": 2, "job": {"task": "bar"}},
                {"id": 3, "job": {"task": "barfoobar"}},
            ],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        queryset = CustomModel.objects.using(self.client).filter(task__contains="foo")
        self.assertEqual(len(queryset), 2)

    def test_contains_list(self):
        """``__contains`` with a list requires all elements to be present."""
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [
                {"id": 1, "job": {"task": ["foo"]}},
                {"id": 2, "job": {"task": ["foo", "bar"]}},
                {"id": 3, "job": {"task": ["foo", "bar", "new"]}},
            ],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        queryset = CustomModel.objects.using(self.client).filter(
            task__contains=["foo", "new"]
        )
        self.assertEqual(len(queryset), 1)
        self.assertEqual(queryset[0].task, ["foo", "bar", "new"])

    def test_startswith(self):
        """``__startswith`` matches string prefixes only."""
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [
                {"id": 1, "job": {"task": "foofoo"}},
                {"id": 2, "job": {"task": "fobar"}},
                {"id": 3, "job": {"task": "barfoobar"}},
            ],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        queryset = CustomModel.objects.using(self.client).filter(task__startswith="foo")
        self.assertEqual(len(queryset), 1)

    def test_compare(self):
        """Numeric comparison lookups: gt, gte, lt, lte."""
        mock_response = mock.Mock()
        mock_response.to_dict.side_effect = lambda: {
            "metadata": {},
            "items": [
                {"id": 1, "job": {"task": "foofoo"}},
                {"id": 2, "job": {"task": "fobar"}},
                {"id": 3, "job": {"task": "barfoobar"}},
            ],
        }
        self.dynamic_client.resources.get.return_value.get.return_value = mock_response
        queryset = CustomModel.objects.using(self.client).filter(id__gt=1)
        self.assertEqual(len(queryset), 2)
        queryset = CustomModel.objects.using(self.client).filter(id__gt=2)
        self.assertEqual(len(queryset), 1)
        queryset = CustomModel.objects.using(self.client).filter(id__gte=2)
        self.assertEqual(len(queryset), 2)
        queryset = CustomModel.objects.using(self.client).filter(id__lt=4)
        self.assertEqual(len(queryset), 3)
        queryset = CustomModel.objects.using(self.client).filter(id__lt=1)
        self.assertEqual(len(queryset), 0)
        queryset = CustomModel.objects.using(self.client).filter(id__lte=1)
        self.assertEqual(len(queryset), 1)
class Step:
    """Base for declarative expected-call steps consumed by assertQuery."""
    parent = None  # mock.call chain this step extends; replaced when inherit=True
    client = None  # pharos client; injected by assertQuery before building .call
class GetSpec(Step):
    """Expected call that fetches a resource spec by api_version and kind."""

    parent = mock.call.resources

    def __init__(self, api_version, kind, inherit=False):
        self.api_version = api_version
        self.kind = kind
        self.inherit = inherit

    @property
    def call(self):
        # Build the expected mock.call with explicit keyword arguments.
        kwargs = {"api_version": self.api_version, "kind": self.kind}
        return self.parent.get(**kwargs)
class GetResource(Step):
    """Expected call that reads one resource by name and namespace."""

    parent = mock.call.resources.get()

    def __init__(self, name, namespace, inherit=False, limit=False):
        self.name = name
        self.namespace = namespace
        self.inherit = inherit
        self.limit = limit

    @property
    def call(self):
        kwargs = {'name': self.name, 'namespace': self.namespace}
        if self.limit:
            # Paginated read: mirror the client's default page parameters.
            kwargs.update(_continue=None, limit=100)
        return self.parent.get(**kwargs)
class CreateResource(Step):
    """Expected ``create`` call whose body is rendered from a Jinja template."""

    parent = mock.call.resources.get()

    def __init__(
        self,
        template,
        variable,
        namespace="default",
        inherit=False,
        internal=False,
        dry_run=False,
    ):
        self.template = template  # template file name, e.g. "test.yaml"
        self.variable = variable  # context dict rendered into the template
        self.namespace = namespace
        self.inherit = inherit
        self.internal = internal  # internal (pharos-owned) template lookup
        self.dry_run = dry_run

    @property
    def call(self):
        # Render the same body the client under test should have produced.
        loader = FileSystemLoader("./tests")
        engine = jinja.JinjaEngine(self.client, loader=loader, internal=self.internal)
        template_backend = backend.TemplateBackend()
        template_backend.set_engine(engine)
        body = template_backend.render(
            self.namespace, self.template, self.variable, self.internal
        )
        params = {"body": body, "namespace": self.namespace}
        if self.dry_run:
            # Server-side dry run is requested via a query parameter.
            params["query_params"] = [("dryRun", "All")]
        return self.parent.create(**params)
class UpdateResource(Step):
    """Expected ``replace`` call whose body is rendered from a Jinja template."""

    parent = mock.call.resources.get()

    def __init__(
        self,
        template,
        variable,
        namespace="default",
        inherit=False,
        internal=False,
        dry_run=False,
        resource_version=None
    ):
        self.template = template
        self.variable = variable
        self.namespace = namespace
        self.inherit = inherit
        self.internal = internal
        self.dry_run = dry_run
        # Expected metadata.resourceVersion on the replace body (None allowed).
        self.resource_version = resource_version

    @property
    def call(self):
        # Render the same body the client under test should have produced.
        loader = FileSystemLoader("./tests")
        engine = jinja.JinjaEngine(self.client, loader=loader, internal=self.internal)
        template_backend = backend.TemplateBackend()
        template_backend.set_engine(engine)
        body = template_backend.render(
            self.namespace, self.template, self.variable, self.internal
        )
        body["metadata"]["resourceVersion"] = self.resource_version
        params = {"body": body, "namespace": self.namespace}
        # ``replace`` always carries query_params (empty unless dry-run).
        params["query_params"] = []
        if self.dry_run:
            params["query_params"] = [("dryRun", "All")]
        return self.parent.replace(**params)
class DeleteResource(Step):
    """Expected ``delete`` call identified by positional name and namespace."""

    parent = mock.call.resources.get()

    def __init__(self, name, namespace, inherit=False):
        self.name = name
        self.namespace = namespace
        self.inherit = inherit

    @property
    def call(self):
        args = (self.name, self.namespace)
        return self.parent.delete(*args)
class ToDict(Step):
    """Expected ``to_dict`` call on the previous step's result."""

    parent = mock.call.resources.get().create()

    def __init__(self, inherit=False):
        self.inherit = inherit

    @property
    def call(self):
        target = self.parent
        return target.to_dict()
class ResourceCreateTestCase(BaseCase):
    """Create flows, expressed as declarative Step sequences."""

    def assertQuery(self, steps, query):
        """Run ``query`` and assert the dynamic client saw exactly these steps."""
        expected_calls = []
        for step in steps:
            step.client = self.client
            # ``inherit`` chains this step onto the previous expected call.
            if step.inherit:
                step.parent = expected_calls[-1]
            expected_calls.append(step.call)
        query()
        self.assertSequenceEqual(self.dynamic_client.mock_calls, expected_calls)

    def test_create_deployment(self):
        """Create renders the template, then stores CRD + Variable records."""
        mock_response = {
            "metadata": {
                "name": "foobar",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            }
        }
        self.dynamic_client.resources.get.return_value.create.return_value.to_dict.return_value = (
            mock_response
        )
        expected_steps = [
            GetSpec("v1", "Deployment"),
            CreateResource("test.yaml", {"label_name": "foo"}, inherit=True),
            ToDict(inherit=True),
            GetSpec("apiextensions.k8s.io/v1", "CustomResourceDefinition"),
            CreateResource("variable_crd.yaml", {}, inherit=True, internal=True),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            CreateResource(
                "variables.yaml",
                {"name": "deployment-foobar-default", "value": {"label_name": "foo"}},
                inherit=True,
                internal=True,
            ),
            ToDict(inherit=True),
        ]
        query = lambda: models.Deployment.objects.using(self.client).create(
            "test.yaml", {"label_name": "foo"}
        )
        self.assertQuery(expected_steps, query)

    def test_create_deployment_namespace(self):
        """A non-default namespace propagates into the Variable name too."""
        mock_response = {
            "metadata": {
                "name": "foobar",
                "namespace": "test",
                "annotations": {"pharos.py/template": "test.yaml"},
            }
        }
        self.dynamic_client.resources.get.return_value.create.return_value.to_dict.return_value = (
            mock_response
        )
        expected_steps = [
            GetSpec("v1", "Deployment"),
            CreateResource(
                "test.yaml", {"label_name": "foo"}, inherit=True, namespace="test"
            ),
            ToDict(inherit=True),
            GetSpec("apiextensions.k8s.io/v1", "CustomResourceDefinition"),
            CreateResource("variable_crd.yaml", {}, inherit=True, internal=True),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            CreateResource(
                "variables.yaml",
                {"name": "deployment-foobar-test", "value": {"label_name": "foo"}},
                inherit=True,
                internal=True,
                namespace="test",
            ),
            ToDict(inherit=True),
        ]
        query = lambda: models.Deployment.objects.using(self.client).create(
            "test.yaml", {"label_name": "foo"}, namespace="test"
        )
        self.assertQuery(expected_steps, query)

    def test_create_deployment_dry(self):
        """Dry-run create skips the CRD/Variable bookkeeping entirely."""
        mock_response = {
            "metadata": {
                "name": "foobar",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            }
        }
        self.dynamic_client.resources.get.return_value.create.return_value.to_dict.return_value = (
            mock_response
        )
        expected_steps = [
            GetSpec("v1", "Deployment"),
            CreateResource(
                "test.yaml", {"label_name": "foo"}, inherit=True, dry_run=True
            ),
            ToDict(inherit=True),
        ]
        query = lambda: models.Deployment.objects.using(self.client).create(
            "test.yaml", {"label_name": "foo"}, dry_run=True
        )
        self.assertQuery(expected_steps, query)
class ResourceUpdateTestCase(BaseCase):
    """Sync / deploy (replace) flows, expressed as declarative Step sequences."""

    def assertQuery(self, steps, query):
        """Run ``query`` and assert the dynamic client saw exactly these steps.

        Same helper as in ResourceCreateTestCase: ``inherit`` chains a step
        onto the previous expected call.
        """
        expected_calls = []
        for step in steps:
            step.client = self.client
            if step.inherit:
                step.parent = expected_calls[-1]
            expected_calls.append(step.call)
        query()
        self.assertSequenceEqual(self.dynamic_client.mock_calls, expected_calls)

    def test_sync_deployment(self):
        """sync() replaces the resource and recreates its Variable record."""
        mock_response = {
            "metadata": {
                "name": "foobar",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            }
        }
        self.dynamic_client.resources.get.return_value.create.return_value.to_dict.return_value = (
            mock_response
        )
        self.dynamic_client.resources.get.return_value.replace.return_value.to_dict.return_value = (
            mock_response
        )
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {
                    "name": "nginx-deployment",
                    "annotations": {
                        "deployment.kubernetes.io/revision": "1",
                    },
                    "spec": {"selector": {"matchLabels": {"app": "test"}}},
                }
            },
        )
        query = lambda: deployment.sync("test.yaml", {"label_name": "foo"})
        expected_steps = [
            GetSpec("v1", "Deployment"),
            GetResource('nginx-deployment', 'default', inherit=True),
            ToDict(inherit=True),
            GetSpec("v1", "Deployment"),
            UpdateResource(
                "test.yaml", {"label_name": "foo"}, inherit=True
            ),
            ToDict(inherit=True),
            GetSpec("apiextensions.k8s.io/v1", "CustomResourceDefinition"),
            CreateResource("variable_crd.yaml", {}, inherit=True, internal=True),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            # Old variable is deleted before the fresh one is created.
            DeleteResource('deployment-foobar-default', None),
            GetSpec("pharos.py/v1", "Variable"),
            CreateResource(
                "variables.yaml",
                {"name": "deployment-foobar-default", "value": {"label_name": "foo"}},
                inherit=True,
                internal=True,
            ),
            ToDict(inherit=True),
        ]
        self.assertQuery(expected_steps, query)

    def test_update_deployment(self):
        """deploy() re-renders from the stored template/variable annotations."""
        mock_response = {
            "metadata": {
                "name": "nginx-deployment",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            },
            "json": {"label_name": "foo"},
        }
        self.dynamic_client.resources.get.return_value.get.return_value.to_dict.return_value = (
            mock_response
        )
        self.dynamic_client.resources.get.return_value.replace.return_value.to_dict.return_value = (
            mock_response
        )
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {
                    "name": "nginx-deployment",
                    "annotations": {
                        "deployment.kubernetes.io/revision": "1",
                        "pharos.py/template": "test.yaml",
                        "pharos.py/variable": "deployment-nginx-deployment-default",
                    },
                    "spec": {"selector": {"matchLabels": {"app": "test"}}},
                }
            },
        )
        query = lambda: deployment.deploy()
        expected_steps = [
            GetSpec("v1", "Deployment"),
            GetResource('nginx-deployment', 'default', inherit=True),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            GetResource('deployment-nginx-deployment-default', 'default', inherit=True, limit=True),
            ToDict(inherit=True),
            GetSpec("v1", "Deployment"),
            UpdateResource(
                "test.yaml", {"label_name": "foo"}, inherit=True
            ),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            UpdateResource(
                "variables.yaml",
                {"name": "deployment-nginx-deployment-default", "value": {"label_name": "foo"}},
                namespace='default',
                inherit=True,
                internal=True,
            ),
            ToDict(inherit=True)
        ]
        self.assertQuery(expected_steps, query)

    def test_update_deployment_dry(self):
        """Dry-run deploy() replaces the resource but not the Variable."""
        mock_response = {
            "metadata": {
                "name": "nginx-deployment",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            },
            "json": {"label_name": "foo"},
        }
        self.dynamic_client.resources.get.return_value.get.return_value.to_dict.return_value = (
            mock_response
        )
        self.dynamic_client.resources.get.return_value.replace.return_value.to_dict.return_value = (
            mock_response
        )
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {
                    "name": "nginx-deployment",
                    "annotations": {
                        "deployment.kubernetes.io/revision": "1",
                        "pharos.py/template": "test.yaml",
                        "pharos.py/variable": "deployment-nginx-deployment-default",
                    },
                    "spec": {"selector": {"matchLabels": {"app": "test"}}},
                }
            },
        )
        query = lambda: deployment.deploy(dry_run=True)
        expected_steps = [
            GetSpec("v1", "Deployment"),
            GetResource('nginx-deployment', 'default', inherit=True),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            GetResource('deployment-nginx-deployment-default', 'default', inherit=True, limit=True),
            ToDict(inherit=True),
            GetSpec("v1", "Deployment"),
            UpdateResource(
                "test.yaml", {"label_name": "foo"}, inherit=True, dry_run=True
            ),
            ToDict(inherit=True),
        ]
        self.assertQuery(expected_steps, query)

    def test_update_deployment_variable(self):
        """set_variable() overrides the stored variable used by deploy()."""
        mock_response = {
            "metadata": {
                "name": "nginx-deployment",
                "namespace": "default",
                "annotations": {"pharos.py/template": "test.yaml"},
            },
            "json": {"label_name": "foo"},
        }
        self.dynamic_client.resources.get.return_value.get.return_value.to_dict.return_value = (
            mock_response
        )
        self.dynamic_client.resources.get.return_value.replace.return_value.to_dict.return_value = (
            mock_response
        )
        deployment = models.Deployment(
            client=self.client,
            k8s_object={
                "metadata": {
                    "name": "nginx-deployment",
                    "annotations": {
                        "deployment.kubernetes.io/revision": "1",
                        "pharos.py/template": "test.yaml",
                        "pharos.py/variable": "deployment-nginx-deployment-default",
                    },
                    "spec": {"selector": {"matchLabels": {"app": "test"}}},
                }
            },
        )
        deployment.set_variable({"label_name": "bar"})
        query = lambda: deployment.deploy()
        expected_steps = [
            GetSpec("v1", "Deployment"),
            GetResource('nginx-deployment', 'default', inherit=True),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            GetResource('deployment-nginx-deployment-default', 'default', inherit=True, limit=True),
            ToDict(inherit=True),
            GetSpec("v1", "Deployment"),
            UpdateResource(
                "test.yaml", {"label_name": "bar"}, inherit=True
            ),
            ToDict(inherit=True),
            GetSpec("pharos.py/v1", "Variable"),
            UpdateResource(
                "variables.yaml",
                {"name": "deployment-nginx-deployment-default", "value": {"label_name": "bar"}},
                namespace='default',
                inherit=True,
                internal=True,
            ),
            ToDict(inherit=True)
        ]
        self.assertQuery(expected_steps, query)
| 35.877264
| 103
| 0.51517
| 3,235
| 35,662
| 5.527975
| 0.070788
| 0.044903
| 0.037578
| 0.049432
| 0.812783
| 0.776939
| 0.745736
| 0.731701
| 0.710563
| 0.686126
| 0
| 0.010455
| 0.34827
| 35,662
| 993
| 104
| 35.913394
| 0.758971
| 0.003898
| 0
| 0.57461
| 0
| 0
| 0.144402
| 0.02379
| 0
| 0
| 0
| 0
| 0.053452
| 1
| 0.044543
| false
| 0
| 0.007795
| 0.004454
| 0.091314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
959f6e1cc40b940116e8a1e57c955d1368605067
| 58
|
py
|
Python
|
__init__.py
|
hoefkensj/reroot
|
890e6086272577d599ced9fe52ddac1bfce60d85
|
[
"Unlicense"
] | null | null | null |
__init__.py
|
hoefkensj/reroot
|
890e6086272577d599ced9fe52ddac1bfce60d85
|
[
"Unlicense"
] | null | null | null |
__init__.py
|
hoefkensj/reroot
|
890e6086272577d599ced9fe52ddac1bfce60d85
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
from . import main
from . import cfg
| 19.333333
| 21
| 0.724138
| 10
| 58
| 4.2
| 0.8
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155172
| 58
| 3
| 22
| 19.333333
| 0.857143
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
252201a349373fe37cf0e804e0412ea1052bd627
| 236
|
py
|
Python
|
pages/urls.py
|
Louis86/ecommerce
|
d47f13bfe4772b33a9cb6b8ec08183525fc4655d
|
[
"MIT"
] | null | null | null |
pages/urls.py
|
Louis86/ecommerce
|
d47f13bfe4772b33a9cb6b8ec08183525fc4655d
|
[
"MIT"
] | null | null | null |
pages/urls.py
|
Louis86/ecommerce
|
d47f13bfe4772b33a9cb6b8ec08183525fc4655d
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
# TODO: add a permalink route (see the commented-out alternatives below).
urlpatterns = [
    path('', views.index, name='index'),
    # path('', views.index, {'pagename':''}, name='home'),
    # path('<str:pagename>', views.index, name='index')
]
| 26.222222
| 57
| 0.631356
| 29
| 236
| 5.137931
| 0.482759
| 0.201342
| 0.187919
| 0.255034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 236
| 8
| 58
| 29.5
| 0.745
| 0.483051
| 0
| 0
| 0
| 0
| 0.042017
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
25457beb1e16b4da6d66c068294d2ffd7aad3767
| 16,530
|
py
|
Python
|
AuxillaryFunctions/GWASPlots.py
|
daverblair/CrypticPhenotypeAnalysisScripts
|
0d722f079549ac68b7863ac885f7295d236806b0
|
[
"MIT"
] | null | null | null |
AuxillaryFunctions/GWASPlots.py
|
daverblair/CrypticPhenotypeAnalysisScripts
|
0d722f079549ac68b7863ac885f7295d236806b0
|
[
"MIT"
] | null | null | null |
AuxillaryFunctions/GWASPlots.py
|
daverblair/CrypticPhenotypeAnalysisScripts
|
0d722f079549ac68b7863ac885f7295d236806b0
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
from scipy.stats.mstats import mquantiles
from scipy.stats import spearmanr,chi2,beta
import matplotlib.pyplot as plt
from matplotlib import cm
import seaborn as sns
# Module-wide plot styling: bold seaborn 'talk' theme with thick axis lines
# and large fonts, applied globally at import time.
sns.set(context='talk',color_codes=True,style='ticks',font='Arial',font_scale=2,rc={'axes.linewidth':5,"font.weight":"bold",'axes.labelweight':"bold",'xtick.major.width':4,'xtick.minor.width': 2})
# Shared palette: 7 samples spread across a 12-level viridis colormap,
# plus fixed grey/red/blue accent colors used by the plotting functions.
# NOTE(review): cm.get_cmap is deprecated in matplotlib >= 3.7 -- confirm
# the pinned matplotlib version before upgrading.
cmap = cm.get_cmap('viridis', 12)
color_list=[cmap(x) for x in [0.0,0.1,0.25,0.5,0.75,0.9,1.0]]
grey_color=(0.25, 0.25, 0.25)
red_color = '#d10e00'
blue_color='#5DA4FF'
def LambdaGC(p_val_vec,quantile_list=(0.5,),scaling_factor=None):
    """Compute the genomic-inflation factor (lambda_GC) of a P-value vector.

    Lambda is the ratio of the observed to the expected chi-square quantile
    (1 degree of freedom) at each requested P-value quantile.

    Args:
        p_val_vec: 1-D numpy array of P-values.
        quantile_list: quantiles at which to evaluate lambda
            (default: the median only).
        scaling_factor: optional scaling count; when given, lambda is
            rescaled as 1 + (lambda - 1) * scaling_factor / len(p_val_vec).
            Presumably an effective number of tests -- TODO confirm with
            callers.

    Returns:
        numpy array of lambda values, one per requested quantile.
    """
    # Tuple default replaces the original mutable-list default argument.
    quantile_list=np.array(quantile_list)
    obs_pval_quantiles=mquantiles(p_val_vec,prob=quantile_list,alphap=1.0,betap=1.0)
    # Observed vs expected chi-square quantile at 1 d.o.f. (shared by both
    # return paths; the original duplicated this expression).
    lambda_gc = chi2.ppf(1.0-obs_pval_quantiles, 1) / chi2.ppf(1.0-quantile_list,1)
    if scaling_factor is not None:
        return 1+(lambda_gc-1.0)*(scaling_factor/p_val_vec.shape[0])
    return lambda_gc
def QQPlot(data_table,p_value_column=None,maf_column=None,freq_bins=None,n_quantiles=1000,error_ci=0.95,min_p=1e-30,hide_hla=False,error_type='experimental',lambda_gc_scale=None):
    """Draw a quantile-quantile plot of observed vs expected GWAS P-values.

    Args:
        data_table: DataFrame of association results; must contain the
            P-value column and, when hide_hla is used, 'CHROM' and 'POS'.
        p_value_column: P-value column name; defaults to 'P'.
        maf_column: minor-allele-frequency column name; defaults to 'MAF'
            (a NaN-filled 'MAF' column is created if absent).
        freq_bins: optional sequence of MAF bin edges; when given, one QQ
            curve is drawn per bin.
        n_quantiles: number of quantile points per curve.
        error_ci: coverage of the beta-distribution confidence band.
        min_p: floor applied to P-values before taking -log10.
        hide_hla: drop chr6 variants in positions 28,477,797-33,448,354
            (extended MHC region) before plotting.
        error_type: 'experimental' (band around the observed curve) or
            'theoretical' (band around the identity line).
        lambda_gc_scale: optional count used to rescale the lambda_GC
            annotation via LambdaGC(..., scaling_factor=...).

    Returns:
        (figure, axis) matplotlib handles.
    """
    f, axis = plt.subplots(1, 1,figsize=(8,8))
    axis.spines['right'].set_visible(False)
    axis.spines['top'].set_visible(False)
    # Fall back to conventional column names when none are supplied.
    if p_value_column==None:
        p_value_column='P'
    if maf_column==None:
        if 'MAF' in data_table.columns:
            maf_column='MAF'
        else:
            # No MAF available: create an all-NaN column (mutates the
            # caller's DataFrame in place).
            data_table['MAF']=np.zeros(len(data_table))*np.nan
    if hide_hla:
        # Exclude the extended MHC/HLA region on chromosome 6.
        chr6 = data_table.loc[(data_table.CHROM==6)]
        excluded=chr6.index[np.logical_and(chr6.POS>=28477797,chr6.POS<=33448354)]
        p_maf_table=data_table.drop(excluded)[[maf_column,p_value_column]]
    elif maf_column is not None:
        p_maf_table=data_table[[maf_column,p_value_column]]
    else:
        p_maf_table=data_table[[p_value_column]]
    assert error_type in ['experimental','theoretical'],"Error type must be in ['experimental','theoretical']"
    min_vals_obs=[]
    min_vals_exp=[]
    if freq_bins is None:
        # Single curve over all variants.
        p_input= p_maf_table[p_value_column].values
        p_input[p_input<min_p]=min_p
        # Expected quantiles: dense linear spacing for the first half of the
        # points, log spacing for the tail.
        quantile_thresholds = np.concatenate([np.arange(1,np.floor(0.5*n_quantiles))/p_input.shape[0], np.logspace(np.log10(np.floor(0.5*n_quantiles)/p_input.shape[0]), 0, int(np.ceil(0.5*n_quantiles))+1)[:-1]])
        obs_quantiles = mquantiles(p_input, prob=quantile_thresholds, alphap=0.0, betap=1.0, limit=(0.0, 1.0))
        axis.plot(-np.log10(quantile_thresholds),-np.log10(obs_quantiles),'.',color=color_list[0],ms=15)
        # Annotate the genomic-inflation factor (optionally rescaled).
        if lambda_gc_scale is not None:
            axis.text(1,5,r'$\lambda_{IF}$'+'={0:1.2f}'.format(LambdaGC(p_input)[0])+' ('+r'$\lambda^{'+'{0:d}'.format(lambda_gc_scale)+'}_{IF}$'+'={0:1.3f}'.format(LambdaGC(p_input,scaling_factor=lambda_gc_scale)[0])+')',fontsize=24,fontweight='bold',color=color_list[0])
        else:
            axis.text(1,5,r'$\lambda_{IF}$'+'={0:1.2f}'.format(LambdaGC(p_input)[0]),fontsize=24,fontweight='bold',color=color_list[0])
        min_vals_obs+=[obs_quantiles.min()]
        min_vals_exp+=[quantile_thresholds.min()]
        if error_type=='experimental':
            # Beta-distribution CI for order statistics around the observed curve.
            ci_vecs = beta.interval(error_ci, len(p_maf_table)*quantile_thresholds, len(p_maf_table) - len(p_maf_table)*quantile_thresholds)
            axis.fill_between( -np.log10(quantile_thresholds), -np.log10(obs_quantiles/quantile_thresholds*ci_vecs[0]), -np.log10(obs_quantiles/quantile_thresholds*ci_vecs[1]), color=color_list[0], alpha=0.25, label='{0:2d}% CI'.format(int(100*error_ci)))
    else:
        # One curve per MAF bin; the final bin is handled after the loop so
        # its upper edge can be inclusive.
        # NOTE(review): if len(freq_bins) < 3 the loop body never runs and
        # the 'i+=1' below raises NameError -- confirm callers always pass
        # at least 3 bin edges.
        for i in range(len(freq_bins)-2):
            p_input= p_maf_table[np.logical_and(p_maf_table[maf_column]>=freq_bins[i],p_maf_table[maf_column]<freq_bins[i+1])][p_value_column].values
            p_input[p_input<min_p]=min_p
            quantile_thresholds = np.concatenate([np.arange(1,np.floor(0.5*n_quantiles))/p_input.shape[0], np.logspace(np.log10(np.floor(0.5*n_quantiles)/p_input.shape[0]), 0, int(np.ceil(0.5*n_quantiles))+1)[:-1]])
            obs_quantiles = mquantiles(p_input, prob=quantile_thresholds, alphap=0.0, betap=1.0, limit=(0.0, 1.0))
            axis.plot(-np.log10(quantile_thresholds),-np.log10(obs_quantiles),'.',ms=15,color=color_list[(i*2)%len(color_list)],label=r'{0:.1e}$\leq$ MAF$<${1:.1e}'.format(freq_bins[i],freq_bins[i+1]))
            if error_type=='experimental':
                ci_vecs = beta.interval(error_ci, len(p_maf_table)*quantile_thresholds, len(p_maf_table) - len(p_maf_table)*quantile_thresholds)
                axis.fill_between( -np.log10(quantile_thresholds), -np.log10(obs_quantiles/quantile_thresholds*ci_vecs[0]), -np.log10(obs_quantiles/quantile_thresholds*ci_vecs[1]), color=color_list[(i*2)%len(color_list)], alpha=0.25, label='{0:2d}% CI'.format(int(100*error_ci)))
            if lambda_gc_scale is not None:
                axis.text(1,5-i,r'$\lambda_{IF}$'+'={0:1.2f}'.format(LambdaGC(p_input)[0])+' ('+r'$\lambda^{'+'{0:d}'.format(lambda_gc_scale)+'}_{IF}$'+'={0:1.3f}'.format(LambdaGC(p_input,scaling_factor=lambda_gc_scale)[0])+')',fontsize=24,fontweight='bold',color=color_list[i*2])
            else:
                axis.text(1,5-i,r'$\lambda_{IF}$'+'={0:1.2f}'.format(LambdaGC(p_input)[0]),fontsize=24,fontweight='bold',color=color_list[i*2])
            min_vals_obs+=[obs_quantiles.min()]
            min_vals_exp+=[quantile_thresholds.min()]
        # Last bin: upper edge inclusive (<= instead of <).
        i+=1
        p_input= p_maf_table[np.logical_and(p_maf_table[maf_column]>=freq_bins[i],p_maf_table[maf_column]<=freq_bins[i+1])][p_value_column].values
        p_input[p_input<min_p]=min_p
        quantile_thresholds = np.concatenate([np.arange(1,np.floor(0.5*n_quantiles))/p_input.shape[0], np.logspace(np.log10(np.floor(0.5*n_quantiles)/p_input.shape[0]), 0, int(np.ceil(0.5*n_quantiles))+1)[:-1]])
        obs_quantiles = mquantiles(p_input, prob=quantile_thresholds, alphap=0.0, betap=1.0, limit=(0.0, 1.0))
        axis.plot(-np.log10(quantile_thresholds),-np.log10(obs_quantiles),'o',color=color_list[(i*2)%len(color_list)],mew=0.0,label=r'{0:.1e}$\leq$ MAF$\leq${1:.1e}'.format(freq_bins[i],0.5))
        if error_type=='experimental':
            ci_vecs = beta.interval(error_ci, len(p_maf_table)*quantile_thresholds, len(p_maf_table) - len(p_maf_table)*quantile_thresholds)
            axis.fill_between( -np.log10(quantile_thresholds), -np.log10(obs_quantiles/quantile_thresholds*ci_vecs[0]), -np.log10(obs_quantiles/quantile_thresholds*ci_vecs[1]), color=color_list[(i*2)%len(color_list)], alpha=0.25, label='{0:2d}% CI'.format(int(100*error_ci)))
        if lambda_gc_scale is not None:
            axis.text(1,5-i,r'$\lambda_{IF}$'+'={0:1.2f}'.format(LambdaGC(p_input)[0])+' ('+r'$\lambda^{'+'{0:d}'.format(lambda_gc_scale)+'}_{IF}$'+'={0:1.3f}'.format(LambdaGC(p_input,scaling_factor=lambda_gc_scale)[0])+')',fontsize=24,fontweight='bold',color=color_list[i*2])
        else:
            axis.text(1,5-i,r'$\lambda_{IF}$'+'={0:1.2f}'.format(LambdaGC(p_input)[0]),fontsize=24,fontweight='bold',color=color_list[i*2])
        min_vals_obs+=[obs_quantiles.min()]
        min_vals_exp+=[quantile_thresholds.min()]
    axis.set_xlim(0.0,np.ceil(-np.log10(min(min_vals_exp))))
    exp_p_vals = np.linspace(0,axis.get_xlim()[1],100)
    if error_type=='theoretical':
        # Band around the identity line instead of the observed curve.
        ci_vecs = beta.interval(error_ci, len(p_maf_table)*(10**(-1.0*exp_p_vals)), len(p_maf_table) - len(p_maf_table)*(10**(-1.0*exp_p_vals)))
        axis.fill_between(exp_p_vals, -np.log10(ci_vecs[0]), -np.log10(ci_vecs[1]), color=grey_color, alpha=0.25, label='{0:2d}% CI'.format(int(100*error_ci)))
    # Identity (null-expectation) line.
    axis.plot(exp_p_vals,exp_p_vals,'--',color=red_color,lw=3.0)
    # NOTE(review): min(min_vals_obs) appears twice in this expression --
    # presumably one occurrence was meant to be something else; verify.
    # ci_vecs is defined on every reachable path at this point (set in the
    # experimental branches above or in the theoretical branch just before).
    axis.set_ylim(0.0,np.ceil(-np.log10(min(min(min_vals_obs),ci_vecs[0].min(),min(min_vals_obs))))+1)
    axis.legend(loc='upper left',frameon=False,fontsize=14)
    axis.set_xlabel(r'$\log_{10}$(P-Value)'+'\nExpected',fontsize=24)
    axis.set_ylabel(r'$\log_{10}$(P-Value)'+'\nObserved',fontsize=24)
    return f,axis
def ManhattanPlot(data_table,p_value_column='P',chrom_column='CHROM',pos_column='POS',allele_freq_window=None,maf_column=None,marked_column=None,all_sig_thresh=[5e-8],chrom_colors = [color_list[0],color_list[3]],alpha_min=1.0,min_p=1e-30,hide_hla=False,thin_data=True,thin_data_thresh=1e-4):
    """Draw a Manhattan plot of GWAS P-values along the genome.

    NOTE(review): this definition is immediately shadowed by the second
    ManhattanPlot defined later in this module (same name, with an extra
    snp_to_gene parameter), so this version is dead code at import time.
    Consider deleting it or renaming one of the two.

    Args:
        data_table: DataFrame of association results.
        p_value_column / chrom_column / pos_column: column names for
            P-value, chromosome and base-pair position.
        allele_freq_window: optional (low, high) MAF window filter.
        maf_column: MAF column to use for the window filter; falls back to
            the 'MAF' attribute when None.
        marked_column: optional boolean column; True rows are re-plotted
            as red stars.
        all_sig_thresh: significance thresholds drawn as dashed lines.
        chrom_colors: two colors alternated between chromosomes.
        alpha_min: minimum point alpha; alpha scales with -log10(p).
        min_p: floor applied to P-values before taking -log10.
        hide_hla: drop chr6 positions 28,477,797-33,448,354.
        thin_data / thin_data_thresh: deduplicate points with p above
            thin_data_thresh after rounding, to shrink the figure.

    Returns:
        (figure, axis) matplotlib handles.
    """
    f, axis = plt.subplots(1, 1,figsize=(24,8))
    axis.spines['right'].set_visible(False)
    axis.spines['top'].set_visible(False)
    axis.spines['bottom'].set_visible(False)
    included_columns=[chrom_column,pos_column,p_value_column]
    if marked_column is not None:
        included_columns+=[marked_column]
    # Optional MAF window filter.
    if allele_freq_window is not None:
        if maf_column is None:
            fig_table=data_table[np.logical_and(data_table.MAF>=allele_freq_window[0],data_table.MAF<allele_freq_window[1])][included_columns]
        else:
            fig_table=data_table[np.logical_and(data_table[maf_column]>=allele_freq_window[0],data_table[maf_column]<allele_freq_window[1])][included_columns]
    else:
        fig_table=data_table[included_columns]
    if hide_hla:
        # Exclude the extended MHC/HLA region on chromosome 6.
        chr6 = fig_table.loc[(fig_table[chrom_column]==6)]
        excluded=chr6.index[np.logical_and(chr6[pos_column]>=28477797,chr6[pos_column]<=33448354)]
        fig_table=fig_table.drop(excluded)[included_columns]
    axis.set_xlim(0.0,1.05)
    axis.set_ylim(0.0,np.ceil(min(-np.log10(fig_table[p_value_column].min()),-np.log10(min_p))))
    # Map each chromosome to a fraction of [0, 1] on the x axis,
    # proportional to its largest observed position.
    all_chrom = np.unique(fig_table[chrom_column])
    all_chrom.sort()
    offsets = np.zeros(all_chrom.shape[0])
    total_bps=0.0
    for i,c in enumerate(all_chrom):
        offsets[i]=total_bps
        total_bps+=np.max(fig_table.loc[fig_table[chrom_column]==c][pos_column].values)
    offsets/=total_bps
    offsets=np.append(offsets,1.0)
    for i,c in enumerate(all_chrom):
        current_chrom = fig_table.loc[fig_table[chrom_column]==c]
        plot_table=pd.DataFrame(index=current_chrom.index)
        plot_table['logP']=-np.log10(current_chrom[p_value_column])
        # Cap -log10(p) at -log10(min_p); only 'logP' exists at this point.
        plot_table.loc[plot_table.logP>(-np.log10(min_p))]=-np.log10(min_p)
        plot_table['pos']=(current_chrom[pos_column]/total_bps)+offsets[i]
        if marked_column is not None:
            plot_table['mark']=current_chrom[marked_column]
        if thin_data:
            #thins p-values less than 1e-5 by rounding and taking only unique values
            plot_table['logP_R']=plot_table['logP'].values
            plot_table.loc[plot_table.logP<(-np.log10(thin_data_thresh)),'logP_R']=np.round(plot_table.loc[plot_table.logP<(-np.log10(thin_data_thresh))]['logP_R'].values,1)
            plot_table['pos_R']=np.round(plot_table['pos']*5,2)
            plot_table=plot_table.drop_duplicates(['logP_R','pos_R'])
        # Alternate chromosome colors; alpha scales with significance.
        rgba_colors = np.zeros((plot_table.shape[0],4))
        rgba_colors[:,0:4] = np.array(chrom_colors[i%2])
        alpha_levels = (1.0-alpha_min)*(plot_table.logP)/(-np.log10(min_p))+alpha_min
        alpha_levels[alpha_levels>1.0]=1.0
        rgba_colors[:,3]=alpha_levels
        axis.scatter(plot_table.pos,plot_table.logP,s=75.0,marker='o',color=rgba_colors,lw=0.0)
        if marked_column is not None:
            # NOTE(review): np.array(red_color) wraps a hex string in a
            # 0-d array -- the second ManhattanPlot passes red_color
            # directly; verify this variant actually renders.
            axis.scatter(plot_table.loc[plot_table['mark']==True].pos,plot_table.loc[plot_table['mark']==True].logP,s=75.0,marker='*',color=np.array(red_color),lw=0.0)
    for sig_thresh in all_sig_thresh:
        axis.hlines(-np.log10(sig_thresh),0.0,1.0,linestyle='--',color=red_color,alpha=0.75,lw=2.0)
        axis.text(0.1,-np.log10(sig_thresh)+0.05*axis.get_ylim()[1],r'Sig. Level {0:.1e}'.format(sig_thresh),fontsize=12)
    axis.set_ylabel(r"$P$-Value"+'\n'+r'($-\log_{10}$-Scale)',fontsize=24)
    axis.set_xlabel('Chromsome',fontsize=24)
    # Label every other chromosome at its midpoint to avoid crowding.
    chrom_locators = offsets[:-1]+(offsets[1:]-offsets[:-1])/2.0
    axis.xaxis.set_major_locator(plt.FixedLocator(chrom_locators[0::2]))
    # NOTE(review): np.str was removed in NumPy 1.24 -- this line raises
    # AttributeError on modern NumPy; use builtin str instead.
    axis.xaxis.set_major_formatter(plt.FixedFormatter(np.array(all_chrom,dtype=np.str)[0::2]))
    axis.xaxis.set_minor_locator(plt.FixedLocator(chrom_locators[1::2]))
    return f,axis
def ManhattanPlot(data_table,p_value_column='P',chrom_column='CHROM',pos_column='POS',allele_freq_window=None,maf_column=None,marked_column=None,snp_to_gene=None,all_sig_thresh=[5e-8],chrom_colors = [color_list[0],color_list[3]],alpha_min=1.0,min_p=1e-30,hide_hla=False,thin_data=True,thin_data_thresh=1e-6):
    """Draw a Manhattan plot of GWAS P-values along the genome.

    This second definition shadows the earlier ManhattanPlot in this module
    and adds gene-label annotation via snp_to_gene.

    Args:
        data_table: DataFrame of association results.
        p_value_column / chrom_column / pos_column: column names for
            P-value, chromosome and base-pair position.
        allele_freq_window: optional (low, high) MAF window filter.
        maf_column: MAF column to use for the window filter; falls back to
            the 'MAF' attribute when None.
        marked_column: optional boolean column; True rows are re-plotted
            as red stars.
        snp_to_gene: optional mapping of data_table index label -> list of
            gene names, written next to the corresponding point.
        all_sig_thresh: significance thresholds drawn as dashed lines.
        chrom_colors: two colors alternated between chromosomes.
        alpha_min: minimum point alpha; alpha scales with -log10(p).
        min_p: floor applied to P-values before taking -log10.
        hide_hla: drop chr6 positions 28,477,797-33,448,354.
        thin_data / thin_data_thresh: deduplicate points with p above
            thin_data_thresh after rounding, to shrink the figure.

    Returns:
        (figure, axis) matplotlib handles.
    """
    f, axis = plt.subplots(1, 1,figsize=(24,8))
    axis.spines['right'].set_visible(False)
    axis.spines['top'].set_visible(False)
    axis.spines['bottom'].set_visible(False)
    included_columns=[chrom_column,pos_column,p_value_column]
    if marked_column is not None:
        included_columns+=[marked_column]
    # Optional MAF window filter.
    if allele_freq_window is not None:
        if maf_column is None:
            fig_table=data_table[np.logical_and(data_table.MAF>=allele_freq_window[0],data_table.MAF<allele_freq_window[1])][included_columns]
        else:
            fig_table=data_table[np.logical_and(data_table[maf_column]>=allele_freq_window[0],data_table[maf_column]<allele_freq_window[1])][included_columns]
    else:
        fig_table=data_table[included_columns]
    if hide_hla:
        # Exclude the extended MHC/HLA region on chromosome 6.
        chr6 = fig_table.loc[(fig_table[chrom_column]==6)]
        excluded=chr6.index[np.logical_and(chr6[pos_column]>=28477797,chr6[pos_column]<=33448354)]
        fig_table=fig_table.drop(excluded)[included_columns]
    axis.set_xlim(0.0,1.05)
    axis.set_ylim(0.0,np.ceil(min(-np.log10(fig_table[p_value_column].min()),-np.log10(min_p))))
    # Map each chromosome to a fraction of [0, 1] on the x axis,
    # proportional to its largest observed position.
    all_chrom = np.unique(fig_table[chrom_column])
    all_chrom.sort()
    offsets = np.zeros(all_chrom.shape[0])
    total_bps=0.0
    for i,c in enumerate(all_chrom):
        offsets[i]=total_bps
        total_bps+=np.max(fig_table.loc[fig_table[chrom_column]==c][pos_column].values)
    offsets/=total_bps
    offsets=np.append(offsets,1.0)
    for i,c in enumerate(all_chrom):
        current_chrom = fig_table.loc[fig_table[chrom_column]==c]
        plot_table=pd.DataFrame(index=current_chrom.index)
        plot_table['logP']=-np.log10(current_chrom[p_value_column])
        # Cap -log10(p) at -log10(min_p); only 'logP' exists at this point.
        plot_table.loc[plot_table.logP>(-np.log10(min_p))]=-np.log10(min_p)
        plot_table['pos']=(current_chrom[pos_column]/total_bps)+offsets[i]
        if marked_column is not None:
            plot_table['mark']=current_chrom[marked_column]
        if thin_data:
            #thins p-values less than thin_data_thresh by rounding and taking only unique values
            plot_table['logP_R']=plot_table['logP'].values
            plot_table.loc[plot_table.logP<(-np.log10(thin_data_thresh)),'logP_R']=np.round(plot_table.loc[plot_table.logP<(-np.log10(thin_data_thresh))]['logP_R'].values,1)
            plot_table['pos_R']=np.round(plot_table['pos']*5,2)
            plot_table=plot_table.drop_duplicates(['logP_R','pos_R'])
        # Alternate chromosome colors; alpha scales with significance.
        rgba_colors = np.zeros((plot_table.shape[0],4))
        rgba_colors[:,0:4] = np.array(chrom_colors[i%2])
        alpha_levels = (1.0-alpha_min)*(plot_table.logP)/(-np.log10(min_p))+alpha_min
        alpha_levels[alpha_levels>1.0]=1.0
        rgba_colors[:,3]=alpha_levels
        axis.scatter(plot_table.pos,plot_table.logP,s=75.0,marker='o',color=rgba_colors,lw=0.0)
        if marked_column is not None:
            axis.scatter(plot_table.loc[plot_table['mark']==True].pos,plot_table.loc[plot_table['mark']==True].logP,s=75.0,marker='*',color=red_color,lw=0.0)
    for sig_thresh in all_sig_thresh:
        axis.hlines(-np.log10(sig_thresh),0.0,1.0,linestyle='--',color=red_color,alpha=0.75,lw=2.0)
        axis.text(0.1,-np.log10(sig_thresh)+0.05*axis.get_ylim()[1],r'Sig. Level {0:.1e}'.format(sig_thresh),fontsize=18)
    if snp_to_gene is not None:
        # Annotate selected SNPs with their gene names.
        # NOTE(review): the x offset uses get_ylim() -- looks like it was
        # meant to be get_xlim(); confirm intended label placement.
        for snp,gene_list in snp_to_gene.items():
            x_loc=((fig_table.loc[snp][pos_column]/total_bps)+offsets[int(fig_table.loc[snp][chrom_column])-1])+0.001*axis.get_ylim()[1]
            y_loc=-np.log10(fig_table.loc[snp][p_value_column])
            axis.text(x_loc,y_loc,'\n'.join(gene_list),fontsize=18,fontstyle='italic')
    axis.set_ylabel(r"$P$-Value"+'\n'+r'($-\log_{10}$-Scale)',fontsize=24)
    axis.set_xlabel('Chromsome',fontsize=24)
    # Label every other chromosome at its midpoint to avoid crowding.
    chrom_locators = offsets[:-1]+(offsets[1:]-offsets[:-1])/2.0
    axis.xaxis.set_major_locator(plt.FixedLocator(chrom_locators[0::2]))
    # Builtin str replaces np.str (alias removed in NumPy 1.24; it was
    # always identical to str, so the tick labels are unchanged).
    axis.xaxis.set_major_formatter(plt.FixedFormatter(np.array(all_chrom,dtype=str)[0::2]))
    axis.xaxis.set_minor_locator(plt.FixedLocator(chrom_locators[1::2]))
    return f,axis
| 54.554455
| 308
| 0.691773
| 2,768
| 16,530
| 3.873194
| 0.092486
| 0.040295
| 0.018468
| 0.013432
| 0.842925
| 0.819886
| 0.81373
| 0.808507
| 0.798246
| 0.794609
| 0
| 0.045375
| 0.133394
| 16,530
| 302
| 309
| 54.735099
| 0.703037
| 0.009316
| 0
| 0.69163
| 0
| 0
| 0.061317
| 0.001832
| 0
| 0
| 0
| 0
| 0.004405
| 1
| 0.017621
| false
| 0
| 0.030837
| 0
| 0.070485
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c2812c4acc0e3d78a5fbc26ae842cc83cc6fc435
| 40
|
py
|
Python
|
double_debias/__init__.py
|
joe5saia/double_debias
|
d01ad21c1c5ca9b3790cfb62f68657a7604768f4
|
[
"MIT"
] | 3
|
2021-06-08T06:46:58.000Z
|
2022-02-26T13:34:47.000Z
|
double_debias/__init__.py
|
joe5saia/double_debias
|
d01ad21c1c5ca9b3790cfb62f68657a7604768f4
|
[
"MIT"
] | 1
|
2020-12-01T13:09:39.000Z
|
2020-12-01T13:09:39.000Z
|
double_debias/__init__.py
|
joe5saia/double_debias
|
d01ad21c1c5ca9b3790cfb62f68657a7604768f4
|
[
"MIT"
] | 1
|
2022-02-26T13:34:48.000Z
|
2022-02-26T13:34:48.000Z
|
from .double_debias import DoubleDebias
| 20
| 39
| 0.875
| 5
| 40
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c28deef028683bbb5f7d3760f80e67e0907a5c97
| 23,477
|
py
|
Python
|
ipychart/plots.py
|
nicohlr/ipychart
|
fab49fb798363e775c0ad2456f9d22ac0048d6fb
|
[
"MIT"
] | 45
|
2020-08-05T19:32:13.000Z
|
2022-03-25T09:17:41.000Z
|
ipychart/plots.py
|
nicohlr/ipychart
|
fab49fb798363e775c0ad2456f9d22ac0048d6fb
|
[
"MIT"
] | 3
|
2021-05-07T12:45:37.000Z
|
2022-01-21T20:58:37.000Z
|
ipychart/plots.py
|
nicohlr/ipychart
|
fab49fb798363e775c0ad2456f9d22ac0048d6fb
|
[
"MIT"
] | 7
|
2020-08-29T02:56:03.000Z
|
2021-10-04T21:20:21.000Z
|
import pandas as pd
import numpy as np
from typing import Union
from pandas.api.types import is_numeric_dtype
from sklearn.neighbors import KernelDensity
from sklearn.model_selection import GridSearchCV
from .chart import Chart
from .utils.plots_utils import (_create_chart_options,
_create_chart_data_agg,
_create_chart_data_count)
def countplot(data: pd.DataFrame,
              x: str,
              hue: str = None,
              horizontal: bool = False,
              dataset_options: dict = None,
              options: dict = None,
              colorscheme: str = None,
              zoom: bool = True) -> Chart:
    """
    Show the counts of observations in each categorical bin using bars.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        hue (str, optional): Grouping variable that will produce points
            with different colors. Defaults to None.
        horizontal (bool, optional): Draw the bar chart horizontally.
            Defaults to False.
        dataset_options (dict, optional): Options related to the dataset
            object (i.e. options concerning your data). Defaults to {}.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the
            chart. Defaults to None.
        zoom (bool, optional): Allow the user to zoom on the Chart once it
            is created. Defaults to True.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    dataset_options = {} if dataset_options is None else dataset_options

    chart_data = _create_chart_data_count(
        data=data,
        x=x,
        hue=hue,
        dataset_options=dataset_options
    )

    # A horizontal chart swaps which axis carries the counts.
    axis_x, axis_y = ('Count', x) if horizontal else (x, 'Count')
    chart_options = _create_chart_options(
        kind='count',
        options=options,
        x=axis_x,
        y=axis_y,
        hue=hue
    )

    return Chart(
        data=chart_data,
        kind='horizontalBar' if horizontal else 'bar',
        options=chart_options,
        colorscheme=colorscheme,
        zoom=zoom
    )
def distplot(data: pd.DataFrame,
             x: str,
             bandwidth: Union[float, str] = 'auto',
             gridsize: int = 1000,
             dataset_options: dict = None,
             options: dict = None,
             colorscheme: str = None,
             zoom: bool = True, **kwargs) -> Chart:
    """
    Fit and plot a univariate kernel density estimate on a line chart.

    This is useful to have a representation of the distribution of the
    data.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        bandwidth ([float, str], optional): Parameter which affects how
            "smooth" the resulting curve is. If set to 'auto', the optimal
            bandwidth is found using gridsearch. Defaults to 'auto'.
        gridsize (int, optional): Number of discrete points in the
            evaluation grid. Defaults to 1000.
        dataset_options (dict, optional): Options related to the dataset
            object (i.e. options concerning your data). Defaults to {}.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the chart.
            Defaults to None.
        zoom (bool, optional): Allow the user to zoom on the Chart once it
            is created. Defaults to True.
        kwargs (optional): Other keyword arguments are passed down to
            scikit-learn's KernelDensity class.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    assert is_numeric_dtype(data[x]), 'x must be a numeric column'
    if isinstance(bandwidth, str):
        assert bandwidth == 'auto', "bandwidth must be an int or 'auto'"
    if dataset_options is None:
        dataset_options = {}

    # Remove outliers to find max and min values for the x axis
    iqr = data[x].quantile(0.95) - data[x].quantile(0.05)
    data_truncated = data[x][
        ~((data[x] < (data[x].quantile(0.05) - 0.5 * iqr)) |
          (data[x] > (data[x].quantile(0.95) + 0.5 * iqr)))
    ]
    max_val, min_val = (int(data_truncated.max()) + 1,
                        int(data_truncated.min()))
    # Pad both ends by 5% of the span so the curve tails stay visible.
    max_val, min_val = (max_val + 0.05 * (max_val + abs(min_val)),
                        min_val - 0.05 * (max_val + abs(min_val)))

    # Create grid which will be used to compute kde
    _, step = np.linspace(min_val, max_val, num=gridsize, retstep=True)
    x_grid = np.round(np.arange(min_val, max_val, step), 5)

    # If bandwidth is 'auto', find the best bandwidth using gridsearch
    if bandwidth == 'auto':
        grid = GridSearchCV(KernelDensity(),
                            {'bandwidth': np.linspace(0.1, 2, 30)}, cv=5)
        grid.fit(data[x].dropna().to_numpy()[:, None])
        bandwidth = grid.best_params_['bandwidth']

    kde_skl = KernelDensity(bandwidth=bandwidth, **kwargs)
    kde_skl.fit(data[x].dropna().to_numpy()[:, np.newaxis])
    pdf = np.exp(kde_skl.score_samples(x_grid[:, np.newaxis]))

    data = {
        'labels': x_grid.tolist(),
        'datasets': [{'data': pdf.tolist(), 'pointRadius': 0,
                      **dataset_options}]
    }

    options = _create_chart_options(
        kind='count',
        options=options,
        x=x,
        # round() works for both Python and NumPy scalars; the original
        # bandwidth.round(4) raised AttributeError when the caller passed
        # a plain float instead of leaving bandwidth='auto'.
        y=f'Density (bandwidth: {round(float(bandwidth), 4)})',
        hue=None
    )

    # Add ticks formatting to options if not already set.
    # This will not break because keys are created in the
    # _create_chart_options method called previously.
    maxtickslimit = 10
    ticks_format_function = (
        "function(value, index, values) {if (Math.abs(value) >= 1) {"
        "return Math.round(value);} else {return value.toFixed(3);}}"
    )
    if 'ticks' not in options['scales']['xAxes'][0]:
        options['scales']['xAxes'][0].update(
            {'ticks': {'maxTicksLimit': maxtickslimit,
                       'callback': ticks_format_function}}
        )
    else:
        ticks_options = options['scales']['xAxes'][0]['ticks']
        if 'maxTicksLimit' not in ticks_options:
            ticks_options['maxTicksLimit'] = maxtickslimit
        if 'callback' not in ticks_options:
            ticks_options['callback'] = ticks_format_function

    return Chart(
        data=data,
        kind='line',
        options=options,
        colorscheme=colorscheme,
        zoom=zoom
    )
def lineplot(data: pd.DataFrame,
             x: str,
             y: str,
             hue: str = None,
             agg: str = 'mean',
             dataset_options: Union[dict, list] = None,
             options: dict = None,
             colorscheme: str = None,
             zoom: bool = True) -> Chart:
    """
    A line chart is a way of plotting data points on a line. Often, it is
    used to show a trend in the data, or the comparison of two data sets.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str): Column of the dataframe used as datapoints for y Axis.
        hue (str, optional): Grouping variable that will produce points with
            different colors. Defaults to None.
        agg (str, optional): The aggregator used to gather data (ex: 'median'
            or 'mean'). Defaults to None.
        dataset_options ([dict, list], optional): Options related to the
            dataset object (i.e. options concerning your data). Defaults to {}.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the chart.
            Defaults to None.
        zoom (bool, optional): Allow the user to zoom on the Chart once it is
            created. Defaults to True.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    dataset_options = {} if dataset_options is None else dataset_options

    chart_data = _create_chart_data_agg(
        data=data,
        kind='line',
        x=x,
        y=y,
        hue=hue,
        agg=agg,
        dataset_options=dataset_options
    )
    chart_options = _create_chart_options(
        kind='line',
        options=options,
        x=x,
        y=y,
        hue=hue,
        agg=agg
    )

    return Chart(
        data=chart_data,
        kind='line',
        options=chart_options,
        colorscheme=colorscheme,
        zoom=zoom
    )
def barplot(data: pd.DataFrame,
            x: str,
            y: str,
            hue: str = None,
            horizontal: bool = False,
            agg: str = 'mean',
            dataset_options: Union[dict, list] = None,
            options: dict = None,
            colorscheme: str = None,
            zoom: bool = True) -> Chart:
    """
    A bar chart provides a way of showing data values represented as
    vertical bars. It is sometimes used to show a trend in the data,
    and the comparison of multiple data sets side by side.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str): Column of the dataframe used as datapoints for y Axis.
        hue (str, optional): Grouping variable that will produce points with
            different colors. Defaults to None.
        horizontal (bool): Draw the bar chart horizontally. Defaults to False.
        agg (str, optional): The aggregator used to gather data (ex: 'median'
            or 'mean'). Defaults to None.
        dataset_options ([dict, list], optional): Options related to the
            dataset object (i.e. options concerning your data). Defaults to {}.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the chart.
            Defaults to None.
        zoom (bool, optional): Allow the user to zoom on the Chart once it is
            created. Defaults to True.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    dataset_options = {} if dataset_options is None else dataset_options

    chart_data = _create_chart_data_agg(
        data=data,
        kind='bar',
        x=x,
        y=y,
        hue=hue,
        agg=agg,
        dataset_options=dataset_options
    )
    chart_options = _create_chart_options(
        kind='bar',
        options=options,
        x=x,
        y=y,
        hue=hue,
        agg=agg
    )

    return Chart(
        data=chart_data,
        kind='horizontalBar' if horizontal else 'bar',
        options=chart_options,
        colorscheme=colorscheme,
        zoom=zoom
    )
def radarplot(data: pd.DataFrame,
              x: str,
              y: str,
              hue: str = None,
              agg: str = 'mean',
              dataset_options: Union[dict, list] = None,
              options: dict = None,
              colorscheme: str = None) -> Chart:
    """
    A radar chart is a way of showing multiple data points and the
    variation between them. They are often useful for comparing the
    points of two or more different data sets.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str): Column of the dataframe used as datapoints for y Axis.
        hue (str, optional): Grouping variable that will produce points with
            different colors. Defaults to None.
        agg (str, optional): The aggregator used to gather data (ex: 'median'
            or 'mean'). Defaults to None.
        dataset_options ([dict, list], optional): Options related to the
            dataset object (i.e. options concerning your data). Defaults to {}.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the chart.
            Defaults to None.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    dataset_options = {} if dataset_options is None else dataset_options

    chart_data = _create_chart_data_agg(
        data=data,
        kind='radar',
        x=x,
        y=y,
        hue=hue,
        agg=agg,
        dataset_options=dataset_options
    )
    chart_options = _create_chart_options(
        kind='radar',
        options=options,
        x=x,
        y=y,
        hue=hue,
        agg=agg
    )

    # Radar charts do not take the zoom argument.
    return Chart(
        data=chart_data,
        kind='radar',
        options=chart_options,
        colorscheme=colorscheme
    )
def doughnutplot(data: pd.DataFrame,
                 x: str,
                 y: str = None,
                 agg: str = 'mean',
                 dataset_options: dict = None,
                 options: dict = None,
                 colorscheme: str = None) -> Chart:
    """
    Pie and doughnut charts are excellent at showing the relational
    proportions between data.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str, optional): Column of the dataframe used as datapoints for
            y Axis. When None or empty, the chart shows counts of x
            instead of an aggregate of y. Defaults to None.
        agg (str, optional): The aggregator used to gather data (ex: 'median'
            or 'mean'). Defaults to None.
        dataset_options (dict, optional): Options related to the dataset
            object (i.e. options concerning your data). Defaults to {}.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the chart.
            Defaults to None.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    if dataset_options is None:
        dataset_options = {}

    # y now defaults to None for consistency with pieplot/polarplot: the
    # body always treated y as optional, but the old signature made it a
    # required argument.
    if y:
        data = _create_chart_data_agg(
            data=data,
            kind='doughnut',
            x=x,
            y=y,
            agg=agg,
            dataset_options=dataset_options
        )
    else:
        data = _create_chart_data_count(
            data=data,
            x=x,
            dataset_options=dataset_options
        )

    options = _create_chart_options(
        kind='doughnut',
        options=options,
        x=x,
        y=y,
        hue=None,
        agg=agg
    )

    return Chart(
        data=data,
        kind='doughnut',
        options=options,
        colorscheme=colorscheme
    )
def pieplot(data: pd.DataFrame,
            x: str,
            y: str = None,
            agg: str = 'mean',
            dataset_options: dict = None,
            options: dict = None,
            colorscheme: str = None) -> Chart:
    """
    Draw a pie chart from a dataframe.

    Pie and doughnut charts are excellent at showing the relational
    proportions between data.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str, optional): Column of the dataframe used as datapoints for
            y Axis. If None, the chart shows the count of each value of x.
            Defaults to None.
        agg (str, optional): The aggregator used to gather data (ex: 'median'
            or 'mean'). Defaults to 'mean'.
        dataset_options (dict, optional): Options related to the dataset
            object (i.e. options concerning your data). Defaults to None.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the
            chart. Defaults to None.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    dataset_options = {} if dataset_options is None else dataset_options
    # Aggregate y per x when a y column is supplied; otherwise count x.
    if y:
        pie_data = _create_chart_data_agg(
            data=data, kind='pie', x=x, y=y, agg=agg,
            dataset_options=dataset_options)
    else:
        pie_data = _create_chart_data_count(
            data=data, x=x, dataset_options=dataset_options)
    pie_options = _create_chart_options(
        kind='pie', options=options, x=x, y=y, hue=None, agg=agg)
    return Chart(data=pie_data, kind='pie', options=pie_options,
                 colorscheme=colorscheme)
def polarplot(data: pd.DataFrame,
              x: str,
              y: str = None,
              agg: str = 'mean',
              dataset_options: dict = None,
              options: dict = None,
              colorscheme: str = None) -> Chart:
    """
    Draw a polar-area chart from a dataframe.

    Polar area charts are similar to pie charts, but each segment has the
    same angle - the radius of the segment differs depending on the value.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str, optional): Column of the dataframe used as datapoints for
            y Axis. If None, the chart shows the count of each value of x.
            Defaults to None.
        agg (str, optional): The aggregator used to gather data (ex: 'median'
            or 'mean'). Defaults to 'mean'.
        dataset_options (dict, optional): Options related to the dataset
            object (i.e. options concerning your data). Defaults to None.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the
            chart. Defaults to None.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    if dataset_options is None:
        dataset_options = {}
    # With a y column we aggregate; without one we count x occurrences.
    polar_data = (
        _create_chart_data_agg(
            data=data, kind='polarArea', x=x, y=y, agg=agg,
            dataset_options=dataset_options)
        if y else
        _create_chart_data_count(
            data=data, x=x, dataset_options=dataset_options)
    )
    polar_options = _create_chart_options(
        kind='polarArea', options=options, x=x, y=y, hue=None, agg=agg)
    return Chart(data=polar_data, kind='polarArea', options=polar_options,
                 colorscheme=colorscheme)
def scatterplot(data: pd.DataFrame,
                x: str,
                y: str,
                hue: str = None,
                dataset_options: Union[dict, list] = None,
                options: dict = None,
                colorscheme: str = None,
                zoom: bool = True) -> Chart:
    """
    Draw a scatter chart from a dataframe.

    Scatter charts are based on basic line charts with the x axis changed
    to a linear axis.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str): Column of the dataframe used as datapoints for y Axis.
        hue (str, optional): Grouping variable that will produce points with
            different colors. Defaults to None.
        dataset_options ([dict, list], optional): Options related to the
            dataset object (i.e. options concerning your data).
            Defaults to None.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the
            chart. Defaults to None.
        zoom (bool, optional): Allow the user to zoom on the Chart once it
            is created. Defaults to True.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    dataset_options = dataset_options if dataset_options is not None else {}
    scatter_data = _create_chart_data_agg(
        data=data, kind='scatter', x=x, y=y, hue=hue,
        dataset_options=dataset_options)
    scatter_options = _create_chart_options(
        kind='scatter', options=options, x=x, y=y, hue=hue)
    return Chart(data=scatter_data, kind='scatter', options=scatter_options,
                 colorscheme=colorscheme, zoom=zoom)
def bubbleplot(data: pd.DataFrame,
               x: str,
               y: str,
               r: str,
               hue: str = None,
               dataset_options: Union[dict, list] = None,
               options: dict = None,
               colorscheme: str = None,
               zoom: bool = True) -> Chart:
    """
    Draw a bubble chart from a dataframe.

    A bubble chart is used to display three-dimension data.
    The location of the bubble is determined by the first two dimensions
    and the corresponding horizontal and vertical axes.
    The third dimension is represented by the radius of the individual
    bubbles.

    Args:
        data (pd.DataFrame): The dataframe used to draw the chart.
        x (str): Column of the dataframe used as datapoints for x Axis.
        y (str): Column of the dataframe used as datapoints for y Axis.
        r (str): Column of the dataframe used as radius for bubbles.
        hue (str, optional): Grouping variable that will produce points with
            different colors. Defaults to None.
        dataset_options ([dict, list], optional): Options related to the
            dataset object (i.e. options concerning your data).
            Defaults to None.
        options (dict, optional): Options to configure the chart. This
            dictionary corresponds to the "options" argument of Chart.js.
            Defaults to None.
        colorscheme (str, optional): Colorscheme to use when drawing the
            chart. Defaults to None.
        zoom (bool, optional): Allow the user to zoom on the Chart once it
            is created. Defaults to True.

    Returns:
        [ipychart.Chart]: A chart which display the data using ipychart.
    """
    if dataset_options is None:
        dataset_options = {}
    # The r column rides along with x/y into the dataset builder.
    data = _create_chart_data_agg(
        data=data,
        kind='bubble',
        x=x,
        y=y,
        r=r,
        hue=hue,
        dataset_options=dataset_options
    )
    options = _create_chart_options(
        kind='bubble',
        options=options,
        x=x,
        y=y,
        hue=hue
    )
    return Chart(
        data=data,
        kind='bubble',
        options=options,
        colorscheme=colorscheme,
        zoom=zoom
    )
| 29.309613
| 79
| 0.579802
| 2,822
| 23,477
| 4.750532
| 0.105599
| 0.06788
| 0.033418
| 0.028346
| 0.777711
| 0.758839
| 0.740489
| 0.726018
| 0.712517
| 0.707743
| 0
| 0.003019
| 0.336798
| 23,477
| 800
| 80
| 29.34625
| 0.857996
| 0.477957
| 0
| 0.736318
| 0
| 0.002488
| 0.053458
| 0.001853
| 0
| 0
| 0
| 0
| 0.004975
| 1
| 0.024876
| false
| 0
| 0.019901
| 0
| 0.069652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c2f8a70ab8519221d14ff44835b8084e47d21ee7
| 283
|
py
|
Python
|
tests/__init__.py
|
alekLukanen/pyDist
|
ffb2c3feb20afba078fec7381c8785eb1e2b0543
|
[
"MIT"
] | 5
|
2017-12-24T08:11:16.000Z
|
2019-02-07T22:13:26.000Z
|
tests/__init__.py
|
alekLukanen/pyDist
|
ffb2c3feb20afba078fec7381c8785eb1e2b0543
|
[
"MIT"
] | 1
|
2021-06-01T23:17:31.000Z
|
2021-06-01T23:17:31.000Z
|
tests/__init__.py
|
alekLukanen/pyDist
|
ffb2c3feb20afba078fec7381c8785eb1e2b0543
|
[
"MIT"
] | null | null | null |
# Import the test modules so loading this package brings every suite in.
import tests.test_clusterStructures
import tests.test_basePackage
import tests.test_nodeEndpoints

# Implicit string concatenation needs an explicit trailing space,
# otherwise the two fragments run together ("...intothe current...").
print('(TEST DIR __INIT__.py) loaded the test functions into '
      'the current namespace')
print('+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+'
      '+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+')
| 28.3
| 62
| 0.568905
| 26
| 283
| 5.923077
| 0.615385
| 0.214286
| 0.292208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14841
| 283
| 9
| 63
| 31.444444
| 0.639004
| 0
| 0
| 0
| 0
| 0
| 0.498169
| 0.227106
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.428571
| 0
| 0.428571
| 0.285714
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6c0fb0f7a3bb2112370a7551deae91b5e52d23b9
| 8,152
|
py
|
Python
|
test/api/test_resize.py
|
mrz1988/lilies
|
9525770fabab7e142ebedc40ab5d0c8027aa90ba
|
[
"MIT"
] | null | null | null |
test/api/test_resize.py
|
mrz1988/lilies
|
9525770fabab7e142ebedc40ab5d0c8027aa90ba
|
[
"MIT"
] | 51
|
2019-06-18T16:35:56.000Z
|
2021-02-23T00:32:23.000Z
|
test/api/test_resize.py
|
mrz1988/lilies
|
9525770fabab7e142ebedc40ab5d0c8027aa90ba
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
from lilies import wilt, resize
from lilies.objects.lilyblock import LilyBlock
class TestResize(unittest.TestCase):
    """Exercise the public `resize` API on plain strings and LilyBlocks.

    Every expected value is compared after `wilt`, which (per its use
    here) turns the resized object back into a plain string.
    """

    def setUp(self):
        # Mixed-case, punctuated fixture lines; also joined into one
        # multi-line string for the LilyBlock tests below.
        self.strings = [
            "hello",
            "dfDfEEFdfaC",
            "Mister John",
            "mr. jOhn",
            "iSn't it",
            "comma,separated,values",
            "trailing,comma,",
            ",",
        ]
        self.block_str = os.linesep.join(self.strings)
        self.twenty_chars = "12345678901234567890"

    # --- argument validation: bad `align` values raise TypeError -------

    def test_empty_align_fails(self):
        with self.assertRaises(TypeError):
            resize(self.twenty_chars, 20, align="")

    def test_garbage_align_fails(self):
        with self.assertRaises(TypeError):
            resize(self.twenty_chars, 20, align="garbage")

    def test_garbage_x_align_fails(self):
        with self.assertRaises(TypeError):
            resize(self.twenty_chars, align="top garbage")

    def test_garbage_y_align_fails(self):
        with self.assertRaises(TypeError):
            resize(self.twenty_chars, align="garbage left")

    def test_resize_vertically_respects_fill_character(self):
        # Growing a 1x1 space to 5x3 centered: new cells take `character`.
        resized = resize(" ", 5, 3, "center", character="a")
        expected = "\n".join(["aaaaa", "a   a", "aaaaa"])
        self.assertEqual(expected, wilt(resized))

    # --- same-size resizes are no-ops ----------------------------------

    def test_resize_to_same_size_leaves_unchanged(self):
        resized = resize(self.twenty_chars, 20, add_elipsis=False)
        self.assertEqual(self.twenty_chars, wilt(resized))

    def test_resize_same_width_with_elipsis_leaves_unchanged(self):
        resized = resize(self.twenty_chars, 20, add_elipsis=True)
        self.assertEqual(self.twenty_chars, wilt(resized))

    def test_resize_same_width_centered_leaves_unchanged(self):
        resized = resize(self.twenty_chars, 20, align="center")
        self.assertEqual(self.twenty_chars, wilt(resized))

    # --- shrinking: truncation and elipsis behavior --------------------

    def test_resize_smaller_left_no_elipsis_truncates(self):
        expected = self.twenty_chars[:10]
        resized = resize(
            self.twenty_chars, 10, align="left", add_elipsis=False
        )
        self.assertEqual(expected, wilt(resized))

    def test_resize_smaller_left_with_elipsis_truncates(self):
        # Elipsis costs two characters of the available width.
        expected = self.twenty_chars[:8] + ".."
        resized = resize(self.twenty_chars, 10, align="left", add_elipsis=True)
        self.assertEqual(expected, wilt(resized))

    def test_resize_smaller_right_with_elipsis_truncates(self):
        # we basically expect this to be the same. Don't truncate left side.
        expected = self.twenty_chars[:8] + ".."
        resized = resize(
            self.twenty_chars, 10, align="right", add_elipsis=True
        )
        self.assertEqual(expected, wilt(resized))

    def test_resize_smaller_center_with_elipsis_truncates(self):
        # we basically expect this to be the same. Don't truncate left side.
        expected = self.twenty_chars[:8] + ".."
        resized = resize(
            self.twenty_chars, 10, align="center", add_elipsis=True
        )
        self.assertEqual(expected, wilt(resized))

    # --- growing horizontally: padding side follows alignment ----------

    def test_resize_horiz_larger_left(self):
        expected = self.twenty_chars + " " * 20
        resized = resize(self.twenty_chars, 40, align="left")
        self.assertEqual(expected, wilt(resized))

    def test_resize_horiz_larger_right(self):
        expected = " " * 20 + self.twenty_chars
        resized = resize(self.twenty_chars, 40, align="right")
        self.assertEqual(expected, wilt(resized))

    def test_resize_horiz_larger_center_even(self):
        expected = " " * 10 + self.twenty_chars + " " * 10
        resized = resize(self.twenty_chars, 40, align="center")
        self.assertEqual(expected, wilt(resized))

    def test_resize_horiz_larger_center_odd(self):
        # Odd surplus: the extra space lands on the right.
        expected = " " * 9 + self.twenty_chars + " " * 10
        resized = resize(self.twenty_chars, 39, align="center")
        self.assertEqual(expected, wilt(resized))

    # --- LilyBlock resizes: every line padded to the block width -------

    def test_resize_block_horiz_larger(self):
        control_group = [
            "hello                  ",
            "dfDfEEFdfaC            ",
            "Mister John            ",
            "mr. jOhn               ",
            "iSn't it               ",
            "comma,separated,values ",
            "trailing,comma,        ",
            ",                      ",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, width=23)
        self.assertEqual(control, wilt(result))

    def test_resize_block_vert_top_larger(self):
        control_group = [
            "hello                 ",
            "dfDfEEFdfaC           ",
            "Mister John           ",
            "mr. jOhn              ",
            "iSn't it              ",
            "comma,separated,values",
            "trailing,comma,       ",
            ",                     ",
            "                      ",
            "                      ",
            "                      ",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, height=11, align="top")
        self.assertEqual(control, wilt(result))

    def test_resize_block_vert_bottom_larger(self):
        control_group = [
            "                      ",
            "                      ",
            "hello                 ",
            "dfDfEEFdfaC           ",
            "Mister John           ",
            "mr. jOhn              ",
            "iSn't it              ",
            "comma,separated,values",
            "trailing,comma,       ",
            ",                     ",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, height=10, align="bottom")
        self.assertEqual(control, wilt(result))

    def test_resize_block_vert_center_larger(self):
        # 13 rows for 8 lines: 2 blanks above, 3 below.
        control_group = [
            "                      ",
            "                      ",
            "hello                 ",
            "dfDfEEFdfaC           ",
            "Mister John           ",
            "mr. jOhn              ",
            "iSn't it              ",
            "comma,separated,values",
            "trailing,comma,       ",
            ",                     ",
            "                      ",
            "                      ",
            "                      ",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, height=13, align="center left")
        self.assertEqual(control, wilt(result))

    def test_resize_block_vert_top_smaller(self):
        # Keeping the top 5 lines narrows the block width to 11.
        control_group = [
            "hello      ",
            "dfDfEEFdfaC",
            "Mister John",
            "mr. jOhn   ",
            "iSn't it   ",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, height=5, align="top")
        self.assertEqual(control, wilt(result))

    def test_resize_block_vert_bottom_smaller(self):
        control_group = [
            "dfDfEEFdfaC           ",
            "Mister John           ",
            "mr. jOhn              ",
            "iSn't it              ",
            "comma,separated,values",
            "trailing,comma,       ",
            ",                     ",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, height=7, align="bottom")
        self.assertEqual(control, wilt(result))

    def test_resize_block_vert_center_smaller(self):
        control_group = [
            "dfDfEEFdfaC           ",
            "Mister John           ",
            "mr. jOhn              ",
            "iSn't it              ",
            "comma,separated,values",
        ]
        control = os.linesep.join(control_group)
        block = LilyBlock(self.block_str)
        result = resize(block, height=5, align="center left")
        self.assertEqual(control, wilt(result))
| 37.223744
| 79
| 0.529318
| 797
| 8,152
| 5.202008
| 0.138018
| 0.065123
| 0.097685
| 0.075977
| 0.853835
| 0.843946
| 0.84274
| 0.810178
| 0.785094
| 0.734443
| 0
| 0.014652
| 0.363714
| 8,152
| 218
| 80
| 37.394495
| 0.784654
| 0.02159
| 0
| 0.619048
| 0
| 0
| 0.189037
| 0.019318
| 0
| 0
| 0
| 0
| 0.121693
| 1
| 0.126984
| false
| 0
| 0.021164
| 0
| 0.153439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dd25412f0d4b82222c1d80577095b8b5b8fa3a83
| 3,659
|
py
|
Python
|
tests/test_bootstrap_field_parameters.py
|
akx/django-bootstrap5
|
2fbe688c30061122e08981f65bf87ec35fcf28ad
|
[
"BSD-3-Clause"
] | 118
|
2021-03-15T14:01:49.000Z
|
2022-03-29T06:40:46.000Z
|
tests/test_bootstrap_field_parameters.py
|
akx/django-bootstrap5
|
2fbe688c30061122e08981f65bf87ec35fcf28ad
|
[
"BSD-3-Clause"
] | 97
|
2021-03-19T05:44:28.000Z
|
2022-03-31T09:05:29.000Z
|
tests/test_bootstrap_field_parameters.py
|
akx/django-bootstrap5
|
2fbe688c30061122e08981f65bf87ec35fcf28ad
|
[
"BSD-3-Clause"
] | 33
|
2021-03-22T14:45:08.000Z
|
2022-02-23T18:12:23.000Z
|
from django import forms
from tests.base import BootstrapTestCase
class CharFieldTestForm(forms.Form):
    """Fixture form with a single required CharField named "test"."""

    test = forms.CharField()
class BootstrapFieldParameterTestCase(BootstrapTestCase):
    """Test `bootstrap_field` parameters."""

    def test_wrapper_class(self):
        """Test field with default CharField widget."""
        form = CharFieldTestForm()
        # Baseline: default wrapper gets "mb-3" plus the required marker.
        self.assertHTMLEqual(
            self.render("{% bootstrap_field form.test %}", context={"form": form}),
            (
                '<div class="django_bootstrap5-req mb-3">'
                '<label for="id_test" class="form-label">Test</label>'
                '<input class="form-control" id="id_test" name="test" placeholder="Test" required type="text">'
                "</div>"
            ),
        )
        # inline_wrapper_class has no effect outside the inline layout.
        self.assertHTMLEqual(
            self.render("{% bootstrap_field form.test inline_wrapper_class='foo' %}", context={"form": form}),
            (
                '<div class="django_bootstrap5-req mb-3">'
                '<label for="id_test" class="form-label">Test</label>'
                '<input class="form-control" id="id_test" name="test" placeholder="Test" required type="text">'
                "</div>"
            ),
        )
        # wrapper_class replaces the default "mb-3".
        self.assertHTMLEqual(
            self.render("{% bootstrap_field form.test wrapper_class='foo' %}", context={"form": form}),
            (
                '<div class="django_bootstrap5-req foo">'
                '<label for="id_test" class="form-label">Test</label>'
                '<input class="form-control" id="id_test" name="test" placeholder="Test" required type="text">'
                "</div>"
            ),
        )
        # wrapper_class=None drops the wrapper class entirely.
        self.assertHTMLEqual(
            self.render("{% bootstrap_field form.test wrapper_class=None %}", context={"form": form}),
            (
                '<div class="django_bootstrap5-req">'
                '<label for="id_test" class="form-label">Test</label>'
                '<input class="form-control" id="id_test" name="test" placeholder="Test" required type="text">'
                "</div>"
            ),
        )

    def test_inline_wrapper_class(self):
        """Test field with default CharField widget."""
        form = CharFieldTestForm()
        # Baseline inline layout: "col-12" wrapper, visually-hidden label.
        self.assertHTMLEqual(
            self.render("{% bootstrap_field form.test layout='inline' %}", context={"form": form}),
            (
                '<div class="col-12 django_bootstrap5-req">'
                '<label class="visually-hidden" for="id_test">Test</label>'
                '<input type="text" name="test" class="form-control" placeholder="Test" required id="id_test">'
                "</div>"
            ),
        )
        # wrapper_class has no effect in the inline layout.
        self.assertHTMLEqual(
            self.render("{% bootstrap_field form.test layout='inline' wrapper_class='foo' %}", context={"form": form}),
            (
                '<div class="col-12 django_bootstrap5-req">'
                '<label class="visually-hidden" for="id_test">Test</label>'
                '<input type="text" name="test" class="form-control" placeholder="Test" required id="id_test">'
                "</div>"
            ),
        )
        # inline_wrapper_class is appended to the inline wrapper.
        self.assertHTMLEqual(
            self.render(
                "{% bootstrap_field form.test layout='inline' inline_wrapper_class='foo' %}", context={"form": form}
            ),
            (
                '<div class="col-12 django_bootstrap5-req foo">'
                '<label class="visually-hidden" for="id_test">Test</label>'
                '<input type="text" name="test" class="form-control" placeholder="Test" required id="id_test">'
                "</div>"
            ),
        )
| 38.925532
| 119
| 0.537032
| 365
| 3,659
| 5.271233
| 0.131507
| 0.043659
| 0.08368
| 0.105509
| 0.890333
| 0.887214
| 0.887214
| 0.887214
| 0.865385
| 0.862266
| 0
| 0.005913
| 0.306641
| 3,659
| 93
| 120
| 39.344086
| 0.752464
| 0.032523
| 0
| 0.567568
| 0
| 0.094595
| 0.5
| 0.143019
| 0
| 0
| 0
| 0
| 0.094595
| 1
| 0.027027
| false
| 0
| 0.027027
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dd44ea40086f4538136724946734eeeeabbb5c69
| 27
|
py
|
Python
|
api/tasks/__init__.py
|
C-Canchola/texel
|
9ebb12cf38b78608394f44767f55236845f0f9bc
|
[
"MIT"
] | null | null | null |
api/tasks/__init__.py
|
C-Canchola/texel
|
9ebb12cf38b78608394f44767f55236845f0f9bc
|
[
"MIT"
] | null | null | null |
api/tasks/__init__.py
|
C-Canchola/texel
|
9ebb12cf38b78608394f44767f55236845f0f9bc
|
[
"MIT"
] | null | null | null |
from . import task_manager
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
06d5871238bbd12943d1289a2c8967575b92cdbb
| 43
|
py
|
Python
|
run.py
|
williamjacksn/inventory
|
1422307b12db671c73b8bf43193151b3c19df9ee
|
[
"MIT"
] | 3
|
2017-11-26T17:04:46.000Z
|
2020-08-05T23:57:07.000Z
|
run.py
|
williamjacksn/inventory
|
1422307b12db671c73b8bf43193151b3c19df9ee
|
[
"MIT"
] | 17
|
2019-12-22T14:38:18.000Z
|
2022-03-25T23:01:49.000Z
|
run.py
|
williamjacksn/inventory
|
1422307b12db671c73b8bf43193151b3c19df9ee
|
[
"MIT"
] | 1
|
2019-09-09T09:48:39.000Z
|
2019-09-09T09:48:39.000Z
|
"""Entry-point script: start the inventory application."""
import inventory.app

if __name__ == '__main__':
    # Guard the call so importing this module does not launch the app.
    inventory.app.main()
| 10.75
| 20
| 0.790698
| 6
| 43
| 5.666667
| 0.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 3
| 21
| 14.333333
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
06fa3683acad0246167de299a7577258d9b3f919
| 119
|
py
|
Python
|
rascil/processing_components/util/__init__.py
|
SKA-ScienceDataProcessor/rascil
|
bd3b47f779e18e184781e2928ad1539d1fdc1c9b
|
[
"Apache-2.0"
] | 7
|
2019-12-14T13:42:33.000Z
|
2022-01-28T03:31:45.000Z
|
rascil/processing_components/util/__init__.py
|
SKA-ScienceDataProcessor/rascil
|
bd3b47f779e18e184781e2928ad1539d1fdc1c9b
|
[
"Apache-2.0"
] | 6
|
2020-01-08T09:40:08.000Z
|
2020-06-11T14:56:13.000Z
|
rascil/processing_components/util/__init__.py
|
SKA-ScienceDataProcessor/rascil
|
bd3b47f779e18e184781e2928ad1539d1fdc1c9b
|
[
"Apache-2.0"
] | 3
|
2020-01-14T11:14:16.000Z
|
2020-09-15T05:21:06.000Z
|
from .array_functions import *
from .compass_bearing import *
from .coordinate_support import *
from .sizeof import *
| 19.833333
| 33
| 0.789916
| 15
| 119
| 6.066667
| 0.6
| 0.32967
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 119
| 5
| 34
| 23.8
| 0.892157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
660eab63eb3ca2052064ee9c7ef03b71f457fa7e
| 2,361
|
py
|
Python
|
PyBank/main.py
|
erictonian/python-challenge
|
04318a90e619f5944792672d2587d2f211367dda
|
[
"MIT"
] | null | null | null |
PyBank/main.py
|
erictonian/python-challenge
|
04318a90e619f5944792672d2587d2f211367dda
|
[
"MIT"
] | null | null | null |
PyBank/main.py
|
erictonian/python-challenge
|
04318a90e619f5944792672d2587d2f211367dda
|
[
"MIT"
] | null | null | null |
"""PyBank budget analysis.

Reads budget_data.csv (two columns: month label, integer profit/loss),
then reports:
  * total number of months
  * net total profit/losses
  * average month-over-month change
  * greatest increase / decrease in profits (month and amount)
The results are printed and written to pybank_results.txt.

The original version re-opened and re-scanned the CSV eight separate
times and never closed the output file (``f.close`` without parentheses
is a no-op). This version reads the data once and uses context managers
so every handle is closed; the printed/written output is unchanged.
"""
import csv

pybank = "budget_data.csv"

months = []
profits = []
with open(pybank, 'r', newline='') as csvfile:
    csvreader = csv.reader(csvfile, delimiter=',')
    next(csvreader)  # skip the header row
    for row in csvreader:
        months.append(str(row[0]))
        profits.append(int(row[1]))

count = len(profits)
total_sum = sum(profits)

# Month-over-month deltas; the average divides by the number of deltas
# (count - 1), matching the original computation.
dfrows = [nxt - cur for cur, nxt in zip(profits, profits[1:])]
dfrows_avg = sum(dfrows) / (count - 1)

max_val = max(profits)
min_val = min(profits)
max_month = [m for m, p in zip(months, profits) if p == max_val]
min_month = [m for m, p in zip(months, profits) if p == min_val]

print(f"Total Months: {count}")
print(f"Total Sum: ${total_sum}.00")
print(f"Average Change: ${round(dfrows_avg, 2)}")
print(f"Greatest Increase in Profits: {max_month[0]} (${max_val}.00)")
print(f"Greatest Decrease in Profits: {min_month[0]} (${min_val}.00)")

with open("pybank_results.txt", 'w') as f:
    f.write(f"Total Months: {count}\n")
    f.write(f"Total Sum: ${total_sum}.00\n")
    f.write(f"Average Change: ${round(dfrows_avg, 2)}\n")
    f.write(f"Greatest Increase in Profits: {max_month[0]} (${max_val}.00)\n")
    f.write(f"Greatest Decrease in Profits: {min_month[0]} (${min_val}.00)\n")
| 34.217391
| 80
| 0.643795
| 362
| 2,361
| 4.118785
| 0.157459
| 0.056338
| 0.075117
| 0.159624
| 0.834339
| 0.816231
| 0.792757
| 0.702884
| 0.702884
| 0.702884
| 0
| 0.017143
| 0.184668
| 2,361
| 69
| 81
| 34.217391
| 0.757403
| 0
| 0
| 0.54386
| 0
| 0
| 0.232007
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.017544
| 0.087719
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
661bc7e21529e78baaf002daea130f0c9404d9d0
| 139
|
py
|
Python
|
data_construction_allInOne/error.py
|
DiracSea/multi_linear_regression
|
ab047c2c0769e0389c5e01719f1afbc1db70beb0
|
[
"MIT"
] | null | null | null |
data_construction_allInOne/error.py
|
DiracSea/multi_linear_regression
|
ab047c2c0769e0389c5e01719f1afbc1db70beb0
|
[
"MIT"
] | null | null | null |
data_construction_allInOne/error.py
|
DiracSea/multi_linear_regression
|
ab047c2c0769e0389c5e01719f1afbc1db70beb0
|
[
"MIT"
] | null | null | null |
class EmptyRainfallError(ValueError):
    """ValueError subclass signalling an empty rainfall input series."""
class EmptyWaterlevelError(ValueError):
    """ValueError subclass signalling an empty water-level input series."""
class NoMethodError(ValueError):
    """ValueError subclass signalling that no usable method was selected."""
| 17.375
| 39
| 0.776978
| 12
| 139
| 9
| 0.5
| 0.388889
| 0.351852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158273
| 139
| 8
| 40
| 17.375
| 0.923077
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
b0758f8598d3ea2a1e90c9b938da03feb778cbd6
| 122
|
py
|
Python
|
data/hospital_level/raw/DH_hospital/download.py
|
666Chao666/covid19-severity-prediction
|
f7ef4ea5f3109fdb4246d2bf90d07fbf048d3706
|
[
"MIT"
] | 2
|
2020-05-15T14:42:02.000Z
|
2020-05-22T08:51:47.000Z
|
data_new/hospital_level/raw/DH_hospital/download.py
|
rahul263-stack/covid19-severity-prediction
|
f581adb2fccb12d5ab3f3c59ee120f484703edf5
|
[
"MIT"
] | null | null | null |
data_new/hospital_level/raw/DH_hospital/download.py
|
rahul263-stack/covid19-severity-prediction
|
f581adb2fccb12d5ab3f3c59ee120f484703edf5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
"""Download the Medicare hospital dataset to DH_hospital.csv."""
import os  # kept for compatibility; other code may expect this import
import subprocess

# subprocess.run with an argument list avoids routing the command through
# a shell (os.system did), and check=True surfaces a failed download
# instead of silently ignoring wget's exit status.
subprocess.run(
    ['wget',
     'https://data.medicare.gov/api/views/xubh-q36u/rows.csv',
     '-O', 'DH_hospital.csv'],
    check=True,
)
| 24.4
| 91
| 0.745902
| 22
| 122
| 4.090909
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.065574
| 122
| 4
| 92
| 30.5
| 0.763158
| 0.139344
| 0
| 0
| 0
| 0.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b090c6ef25b52c5b25c8e27f639cfd5284798c8e
| 37,258
|
py
|
Python
|
keras_contrib/applications/fully_convolutional_networks.py
|
ahundt/keras-contrib
|
65ade7af19c86f4b9888acfd2ed31245f4f5c770
|
[
"MIT"
] | 7
|
2017-07-22T09:05:44.000Z
|
2019-04-30T02:08:04.000Z
|
keras_contrib/applications/fully_convolutional_networks.py
|
ahundt/keras-contrib
|
65ade7af19c86f4b9888acfd2ed31245f4f5c770
|
[
"MIT"
] | 1
|
2017-12-26T02:59:59.000Z
|
2017-12-26T02:59:59.000Z
|
keras_contrib/applications/fully_convolutional_networks.py
|
ahundt/keras-contrib
|
65ade7af19c86f4b9888acfd2ed31245f4f5c770
|
[
"MIT"
] | 11
|
2017-07-06T14:11:51.000Z
|
2021-08-21T23:18:20.000Z
|
""" Fully Convolutional Networks
Based on the paper:
- [Fully Convolutional Networks for Semantic Segmentation](https://arxiv.org/abs/1605.06211)
Implementation adapted from [Keras-FCN](https://github.com/aurora95/Keras-FCN).
"""
import numpy as np
import matplotlib.pyplot as plt
import os
import sys
from keras import backend as K
from keras_contrib.applications import densenet
from keras.models import Model
from keras.regularizers import l2
from keras.layers import Conv2D
from keras.layers import BatchNormalization
from keras.layers import Activation
from keras.layers import MaxPooling2D
from keras.layers import Add
from keras.engine import Layer
from keras_applications.imagenet_utils import _obtain_input_shape
import keras.backend as K
import tensorflow as tf
def conv_relu(nb_filter, nb_row, nb_col, subsample=(1, 1), border_mode='same', bias=True, w_decay=0.01):
    """Build a layer function applying Conv2D followed by ReLU.

    Args:
        nb_filter: number of convolution filters.
        nb_row, nb_col: kernel height and width.
        subsample: convolution strides.
        border_mode: padding mode passed to Conv2D.
        bias: whether the convolution uses a bias term.
        w_decay: l2 weight-decay factor applied to the kernel.

    Returns:
        A function mapping a tensor x to relu(conv(x)).
    """
    def f(x):
        with K.name_scope('conv_relu'):
            # Keras 2 keyword names: `strides`, `kernel_regularizer`,
            # `padding` (the original mixed in the removed Keras 1 names
            # `stride`, `W_regularizer`, `border_mode`, which Conv2D
            # rejects with a TypeError).
            x = Conv2D(filters=nb_filter, kernel_size=(nb_row, nb_col), strides=subsample, use_bias=bias,
                       kernel_initializer="he_normal", kernel_regularizer=l2(w_decay), padding=border_mode)(x)
            x = Activation("relu")(x)
        return x
    return f
def conv_bn(nb_filter, nb_row, nb_col, subsample=(1, 1), border_mode='same', bias=True, w_decay=0.01):
    """Build a layer function applying Conv2D followed by BatchNormalization.

    Args:
        nb_filter: number of convolution filters.
        nb_row, nb_col: kernel height and width.
        subsample: convolution strides.
        border_mode: padding mode passed to Conv2D.
        bias: whether the convolution uses a bias term.
        w_decay: l2 weight-decay factor applied to the kernel.

    Returns:
        A function mapping a tensor x to bn(conv(x)).
    """
    def f(x):
        with K.name_scope('conv_bn'):
            # Keras 2 keyword names (original used removed Keras 1 names
            # `stride`, `W_regularizer`, `border_mode`).
            x = Conv2D(filters=nb_filter, kernel_size=(nb_row, nb_col), strides=subsample, use_bias=bias,
                       kernel_initializer="he_normal", kernel_regularizer=l2(w_decay), padding=border_mode)(x)
            # `mode` was removed from BatchNormalization in Keras 2.
            x = BatchNormalization(axis=-1)(x)
        return x
    return f
def conv_bn_relu(nb_filter, nb_row, nb_col, subsample=(1, 1), border_mode='same', bias=True, w_decay=0.01):
    """Build a layer function applying Conv2D, BatchNormalization, ReLU.

    Args:
        nb_filter: number of convolution filters.
        nb_row, nb_col: kernel height and width.
        subsample: convolution strides.
        border_mode: padding mode passed to Conv2D.
        bias: whether the convolution uses a bias term.
        w_decay: l2 weight-decay factor applied to the kernel.

    Returns:
        A function mapping a tensor x to relu(bn(conv(x))).
    """
    def f(x):
        with K.name_scope('conv_bn_relu'):
            # Keras 2 keyword names (original used removed Keras 1 names
            # `stride`, `W_regularizer`, `border_mode`).
            x = Conv2D(filters=nb_filter, kernel_size=(nb_row, nb_col), strides=subsample, use_bias=bias,
                       kernel_initializer="he_normal", kernel_regularizer=l2(w_decay), padding=border_mode)(x)
            # `mode` was removed from BatchNormalization in Keras 2.
            x = BatchNormalization(axis=-1)(x)
            x = Activation("relu")(x)
        return x
    return f
def bn_relu_conv(nb_filter, nb_row, nb_col, subsample=(1, 1), border_mode='same', bias=True, w_decay=0.01):
    """Build a pre-activation layer function: BatchNormalization, ReLU, Conv2D.

    Args:
        nb_filter: number of convolution filters.
        nb_row, nb_col: kernel height and width.
        subsample: convolution strides.
        border_mode: padding mode passed to Conv2D.
        bias: whether the convolution uses a bias term.
        w_decay: l2 weight-decay factor applied to the kernel.

    Returns:
        A function mapping a tensor x to conv(relu(bn(x))).
    """
    def f(x):
        with K.name_scope('bn_relu_conv'):
            # `mode` was removed from BatchNormalization in Keras 2.
            x = BatchNormalization(axis=-1)(x)
            x = Activation("relu")(x)
            # Keras 2 keyword names (original used removed Keras 1 names
            # `stride`, `W_regularizer`, `border_mode`).
            x = Conv2D(filters=nb_filter, kernel_size=(nb_row, nb_col), strides=subsample, use_bias=bias,
                       kernel_initializer="he_normal", kernel_regularizer=l2(w_decay), padding=border_mode)(x)
        return x
    return f
def atrous_conv_bn(nb_filter, nb_row, nb_col, atrous_rate=(2, 2), subsample=(1, 1), border_mode='same', bias=True, w_decay=0.01):
    """Build a layer function: dilated (atrous) Conv2D then BatchNormalization.

    Args:
        nb_filter: number of convolution filters.
        nb_row, nb_col: kernel height and width.
        atrous_rate: dilation rate of the convolution.
        subsample: convolution strides.
        border_mode: padding mode passed to Conv2D.
        bias: whether the convolution uses a bias term.
        w_decay: l2 weight-decay factor applied to the kernel.

    Returns:
        A function mapping a tensor x to bn(atrous_conv(x)).
    """
    def f(x):
        with K.name_scope('atrous_conv_bn'):
            # `strides` is the Keras 2 name (original passed the removed
            # Keras 1 keyword `stride`, which Conv2D rejects).
            x = Conv2D(filters=nb_filter, kernel_size=(nb_row, nb_col), dilation_rate=atrous_rate, strides=subsample, use_bias=bias,
                       kernel_initializer="he_normal", kernel_regularizer=l2(w_decay), padding=border_mode)(x)
            # `mode` was removed from BatchNormalization in Keras 2.
            x = BatchNormalization(axis=-1)(x)
        return x
    return f
def atrous_conv_bn_relu(nb_filter, nb_row, nb_col, atrous_rate=(2, 2), subsample=(1, 1), border_mode='same', bias=True, w_decay=0.01):
    """Build a layer function: dilated Conv2D, BatchNormalization, ReLU.

    Args:
        nb_filter: number of convolution filters.
        nb_row, nb_col: kernel height and width.
        atrous_rate: dilation rate of the convolution.
        subsample: convolution strides.
        border_mode: padding mode passed to Conv2D.
        bias: whether the convolution uses a bias term.
        w_decay: l2 weight-decay factor applied to the kernel.

    Returns:
        A function mapping a tensor x to relu(bn(atrous_conv(x))).
    """
    def f(x):
        with K.name_scope('atrous_conv_bn_relu'):
            # `strides` is the Keras 2 name (original passed the removed
            # Keras 1 keyword `stride`, which Conv2D rejects).
            x = Conv2D(filters=nb_filter, kernel_size=(nb_row, nb_col), dilation_rate=atrous_rate, strides=subsample, use_bias=bias,
                       kernel_initializer="he_normal", kernel_regularizer=l2(w_decay), padding=border_mode)(x)
            # `mode` was removed from BatchNormalization in Keras 2.
            x = BatchNormalization(axis=-1)(x)
            x = Activation("relu")(x)
        return x
    return f
def get_weights_path_vgg16():
    """Download (if not cached) and return the local path of the VGG16
    ImageNet weights file.

    Returns:
        str: path of the cached weights file.
    """
    # `get_file` was referenced but never imported at module level,
    # raising NameError at call time; import it locally here.
    from keras.utils.data_utils import get_file
    TF_WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5'
    weights_path = get_file(
        'vgg16_weights_tf_dim_ordering_tf_kernels.h5', TF_WEIGHTS_PATH, cache_subdir='models')
    return weights_path
def get_weights_path_resnet():
    """Download (if not cached) and return the local path of the ResNet50
    ImageNet weights file.

    Returns:
        str: path of the cached weights file.
    """
    # `get_file` was referenced but never imported at module level,
    # raising NameError at call time; import it locally here.
    from keras.utils.data_utils import get_file
    TF_WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_tf_dim_ordering_tf_kernels.h5'
    weights_path = get_file(
        'resnet50_weights_tf_dim_ordering_tf_kernels.h5', TF_WEIGHTS_PATH, cache_subdir='models')
    # Original forgot this return, so callers always received None.
    return weights_path
def resize_images_bilinear(X, height_factor=1, width_factor=1, target_height=None, target_width=None, data_format='default'):
    '''Resizes the images contained in a 4D tensor of shape
    - [batch, channels, height, width] (for 'channels_first' data_format)
    - [batch, height, width, channels] (for 'channels_last' data_format)
    by a factor of (height_factor, width_factor), or to an explicit
    (target_height, target_width) when both are given. Both factors should be
    positive integers.
    '''
    if data_format == 'default':
        data_format = K.image_data_format()
    if data_format == 'channels_first':
        original_shape = K.int_shape(X)
        if target_height and target_width:
            new_shape = tf.constant(
                np.array((target_height, target_width)).astype('int32'))
        else:
            # Dynamic spatial dims scaled by the integer factors.
            new_shape = tf.shape(X)[2:]
            new_shape *= tf.constant(
                np.array([height_factor, width_factor]).astype('int32'))
        # tf.image.resize_bilinear expects channels_last, so transpose
        # around the resize and back.
        X = K.permute_dimensions(X, [0, 2, 3, 1])
        X = tf.image.resize_bilinear(X, new_shape)
        X = K.permute_dimensions(X, [0, 3, 1, 2])
        if target_height and target_width:
            X.set_shape((None, None, target_height, target_width))
        else:
            # Bug fix: when a static dim is unknown, original_shape holds
            # None and `None * factor` raised TypeError; propagate None.
            new_height = original_shape[2] * height_factor if original_shape[2] is not None else None
            new_width = original_shape[3] * width_factor if original_shape[3] is not None else None
            X.set_shape((None, None, new_height, new_width))
        return X
    elif data_format == 'channels_last':
        original_shape = K.int_shape(X)
        if target_height and target_width:
            new_shape = tf.constant(
                np.array((target_height, target_width)).astype('int32'))
        else:
            new_shape = tf.shape(X)[1:3]
            new_shape *= tf.constant(
                np.array([height_factor, width_factor]).astype('int32'))
        X = tf.image.resize_bilinear(X, new_shape)
        if target_height and target_width:
            X.set_shape((None, target_height, target_width, None))
        else:
            # Same None-dim guard as the channels_first branch.
            new_height = original_shape[1] * height_factor if original_shape[1] is not None else None
            new_width = original_shape[2] * width_factor if original_shape[2] is not None else None
            X.set_shape((None, new_height, new_width, None))
        return X
    else:
        raise ValueError('Invalid data_format: ' + data_format)
class BilinearUpSampling2D(Layer):
    """Bilinear upsampling layer.

    Upsamples a 4D tensor either by integer factors (`size`) or to an
    explicit spatial shape (`target_size`, which takes precedence).
    TODO: remove and replace with UpSampling2D when https://github.com/keras-team/keras/pull/9303 is available
    """

    def __init__(self, size=(1, 1), target_size=None, data_format='default', **kwargs):
        if data_format == 'default':
            data_format = K.image_data_format()
        self.size = tuple(size)
        if target_size is not None:
            self.target_size = tuple(target_size)
        else:
            self.target_size = None
        # Bug fix: the message previously said '{tf, th}' although the
        # accepted values are the Keras 2 data_format strings.
        assert data_format in {
            'channels_last', 'channels_first'}, 'data_format must be in {channels_last, channels_first}'
        self.data_format = data_format
        self.input_spec = [InputSpec(ndim=4)]
        super(BilinearUpSampling2D, self).__init__(**kwargs)

    def compute_output_shape(self, input_shape):
        # Bug fix: the conditional previously sat *inside* int(...), so an
        # unknown (None) spatial dim evaluated int(None) and raised
        # TypeError; now None propagates as an unknown output dim.
        if self.data_format == 'channels_first':
            width = int(self.size[0] * input_shape[2]) if input_shape[2] is not None else None
            height = int(self.size[1] * input_shape[3]) if input_shape[3] is not None else None
            if self.target_size is not None:
                width = self.target_size[0]
                height = self.target_size[1]
            return (input_shape[0],
                    input_shape[1],
                    width,
                    height)
        elif self.data_format == 'channels_last':
            width = int(self.size[0] * input_shape[1]) if input_shape[1] is not None else None
            height = int(self.size[1] * input_shape[2]) if input_shape[2] is not None else None
            if self.target_size is not None:
                width = self.target_size[0]
                height = self.target_size[1]
            return (input_shape[0],
                    width,
                    height,
                    input_shape[3])
        else:
            raise Exception('Invalid data_format: ' + self.data_format)

    def call(self, x, mask=None):
        # target_size wins over the factor-based resize when both are set.
        if self.target_size is not None:
            return resize_images_bilinear(x, target_height=self.target_size[0], target_width=self.target_size[1], data_format=self.data_format)
        else:
            return resize_images_bilinear(x, height_factor=self.size[0], width_factor=self.size[1], data_format=self.data_format)

    def get_config(self):
        config = {'size': self.size, 'target_size': self.target_size}
        base_config = super(BilinearUpSampling2D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
# The original help functions from keras does not have weight regularizers, so I modified them.
# Also, I changed these two functions into functional style
def identity_block(kernel_size, filters, stage, block, weight_decay=0., batch_momentum=0.99):
    '''Residual block whose shortcut path has no convolution.

    # Arguments
        kernel_size: defualt 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
    '''
    def f(input_tensor):
        filters1, filters2, filters3 = filters
        # BN normalizes over channels: last axis for channels_last data.
        bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        # Main path: 1x1 reduce -> kxk -> 1x1 expand, each followed by BN.
        h = Conv2D(filters1, (1, 1), name=conv_name_base + '2a',
                   kernel_regularizer=l2(weight_decay))(input_tensor)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        h = Conv2D(filters2, (kernel_size, kernel_size), padding='same',
                   name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        h = Conv2D(filters3, (1, 1), name=conv_name_base + '2c',
                   kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c',
                               momentum=batch_momentum)(h)
        # Identity shortcut: add the unmodified input back in.
        h = Add()([h, input_tensor])
        return Activation('relu')(h)
    return f
def conv_block(kernel_size, filters, stage, block, weight_decay=0., strides=(2, 2), batch_momentum=0.99):
    '''Residual block with a projection convolution on the shortcut path.

    # Arguments
        kernel_size: defualt 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
    Note that from stage 3, the first conv layer at main path is with strides=(2,2)
    And the shortcut should have strides=(2,2) as well
    '''
    def f(input_tensor):
        filters1, filters2, filters3 = filters
        # BN normalizes over channels: last axis for channels_last data.
        bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        # Main path: strided 1x1 reduce -> kxk -> 1x1 expand, each with BN.
        h = Conv2D(filters1, (1, 1), strides=strides,
                   name=conv_name_base + '2a', kernel_regularizer=l2(weight_decay))(input_tensor)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        h = Conv2D(filters2, (kernel_size, kernel_size), padding='same',
                   name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        h = Conv2D(filters3, (1, 1), name=conv_name_base + '2c',
                   kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c',
                               momentum=batch_momentum)(h)
        # Projection shortcut matches the main path's stride and channel count.
        shortcut = Conv2D(filters3, (1, 1), strides=strides,
                          name=conv_name_base + '1', kernel_regularizer=l2(weight_decay))(input_tensor)
        shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1',
                                      momentum=batch_momentum)(shortcut)
        h = Add()([h, shortcut])
        return Activation('relu')(h)
    return f
# Atrous-Convolution version of residual blocks
def atrous_identity_block(kernel_size, filters, stage, block, weight_decay=0., atrous_rate=(2, 2), batch_momentum=0.99):
    '''Residual block without a shortcut convolution, using a dilated middle conv.

    # Arguments
        kernel_size: defualt 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
    '''
    def f(input_tensor):
        filters1, filters2, filters3 = filters
        # BN normalizes over channels: last axis for channels_last data.
        bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        h = Conv2D(filters1, (1, 1), name=conv_name_base + '2a',
                   kernel_regularizer=l2(weight_decay))(input_tensor)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        # Middle conv is dilated (atrous) to enlarge the receptive field
        # without additional downsampling.
        h = Conv2D(filters2, (kernel_size, kernel_size), dilation_rate=atrous_rate,
                   padding='same', name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        h = Conv2D(filters3, (1, 1), name=conv_name_base + '2c',
                   kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c',
                               momentum=batch_momentum)(h)
        # Identity shortcut: add the unmodified input back in.
        h = Add()([h, input_tensor])
        return Activation('relu')(h)
    return f
def atrous_conv_block(kernel_size, filters, stage, block, weight_decay=0., strides=(1, 1), atrous_rate=(2, 2), batch_momentum=0.99):
    '''Residual block with a projection shortcut, using a dilated middle conv.

    # Arguments
        kernel_size: defualt 3, the kernel size of middle conv layer at main path
        filters: list of integers, the nb_filters of 3 conv layer at main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
    '''
    def f(input_tensor):
        filters1, filters2, filters3 = filters
        # BN normalizes over channels: last axis for channels_last data.
        bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
        conv_name_base = 'res' + str(stage) + block + '_branch'
        bn_name_base = 'bn' + str(stage) + block + '_branch'
        h = Conv2D(filters1, (1, 1), strides=strides,
                   name=conv_name_base + '2a', kernel_regularizer=l2(weight_decay))(input_tensor)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        # Middle conv is dilated (atrous) to enlarge the receptive field
        # without additional downsampling.
        h = Conv2D(filters2, (kernel_size, kernel_size), padding='same', dilation_rate=atrous_rate,
                   name=conv_name_base + '2b', kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b',
                               momentum=batch_momentum)(h)
        h = Activation('relu')(h)
        h = Conv2D(filters3, (1, 1), name=conv_name_base + '2c',
                   kernel_regularizer=l2(weight_decay))(h)
        h = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c',
                               momentum=batch_momentum)(h)
        # Projection shortcut matches the main path's stride and channel count.
        shortcut = Conv2D(filters3, (1, 1), strides=strides,
                          name=conv_name_base + '1', kernel_regularizer=l2(weight_decay))(input_tensor)
        shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1',
                                      momentum=batch_momentum)(shortcut)
        h = Add()([h, shortcut])
        return Activation('relu')(h)
    return f
def top(x, input_shape, classes, activation, weight_decay):
    """Append the 1x1 classification head to a feature tensor.

    # Arguments
        x: feature tensor to classify.
        classes: number of output channels (segmentation classes).
        activation: when 'sigmoid', the output is flattened for a
            per-pixel multi-label loss; otherwise left as a feature map.
        weight_decay: l2 regularization factor for the classifier kernel.
    """
    x = Conv2D(classes, (1, 1), activation='linear',
               padding='same', kernel_regularizer=l2(weight_decay),
               use_bias=False)(x)
    if K.image_data_format() == 'channels_first':
        channel, row, col = input_shape
    else:
        row, col, channel = input_shape
    # TODO(ahundt) this is modified for the sigmoid case! also use loss_shape
    # Bug fix: `is` compares object identity, not string equality; with
    # interning quirks the old test could silently evaluate False.
    if activation == 'sigmoid':
        x = Reshape((row * col * classes,))(x)
    return x
def FCN_Vgg16_32s(input_shape=None, weight_decay=0., batch_momentum=0.9, batch_shape=None, classes=21):
    """FCN-32s semantic segmentation model on a VGG16 backbone.

    Builds the five VGG16 conv blocks, convolutionalized fc layers, a 1x1
    classifier and a 32x bilinear upsampling, then loads cached weights.
    """
    if batch_shape:
        img_input = Input(batch_shape=batch_shape)
        image_size = batch_shape[1:3]
    else:
        img_input = Input(shape=input_shape)
        image_size = input_shape[0:2]
    # VGG16 backbone described as (block index, filters, number of convs);
    # the generated layer names match the canonical VGG16 weight names.
    x = img_input
    for block_id, nb_filter, nb_conv in ((1, 64, 2), (2, 128, 2),
                                         (3, 256, 3), (4, 512, 3), (5, 512, 3)):
        for conv_id in range(1, nb_conv + 1):
            x = Conv2D(nb_filter, (3, 3), activation='relu', padding='same',
                       name='block%d_conv%d' % (block_id, conv_id),
                       kernel_regularizer=l2(weight_decay))(x)
        x = MaxPooling2D((2, 2), strides=(2, 2),
                         name='block%d_pool' % block_id)(x)
    # Convolutional layers transfered from fully-connected layers
    x = Conv2D(4096, (7, 7), activation='relu', padding='same',
               name='fc1', kernel_regularizer=l2(weight_decay))(x)
    x = Dropout(0.5)(x)
    x = Conv2D(4096, (1, 1), activation='relu', padding='same',
               name='fc2', kernel_regularizer=l2(weight_decay))(x)
    x = Dropout(0.5)(x)
    # classifying layer
    x = Conv2D(classes, (1, 1), kernel_initializer='he_normal', activation='linear',
               padding='valid', strides=(1, 1), kernel_regularizer=l2(weight_decay))(x)
    x = BilinearUpSampling2D(size=(32, 32))(x)
    model = Model(img_input, x)
    weights_path = os.path.expanduser(os.path.join(
        '~', '.keras/models/fcn_vgg16_weights_tf_dim_ordering_tf_kernels.h5'))
    model.load_weights(weights_path, by_name=True)
    return model
def AtrousFCN_Vgg16_16s(input_shape=None, weight_decay=0., batch_momentum=0.9, batch_shape=None,
                        classes=21, weights_path=None, upsample=True, input_tensor=None, include_top=False,
                        dilation_rate=(2, 2), name=''):
    """Atrous (dilated) FCN-16s segmentation model on a VGG16 backbone.

    # Arguments
        input_shape: (rows, cols, channels); used when batch_shape is None.
        weight_decay: l2 regularization factor for every conv kernel.
        batch_momentum: kept for signature parity with the other builders.
        batch_shape: full (batch, rows, cols, channels); wins over input_shape.
        classes: number of segmentation classes.
        weights_path: optional explicit weights file; otherwise VGG16
            ImageNet weights are located/downloaded under ~/.keras/models.
        upsample: if True, bilinearly upsample logits to the input size.
        input_tensor: optional existing tensor to build on.
        include_top: if True, append the fc1/fc2/classifier head.
        dilation_rate: dilation of fc1; when it equals 1 the fifth pooling
            layer is applied as in plain VGG16.
        name: prefix applied to conv layer names (pool names stay fixed).
    """
    if batch_shape:
        img_input = Input(tensor=input_tensor, batch_shape=batch_shape)
        if upsample:
            image_size = batch_shape[1:3]
    else:
        img_input = Input(tensor=input_tensor, shape=input_shape)
        if upsample:
            image_size = input_shape[0:2]
    # Blocks 1-4 of the VGG16 backbone; generated names match the
    # canonical VGG16 weight names (modulo the optional `name` prefix).
    x = img_input
    for block_id, nb_filter, nb_conv in ((1, 64, 2), (2, 128, 2),
                                         (3, 256, 3), (4, 512, 3)):
        for conv_id in range(1, nb_conv + 1):
            x = Conv2D(nb_filter, (3, 3), activation='relu', padding='same',
                       name=name + 'block%d_conv%d' % (block_id, conv_id),
                       kernel_regularizer=l2(weight_decay))(x)
        x = MaxPooling2D((2, 2), strides=(2, 2),
                         name='block%d_pool' % block_id)(x)
    # Block 5 convolutions; pooling only applies when dilation is disabled.
    for conv_id in range(1, 4):
        x = Conv2D(512, (3, 3), activation='relu', padding='same',
                   name=name + 'block5_conv%d' % conv_id,
                   kernel_regularizer=l2(weight_decay))(x)
    if dilation_rate == 1 or dilation_rate == (1, 1):
        # Bug fix: this pool was also named 'block4_pool', colliding with
        # the stage-4 pool above; Keras raises on duplicate layer names
        # whenever this branch runs.
        x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)
    if include_top:
        # Convolutional layers transfered from fully-connected layers
        x = Conv2D(4096, (7, 7), activation='relu', padding='same', dilation_rate=dilation_rate,
                   name='fc1', kernel_regularizer=l2(weight_decay))(x)
        x = Dropout(0.5)(x)
        x = Conv2D(4096, (1, 1), activation='relu', padding='same',
                   name='fc2', kernel_regularizer=l2(weight_decay))(x)
        x = Dropout(0.5)(x)
        # classifying layer
        x = Conv2D(classes, (1, 1), kernel_initializer='he_normal', activation='linear',
                   padding='valid', strides=(1, 1), kernel_regularizer=l2(weight_decay))(x)
    if upsample:
        x = BilinearUpSampling2D(target_size=tuple(image_size))(x)
    model = Model(img_input, x)
    if weights_path is None:
        weights_path = os.path.expanduser(os.path.join(
            '~', '.keras/models/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5'))
        if not os.path.exists(weights_path):
            temp_weights_path = os.path.expanduser(os.path.join(
                '~', '.keras/models/fcn_vgg16_weights_tf_dim_ordering_tf_kernels.h5'))
            if not os.path.exists(temp_weights_path):
                # download the model if we don't have it yet
                temp_model = keras.applications.vgg16.VGG16(include_top=False)
                temp_model.save_weights(weights_path)
                del temp_model
            model.load_weights(weights_path, by_name=True, reshape=True)
            model.save_weights(weights_path)
        else:
            model.load_weights(weights_path)
    else:
        model.load_weights(weights_path)
    return model
def FCN_Resnet50_32s(input_shape=None, weight_decay=0., batch_momentum=0.9, batch_shape=None, classes=21):
    """FCN-32s semantic segmentation model on a ResNet50 backbone.

    Builds the standard ResNet50 stages, a 1x1 classifier and a 32x
    bilinear upsampling, then loads cached weights by layer name.
    """
    if batch_shape:
        img_input = Input(batch_shape=batch_shape)
        image_size = batch_shape[1:3]
    else:
        img_input = Input(shape=input_shape)
        image_size = input_shape[0:2]
    bn_axis = 3
    x = Conv2D(64, (7, 7), strides=(2, 2), padding='same',
               name='conv1', kernel_regularizer=l2(weight_decay))(img_input)
    x = BatchNormalization(axis=bn_axis, name='bn_conv1')(x)
    x = Activation('relu')(x)
    x = MaxPooling2D((3, 3), strides=(2, 2))(x)
    # ResNet50 stage plan: (stage, filters, total blocks, first-block strides).
    # Stage 2 keeps stride 1 because the max-pool above already downsampled.
    stage_plan = ((2, [64, 64, 256], 3, (1, 1)),
                  (3, [128, 128, 512], 4, (2, 2)),
                  (4, [256, 256, 1024], 6, (2, 2)),
                  (5, [512, 512, 2048], 3, (2, 2)))
    for stage, stage_filters, n_blocks, first_strides in stage_plan:
        x = conv_block(3, stage_filters, stage=stage, block='a',
                       strides=first_strides)(x)
        for i in range(1, n_blocks):
            # Blocks are labeled 'b', 'c', 'd', ... after the leading 'a'.
            x = identity_block(3, stage_filters, stage=stage,
                               block=chr(ord('a') + i))(x)
    # classifying layer
    x = Conv2D(classes, (1, 1), kernel_initializer='he_normal', activation='linear',
               padding='valid', strides=(1, 1), kernel_regularizer=l2(weight_decay))(x)
    x = BilinearUpSampling2D(size=(32, 32))(x)
    model = Model(img_input, x)
    weights_path = os.path.expanduser(os.path.join(
        '~', '.keras/models/fcn_resnet50_weights_tf_dim_ordering_tf_kernels.h5'))
    model.load_weights(weights_path, by_name=True)
    return model
def AtrousFCN_Resnet50_16s(input_shape=None, weight_decay=0., batch_momentum=0.9, batch_shape=None, classes=21,
                           include_top=False, upsample=False, input_tensor=None, weights_path=None):
    """Atrous (dilated) FCN-16s segmentation model on a ResNet50 backbone.

    Bug fix: `input_tensor` and `weights_path` were referenced in the body
    but never declared as parameters, so every call raised NameError. They
    are now optional keyword arguments (backward compatible, and matching
    AtrousFCN_Vgg16_16s above).

    # Arguments
        input_shape: (rows, cols, channels); used when batch_shape is None.
        weight_decay: l2 regularization factor for every conv kernel.
        batch_momentum: momentum for all BatchNormalization layers.
        batch_shape: full (batch, rows, cols, channels); wins over input_shape.
        classes: number of segmentation classes.
        include_top: kept for signature parity; the classifier is always built.
        upsample: if True, bilinearly upsample logits to the input size.
        input_tensor: optional existing tensor; its static shape is used as
            batch_shape when input_shape is not given.
        weights_path: optional explicit weights file; otherwise ResNet50
            ImageNet weights are located/downloaded under ~/.keras/models.
    """
    if input_shape is None and input_tensor is not None:
        batch_shape = keras.backend.int_shape(input_tensor)
    if batch_shape:
        img_input = Input(batch_shape=batch_shape)
        image_size = batch_shape[1:3]
    elif input_shape is not None:
        img_input = Input(shape=input_shape)
        image_size = input_shape[0:2]
    bn_axis = 3
    x = Conv2D(64, (7, 7), strides=(2, 2), padding='same',
               name='conv1', kernel_regularizer=l2(weight_decay))(img_input)
    x = BatchNormalization(axis=bn_axis, name='bn_conv1',
                           momentum=batch_momentum)(x)
    x = Activation('relu')(x)
    x = MaxPooling2D((3, 3), strides=(2, 2))(x)
    # Stages 2-4: standard ResNet50 residual stages.
    x = conv_block(3, [64, 64, 256], stage=2, block='a', weight_decay=weight_decay,
                   strides=(1, 1), batch_momentum=batch_momentum)(x)
    for block in ('b', 'c'):
        x = identity_block(3, [64, 64, 256], stage=2, block=block,
                           weight_decay=weight_decay, batch_momentum=batch_momentum)(x)
    x = conv_block(3, [128, 128, 512], stage=3, block='a',
                   weight_decay=weight_decay, batch_momentum=batch_momentum)(x)
    for block in ('b', 'c', 'd'):
        x = identity_block(3, [128, 128, 512], stage=3, block=block,
                           weight_decay=weight_decay, batch_momentum=batch_momentum)(x)
    x = conv_block(3, [256, 256, 1024], stage=4, block='a',
                   weight_decay=weight_decay, batch_momentum=batch_momentum)(x)
    for block in ('b', 'c', 'd', 'e', 'f'):
        x = identity_block(3, [256, 256, 1024], stage=4, block=block,
                           weight_decay=weight_decay, batch_momentum=batch_momentum)(x)
    # Stage 5 uses atrous (dilated) convolutions so the output stride stays 16.
    x = atrous_conv_block(3, [512, 512, 2048], stage=5, block='a', weight_decay=weight_decay,
                          atrous_rate=(2, 2), batch_momentum=batch_momentum)(x)
    for block in ('b', 'c'):
        x = atrous_identity_block(3, [512, 512, 2048], stage=5, block=block, weight_decay=weight_decay,
                                  atrous_rate=(2, 2), batch_momentum=batch_momentum)(x)
    # classifying layer
    # x = Conv2D(classes, (3, 3), dilation_rate=(2, 2), kernel_initializer='normal', activation='linear', padding='same', strides=(1, 1), kernel_regularizer=l2(weight_decay))(x)
    x = Conv2D(classes, (1, 1), kernel_initializer='he_normal', activation='linear',
               padding='same', strides=(1, 1), kernel_regularizer=l2(weight_decay))(x)
    if upsample:
        x = BilinearUpSampling2D(target_size=tuple(image_size))(x)
    model = Model(img_input, x)
    if weights_path is None:
        weights_path = os.path.expanduser(os.path.join(
            '~', '.keras/models/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5'))
        if not os.path.exists(weights_path):
            temp_weights_path = os.path.expanduser(os.path.join(
                '~', '.keras/models/fcn_resnet50_weights_tf_dim_ordering_tf_kernels.h5'))
            if not os.path.exists(temp_weights_path):
                # download the model if we don't have it yet
                temp_model = keras.applications.resnet50.ResNet50(include_top=False)
                temp_model.save_weights(weights_path)
                del temp_model
            model.load_weights(weights_path, by_name=True, reshape=True)
            model.save_weights(weights_path)
        else:
            model.load_weights(weights_path)
    else:
        model.load_weights(weights_path)
    return model
def Atrous_DenseNet(input_shape=None, weight_decay=1E-4,
                    batch_momentum=0.9, batch_shape=None, classes=21,
                    include_top=False, activation='sigmoid'):
    """Atrous DenseNet segmentation model (delegates to the densenet module)."""
    # TODO(ahundt) pass the parameters but use defaults for now
    # Architecture hyperparameters shared by both construction paths below.
    dense_net_kwargs = dict(depth=None, nb_dense_block=3, growth_rate=32,
                            nb_filter=-1, nb_layers_per_block=[6, 12, 24, 16],
                            bottleneck=True, reduction=0.5, dropout_rate=0.2,
                            weight_decay=1E-4,
                            transition_dilation_rate=2,
                            transition_kernel_size=(1, 1),
                            transition_pooling=None)
    if include_top is True:
        # TODO(ahundt) Softmax is pre-applied, so need different train, inference, evaluate.
        # TODO(ahundt) for multi-label try per class sigmoid top as follows:
        # x = Reshape((row * col * classes))(x)
        # x = Activation('sigmoid')(x)
        # x = Reshape((row, col, classes))(x)
        return densenet.DenseNet(include_top=True, top='segmentation',
                                 weights=None, input_tensor=None,
                                 input_shape=input_shape, classes=classes,
                                 **dense_net_kwargs)
    # if batch_shape:
    #     img_input = Input(batch_shape=batch_shape)
    #     image_size = batch_shape[1:3]
    # else:
    #     img_input = Input(shape=input_shape)
    #     image_size = input_shape[0:2]
    input_shape = _obtain_input_shape(input_shape,
                                      default_size=32,
                                      min_size=16,
                                      data_format=K.image_data_format(),
                                      include_top=False)
    img_input = Input(shape=input_shape)
    x = densenet.__create_dense_net(classes, img_input,
                                    top='segmentation',
                                    input_shape=input_shape,
                                    include_top=include_top,
                                    **dense_net_kwargs)
    x = top(x, input_shape, classes, activation, weight_decay)
    model = Model(img_input, x, name='Atrous_DenseNet')
    # TODO(ahundt) add weight loading
    return model
def DenseNet_FCN(input_shape=None, weight_decay=1E-4,
                 batch_momentum=0.9, batch_shape=None, classes=21,
                 include_top=False, activation='sigmoid'):
    """Fully-convolutional DenseNet segmentation model (delegates to densenet)."""
    # Architecture hyperparameters shared by both construction paths below.
    fcn_kwargs = dict(nb_layers_per_block=[4, 5, 7, 10, 12, 15],
                      growth_rate=16,
                      dropout_rate=0.2)
    if include_top is True:
        # TODO(ahundt) Softmax is pre-applied, so need different train, inference, evaluate.
        # TODO(ahundt) for multi-label try per class sigmoid top as follows:
        # x = Reshape((row * col * classes))(x)
        # x = Activation('sigmoid')(x)
        # x = Reshape((row, col, classes))(x)
        return densenet.DenseNetFCN(input_shape=input_shape,
                                    weights=None, classes=classes,
                                    **fcn_kwargs)
    # if batch_shape:
    #     img_input = Input(batch_shape=batch_shape)
    #     image_size = batch_shape[1:3]
    # else:
    #     img_input = Input(shape=input_shape)
    #     image_size = input_shape[0:2]
    input_shape = _obtain_input_shape(input_shape,
                                      default_size=32,
                                      min_size=16,
                                      data_format=K.image_data_format(),
                                      include_top=False)
    img_input = Input(shape=input_shape)
    x = densenet.__create_fcn_dense_net(classes, img_input,
                                        input_shape=input_shape,
                                        include_top=include_top,
                                        **fcn_kwargs)
    x = top(x, input_shape, classes, activation, weight_decay)
    # TODO(ahundt) add weight loading
    model = Model(img_input, x, name='DenseNet_FCN')
    return model
| 46.983607
| 178
| 0.614982
| 5,005
| 37,258
| 4.354246
| 0.06993
| 0.00982
| 0.047079
| 0.059652
| 0.852934
| 0.83793
| 0.819208
| 0.812279
| 0.803653
| 0.798559
| 0
| 0.04587
| 0.25981
| 37,258
| 792
| 179
| 47.042929
| 0.744361
| 0.108916
| 0
| 0.650086
| 0
| 0.003431
| 0.06884
| 0.014193
| 0
| 0
| 0
| 0.002525
| 0.001715
| 1
| 0.058319
| false
| 0
| 0.02916
| 0
| 0.152659
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b0b3b1f8b04c735b0ed907a626e9ffae49327bbb
| 25
|
py
|
Python
|
robit/__init__.py
|
ToxicFrazzles/robit
|
5f727b9f191e06fe51e78a8eb75a65b94a571202
|
[
"MIT"
] | null | null | null |
robit/__init__.py
|
ToxicFrazzles/robit
|
5f727b9f191e06fe51e78a8eb75a65b94a571202
|
[
"MIT"
] | null | null | null |
robit/__init__.py
|
ToxicFrazzles/robit
|
5f727b9f191e06fe51e78a8eb75a65b94a571202
|
[
"MIT"
] | null | null | null |
from .robit import Robit
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b0e307cff66013a1b05ffa1a1b6b91ea97568b4f
| 89
|
py
|
Python
|
src/API/__init__.py
|
Public-Health-Bioinformatics/sequdas-upload
|
a22f090f9cd3b5ecfe0bae487016622b9b80651d
|
[
"MIT"
] | 9
|
2015-11-24T21:51:42.000Z
|
2020-10-21T20:16:24.000Z
|
src/API/__init__.py
|
Public-Health-Bioinformatics/sequdas-upload
|
a22f090f9cd3b5ecfe0bae487016622b9b80651d
|
[
"MIT"
] | 6
|
2016-09-13T20:38:57.000Z
|
2019-02-21T18:31:22.000Z
|
src/API/__init__.py
|
Public-Health-Bioinformatics/sequdas-upload
|
a22f090f9cd3b5ecfe0bae487016622b9b80651d
|
[
"MIT"
] | 1
|
2018-10-07T00:55:43.000Z
|
2018-10-07T00:55:43.000Z
|
from apiCalls import ApiCalls
from config import read_config_option, write_config_option
| 29.666667
| 58
| 0.88764
| 13
| 89
| 5.769231
| 0.538462
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101124
| 89
| 2
| 59
| 44.5
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b0fd004ad59e87a6ebd6c31a0a7a047d92874206
| 8,478
|
py
|
Python
|
tests/test_buildmaps.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 8
|
2019-05-06T11:42:41.000Z
|
2021-10-08T14:57:12.000Z
|
tests/test_buildmaps.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 75
|
2019-03-01T23:25:26.000Z
|
2022-01-29T21:40:27.000Z
|
tests/test_buildmaps.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 3
|
2020-01-30T19:10:19.000Z
|
2022-03-08T14:57:38.000Z
|
from __future__ import division, absolute_import, print_function
import unittest
import numpy.testing as testing
import numpy as np
import healpy as hp
from numpy import random
import healsparse
class BuildMapsTestCase(unittest.TestCase):
    def test_build_maps_single(self):
        """
        Test building a map for a single-value field
        """
        # Fixed seed so the randomly placed lookup positions are reproducible;
        # statement order below matters because each np.random call advances
        # the global RNG state.
        random.seed(seed=12345)
        nside_coverage = 32
        nside_map = 64
        n_rand = 1000
        # Random sky positions covering the full longitude/latitude range.
        ra = np.random.random(n_rand) * 360.0
        dec = np.random.random(n_rand) * 180.0 - 90.0
        # Create an empty map
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
        # Look up all the values, make sure they're all UNSEEN
        testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True), hp.UNSEEN)
        # Fail to append because of wrong dtype
        pixel = np.arange(4000, 20000)
        values = np.ones_like(pixel, dtype=np.float32)
        # float32 values into a float64 map must be rejected.
        self.assertRaises(ValueError, sparse_map.update_values_pix, pixel, values)
        # Append a bunch of pixels
        values = np.ones_like(pixel, dtype=np.float64)
        sparse_map.update_values_pix(pixel, values)
        # Make a healpix map for comparison
        hpmap = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        hpmap[pixel] = values
        # Convert (ra, dec) degrees to healpy's (theta, phi) convention to
        # cross-check lookups against the dense reference map.
        theta = np.radians(90.0 - dec)
        phi = np.radians(ra)
        ipnest_test = hp.ang2pix(nside_map, theta, phi, nest=True)
        testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True), hpmap[ipnest_test])
        # Replace the pixels
        values += 1
        sparse_map.update_values_pix(pixel, values)
        hpmap[pixel] = values
        testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True), hpmap[ipnest_test])
        # Replace and append more pixels
        # Note that these are lower-number pixels, so the map is out of order
        pixel2 = np.arange(3000) + 2000
        values2 = np.ones_like(pixel2, dtype=np.float64)
        sparse_map.update_values_pix(pixel2, values2)
        hpmap[pixel2] = values2
        testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True), hpmap[ipnest_test])
        # Test making empty maps
        # make_empty_like with no overrides must copy all map properties.
        sparse_map2 = healsparse.HealSparseMap.make_empty_like(sparse_map)
        self.assertEqual(sparse_map2.nside_coverage, sparse_map.nside_coverage)
        self.assertEqual(sparse_map2.nside_sparse, sparse_map.nside_sparse)
        self.assertEqual(sparse_map2.dtype, sparse_map.dtype)
        self.assertEqual(sparse_map2._sentinel, sparse_map._sentinel)
        # With preallocated coverage pixels the sparse storage should hold
        # the two requested coverage pixels plus the overflow slot, all
        # filled with the sentinel value.
        sparse_map2b = healsparse.HealSparseMap.make_empty_like(sparse_map, cov_pixels=[0, 2])
        self.assertEqual(sparse_map2b.nside_coverage, sparse_map.nside_coverage)
        self.assertEqual(sparse_map2b.nside_sparse, sparse_map.nside_sparse)
        self.assertEqual(sparse_map2b.dtype, sparse_map.dtype)
        self.assertEqual(sparse_map2b._sentinel, sparse_map._sentinel)
        self.assertEqual(len(sparse_map2b._sparse_map),
                         sparse_map2._cov_map.nfine_per_cov*3)
        testing.assert_array_equal(sparse_map2b._sparse_map, sparse_map._sentinel)
        # Each override (nside_coverage, nside_sparse, dtype) must apply
        # while all other properties are copied unchanged.
        sparse_map2 = healsparse.HealSparseMap.make_empty_like(sparse_map, nside_coverage=16)
        self.assertEqual(sparse_map2.nside_coverage, 16)
        self.assertEqual(sparse_map2.nside_sparse, sparse_map.nside_sparse)
        self.assertEqual(sparse_map2.dtype, sparse_map.dtype)
        self.assertEqual(sparse_map2._sentinel, sparse_map._sentinel)
        sparse_map2 = healsparse.HealSparseMap.make_empty_like(sparse_map, nside_sparse=128)
        self.assertEqual(sparse_map2.nside_coverage, sparse_map.nside_coverage)
        self.assertEqual(sparse_map2.nside_sparse, 128)
        self.assertEqual(sparse_map2.dtype, sparse_map.dtype)
        self.assertEqual(sparse_map2._sentinel, sparse_map._sentinel)
        sparse_map2 = healsparse.HealSparseMap.make_empty_like(sparse_map, dtype=np.int32, sentinel=0)
        self.assertEqual(sparse_map2.nside_coverage, sparse_map.nside_coverage)
        self.assertEqual(sparse_map2.nside_sparse, sparse_map.nside_sparse)
        self.assertEqual(sparse_map2.dtype, np.int32)
def test_build_maps_recarray(self):
    """
    Test building a sparse map with a recarray dtype: empty-map lookups,
    updating/replacing/appending pixel values per column, and make_empty_like.
    """
    # BUGFIX: was `random.seed(seed=12345)` -- the stdlib random.seed() has no
    # `seed` keyword (TypeError), and every draw below comes from np.random
    # anyway, so seed numpy's global RNG directly for reproducible positions.
    np.random.seed(seed=12345)
    nside_coverage = 32
    nside_map = 64

    # Random sky positions (ra/dec in degrees) used for all value lookups.
    n_rand = 1000
    ra = np.random.random(n_rand) * 360.0
    dec = np.random.random(n_rand) * 180.0 - 90.0

    # Creating a recarray map without naming a primary column must fail.
    dtype = [('col1', 'f4'), ('col2', 'f8')]
    self.assertRaises(RuntimeError, healsparse.HealSparseMap.make_empty, nside_coverage,
                      nside_map, dtype)
    sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, primary='col1')

    # Look up all the values; every column of an empty map reads as UNSEEN.
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col1'], hp.UNSEEN)
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col2'], hp.UNSEEN)

    pixel = np.arange(4000, 20000)
    values = np.zeros_like(pixel, dtype=dtype)
    values['col1'] = 1.0
    values['col2'] = 2.0
    sparse_map.update_values_pix(pixel, values)

    # Parallel plain-healpix maps (one per column) for comparison.
    hpmapCol1 = np.zeros(hp.nside2npix(nside_map), dtype=np.float32) + hp.UNSEEN
    hpmapCol2 = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
    hpmapCol1[pixel] = values['col1']
    hpmapCol2[pixel] = values['col2']
    theta = np.radians(90.0 - dec)
    phi = np.radians(ra)
    ipnest_test = hp.ang2pix(nside_map, theta, phi, nest=True)
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col1'],
                                hpmapCol1[ipnest_test])
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col2'],
                                hpmapCol2[ipnest_test])

    # Replace the same pixels with new values.
    values['col1'] += 1
    values['col2'] += 1
    sparse_map.update_values_pix(pixel, values)
    hpmapCol1[pixel] = values['col1']
    hpmapCol2[pixel] = values['col2']
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col1'],
                                hpmapCol1[ipnest_test])
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col2'],
                                hpmapCol2[ipnest_test])

    # Replace and append more pixels.  These are lower-numbered pixels, so the
    # sparse map's internal storage is deliberately out of order.
    pixel2 = np.arange(3000) + 2000
    values2 = np.zeros_like(pixel2, dtype=dtype)
    values2['col1'] = 1.0
    values2['col2'] = 2.0
    sparse_map.update_values_pix(pixel2, values2)
    hpmapCol1[pixel2] = values2['col1']
    hpmapCol2[pixel2] = values2['col2']
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col1'],
                                hpmapCol1[ipnest_test])
    testing.assert_almost_equal(sparse_map.get_values_pos(ra, dec, lonlat=True)['col2'],
                                hpmapCol2[ipnest_test])

    # Test making empty maps: make_empty_like must clone geometry, dtype,
    # and sentinel.
    sparse_map2 = healsparse.HealSparseMap.make_empty_like(sparse_map)
    self.assertEqual(sparse_map2.nside_coverage, sparse_map.nside_coverage)
    self.assertEqual(sparse_map2.nside_sparse, sparse_map.nside_sparse)
    self.assertEqual(sparse_map2.dtype, sparse_map.dtype)
    self.assertEqual(sparse_map2._sentinel, sparse_map._sentinel)

    # With cov_pixels pre-allocated, the sparse array holds the two requested
    # coverage pixels plus one extra fine-pixel block (presumably the
    # overflow/sentinel block -- TODO confirm against healsparse internals).
    sparse_map2b = healsparse.HealSparseMap.make_empty_like(sparse_map, cov_pixels=[0, 2])
    self.assertEqual(sparse_map2b.nside_coverage, sparse_map.nside_coverage)
    self.assertEqual(sparse_map2b.nside_sparse, sparse_map.nside_sparse)
    self.assertEqual(sparse_map2b.dtype, sparse_map.dtype)
    self.assertEqual(sparse_map2b._sentinel, sparse_map._sentinel)
    self.assertEqual(len(sparse_map2b._sparse_map),
                     sparse_map2._cov_map.nfine_per_cov*3)
    # Primary column is filled with the map sentinel; the non-primary float
    # column reads as UNSEEN.
    testing.assert_array_equal(sparse_map2b._sparse_map['col1'], sparse_map._sentinel)
    testing.assert_array_equal(sparse_map2b._sparse_map['col2'], hp.UNSEEN)
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
| 45.827027
| 106
| 0.67858
| 1,092
| 8,478
| 4.994505
| 0.132784
| 0.09736
| 0.10396
| 0.087092
| 0.845251
| 0.839384
| 0.828933
| 0.79703
| 0.706637
| 0.695086
| 0
| 0.035741
| 0.224463
| 8,478
| 184
| 107
| 46.076087
| 0.793764
| 0.075607
| 0
| 0.627907
| 0
| 0
| 0.014405
| 0
| 0
| 0
| 0
| 0
| 0.356589
| 1
| 0.015504
| false
| 0
| 0.054264
| 0
| 0.077519
| 0.007752
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b0233970b3c27cfa9c2c7592b2e023a2a52b57ce
| 135
|
py
|
Python
|
src/modules/__init__.py
|
Nobregaigor/Optical-Mark-Registration-PDF-Reader
|
0c6d8d652fa4e52d04098787c022b7235cd2b8ac
|
[
"MIT"
] | null | null | null |
src/modules/__init__.py
|
Nobregaigor/Optical-Mark-Registration-PDF-Reader
|
0c6d8d652fa4e52d04098787c022b7235cd2b8ac
|
[
"MIT"
] | null | null | null |
src/modules/__init__.py
|
Nobregaigor/Optical-Mark-Registration-PDF-Reader
|
0c6d8d652fa4e52d04098787c022b7235cd2b8ac
|
[
"MIT"
] | null | null | null |
from .check_unumber import *
from .check_email import *
from .rename_pdfs import *
from .test_email import *
from .send_emails import *
| 27
| 28
| 0.785185
| 20
| 135
| 5.05
| 0.5
| 0.39604
| 0.29703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140741
| 135
| 5
| 29
| 27
| 0.87069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
c65d1896c0352cbebf8019534651e9a56036fa8e
| 504
|
py
|
Python
|
espnet2/gan_tts/hifigan/__init__.py
|
texpomru13/espnet
|
7ef005e832e2fb033f356c16f54e0f08762fb4b0
|
[
"Apache-2.0"
] | 5,053
|
2017-12-13T06:21:41.000Z
|
2022-03-31T13:38:29.000Z
|
espnet2/gan_tts/hifigan/__init__.py
|
texpomru13/espnet
|
7ef005e832e2fb033f356c16f54e0f08762fb4b0
|
[
"Apache-2.0"
] | 3,666
|
2017-12-14T05:58:50.000Z
|
2022-03-31T22:11:49.000Z
|
espnet2/gan_tts/hifigan/__init__.py
|
texpomru13/espnet
|
7ef005e832e2fb033f356c16f54e0f08762fb4b0
|
[
"Apache-2.0"
] | 1,709
|
2017-12-13T01:02:42.000Z
|
2022-03-31T11:57:45.000Z
|
from espnet2.gan_tts.hifigan.hifigan import HiFiGANGenerator # NOQA
from espnet2.gan_tts.hifigan.hifigan import HiFiGANMultiPeriodDiscriminator # NOQA
from espnet2.gan_tts.hifigan.hifigan import HiFiGANMultiScaleDiscriminator # NOQA
from espnet2.gan_tts.hifigan.hifigan import ( # NOQA
HiFiGANMultiScaleMultiPeriodDiscriminator, # NOQA
)
from espnet2.gan_tts.hifigan.hifigan import HiFiGANPeriodDiscriminator # NOQA
from espnet2.gan_tts.hifigan.hifigan import HiFiGANScaleDiscriminator # NOQA
| 56
| 83
| 0.843254
| 55
| 504
| 7.618182
| 0.236364
| 0.157518
| 0.200477
| 0.243437
| 0.577566
| 0.577566
| 0.577566
| 0.48926
| 0
| 0
| 0
| 0.013245
| 0.10119
| 504
| 8
| 84
| 63
| 0.9117
| 0.06746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c6a23b3dd64ffdfe9143394f3d5d5ef32284969e
| 1,022
|
py
|
Python
|
tests/test_permission.py
|
dmelo/Flask-ACL
|
7339b89f96ad8686d1526e25c138244ad912e12d
|
[
"BSD-3-Clause"
] | 25
|
2015-02-06T20:00:16.000Z
|
2022-03-17T15:59:47.000Z
|
tests/test_permission.py
|
dmelo/Flask-ACL
|
7339b89f96ad8686d1526e25c138244ad912e12d
|
[
"BSD-3-Clause"
] | 2
|
2015-03-30T06:03:27.000Z
|
2018-01-05T17:03:12.000Z
|
tests/test_permission.py
|
dmelo/Flask-ACL
|
7339b89f96ad8686d1526e25c138244ad912e12d
|
[
"BSD-3-Clause"
] | 17
|
2015-03-20T19:48:58.000Z
|
2020-03-29T15:47:42.000Z
|
from . import *
from flask_acl.permission import is_permission_in_set
class TestPermissions(TestCase):
    """is_permission_in_set with string, container, and callable permission sets."""

    def test_strings(self):
        # Exact string match only; no prefix/suffix matches.
        self.assertTrue(is_permission_in_set('xxx', 'xxx'))
        for near_miss in ('axxx', 'xxxb'):
            self.assertFalse(is_permission_in_set('xxx', near_miss))

    def test_containers(self):
        # Tuples, lists, and sets are all honoured via membership.
        for build in (tuple, list, set):
            self.assertTrue(is_permission_in_set('xxx', build(['a', 'xxx', 'b'])))
            self.assertFalse(is_permission_in_set('xxx', build(['a', 'b'])))

    def test_callables(self):
        # A callable set is consulted as a predicate on the permission string.
        self.assertTrue(is_permission_in_set('xxx', lambda p: True))
        self.assertTrue(is_permission_in_set('xxx', lambda p: 'x' in p))
        self.assertFalse(is_permission_in_set('xxx', lambda p: 'X' in p))
| 40.88
| 76
| 0.655577
| 143
| 1,022
| 4.384615
| 0.195804
| 0.248804
| 0.290271
| 0.352472
| 0.76236
| 0.76236
| 0.76236
| 0.614035
| 0.54067
| 0.452951
| 0
| 0
| 0.169276
| 1,022
| 24
| 77
| 42.583333
| 0.738516
| 0
| 0
| 0
| 0
| 0
| 0.068493
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.166667
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c6a51c5697d169beff79ebf57f4064354d0ce4ea
| 28
|
py
|
Python
|
example/boot.py
|
jdtsmith/autoftp
|
5d06749ba6132f64cbb2231334553ea17c814f41
|
[
"MIT"
] | 1
|
2021-03-14T03:12:12.000Z
|
2021-03-14T03:12:12.000Z
|
example/boot.py
|
jdtsmith/autoftp
|
5d06749ba6132f64cbb2231334553ea17c814f41
|
[
"MIT"
] | null | null | null |
example/boot.py
|
jdtsmith/autoftp
|
5d06749ba6132f64cbb2231334553ea17c814f41
|
[
"MIT"
] | null | null | null |
# Announce startup on the console; presumably a device boot.py that runs at
# reset -- TODO confirm against the deployment target.
print("MyModule Starting!")
| 14
| 27
| 0.75
| 3
| 28
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 28
| 1
| 28
| 28
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
c6b05e8ef9e2ab103120c4a9405847a2dc073555
| 39
|
py
|
Python
|
tests/environment_tests/dataset_handler/__init__.py
|
doslindos/ml_crapwrap
|
f9daa2904234492921c6c344bfcd24992e2ff421
|
[
"MIT"
] | null | null | null |
tests/environment_tests/dataset_handler/__init__.py
|
doslindos/ml_crapwrap
|
f9daa2904234492921c6c344bfcd24992e2ff421
|
[
"MIT"
] | null | null | null |
tests/environment_tests/dataset_handler/__init__.py
|
doslindos/ml_crapwrap
|
f9daa2904234492921c6c344bfcd24992e2ff421
|
[
"MIT"
] | null | null | null |
from .util import DatasetHandlerTester
| 19.5
| 38
| 0.871795
| 4
| 39
| 8.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05b50cf561283569860addc2e70867e8bb843ad7
| 122
|
py
|
Python
|
src/dlqmc/tools.py
|
noegroup/dlqmc-project
|
ed7561ec0156df6d6309e49c1276646173ec8641
|
[
"MIT"
] | 3
|
2020-12-22T16:26:36.000Z
|
2021-08-11T16:54:46.000Z
|
src/dlqmc/tools.py
|
noegroup/dlqmc-project
|
ed7561ec0156df6d6309e49c1276646173ec8641
|
[
"MIT"
] | 5
|
2020-07-26T23:13:16.000Z
|
2020-07-26T23:13:45.000Z
|
src/dlqmc/tools.py
|
noegroup/dlqmc-project
|
ed7561ec0156df6d6309e49c1276646173ec8641
|
[
"MIT"
] | 1
|
2021-06-18T05:00:39.000Z
|
2021-06-18T05:00:39.000Z
|
import uncertainties
def short_fmt(x):
    """Format *x* with shorthand uncertainty notation ('{:S}') when it is an
    uncertainties AffineScalarFunc; return any other value unchanged."""
    if isinstance(x, uncertainties.core.AffineScalarFunc):
        return f'{x:S}'
    return x
| 20.333333
| 80
| 0.754098
| 18
| 122
| 5.055556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139344
| 122
| 5
| 81
| 24.4
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.040984
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
05cae0dbf5c34f27a96d0017a1f5a55c10de89ff
| 74
|
py
|
Python
|
besspin/cyberPhys/cyberphyslib/cyberphyslib/canlib/__init__.py
|
mikkowus/BESSPIN-Tool-Suite
|
e87e9abb1156a8627aacc3272f1925b034129146
|
[
"Apache-2.0"
] | null | null | null |
besspin/cyberPhys/cyberphyslib/cyberphyslib/canlib/__init__.py
|
mikkowus/BESSPIN-Tool-Suite
|
e87e9abb1156a8627aacc3272f1925b034129146
|
[
"Apache-2.0"
] | null | null | null |
besspin/cyberPhys/cyberphyslib/cyberphyslib/canlib/__init__.py
|
mikkowus/BESSPIN-Tool-Suite
|
e87e9abb1156a8627aacc3272f1925b034129146
|
[
"Apache-2.0"
] | null | null | null |
from .canlib import *
from .canspecs import *
from .componentids import *
| 18.5
| 27
| 0.756757
| 9
| 74
| 6.222222
| 0.555556
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 3
| 28
| 24.666667
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05f416b3b4673cf189674c943bdc0c5032d17a65
| 25,179
|
py
|
Python
|
tests/test_time.py
|
colinahill/terrapyn
|
77a2bba2b6365d289d894f0aa722880ae36c52bd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_time.py
|
colinahill/terrapyn
|
77a2bba2b6365d289d894f0aa722880ae36c52bd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_time.py
|
colinahill/terrapyn
|
77a2bba2b6365d289d894f0aa722880ae36c52bd
|
[
"BSD-3-Clause"
] | null | null | null |
import datetime as dt
import unittest
import zoneinfo
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
import xarray as xr
from freezegun import freeze_time
import terrapyn as tp
# Repository root (two levels up from this test file) and the test-data
# directory shipped alongside the tests.
PACKAGE_ROOT_DIR = Path(__file__).resolve().parents[1]
TEST_DATA_PATH = PACKAGE_ROOT_DIR.joinpath("tests", "data")
# Shorthand for pandas label-based multi-index slicing.
idx = pd.IndexSlice
class TestConvertDatetime64(unittest.TestCase):
    """Conversion from numpy.datetime64 to datetime.datetime."""

    def test_object_type(self):
        source = np.datetime64("2013-04-05 07:12:34.056789")
        expected = dt.datetime(2013, 4, 5, 7, 12, 34, 56789)
        self.assertEqual(tp.time.datetime64_to_datetime(source), expected)
class TestConvertDatetime(unittest.TestCase):
    """Conversion from datetime.datetime to numpy.datetime64."""

    def test_object_type(self):
        source = dt.datetime(2013, 4, 5, 7, 12, 34, 123)
        expected = np.datetime64("2013-04-05 07:12:34.000123")
        self.assertEqual(tp.time.datetime_to_datetime64(source), expected)
class TestGetTimeFromData(unittest.TestCase):
    """get_time_from_data: extract a 'time' DatetimeIndex from many input types."""

    # Expected index shared by every assertion below.
    expected = pd.DatetimeIndex(
        ["2019-03-15", "2019-03-16", "2019-03-17"], dtype="datetime64[ns]", name="time", freq=None
    )
    # Fixture frame indexed by a ('time', 'id') MultiIndex.
    df = pd.DataFrame(
        {
            "time": expected,
            "id": [123, 456, 789],
            "val": [1, 3, 5],
        }
    ).set_index(["time", "id"])

    def test_dataframe(self):
        # DataFrame with 'time' as an ordinary column.
        results = tp.time.get_time_from_data(self.df.reset_index(drop=False))
        pd.testing.assert_index_equal(results, self.expected)

    def test_dataframe_time_column(self):
        # DataFrame with 'time' as an index level.
        results = tp.time.get_time_from_data(self.df)
        pd.testing.assert_index_equal(results, self.expected)

    def test_dataset(self):
        # xarray Dataset input.
        results = tp.time.get_time_from_data(self.df.to_xarray())
        pd.testing.assert_index_equal(results, self.expected)

    def test_list(self):
        # Plain list of datetime objects.
        results = tp.time.get_time_from_data(list(self.expected.to_pydatetime()))
        pd.testing.assert_index_equal(results, self.expected)

    def test_dataarray(self):
        # xarray DataArray input.
        results = tp.time.get_time_from_data(self.df.to_xarray()["val"])
        pd.testing.assert_index_equal(results, self.expected)

    def test_series_time_index(self):
        # Series whose MultiIndex carries the time level.
        results = tp.time.get_time_from_data(self.df["val"])
        pd.testing.assert_index_equal(results, self.expected)

    def test_series_time_column(self):
        # Series of datetime values.
        results = tp.time.get_time_from_data(pd.Series(self.expected))
        pd.testing.assert_index_equal(results, self.expected)

    def test_datetime(self):
        # A scalar datetime is wrapped in a single-element index named 'time'.
        results = tp.time.get_time_from_data(dt.datetime(2019, 3, 15))
        pd.testing.assert_index_equal(results, pd.DatetimeIndex([dt.datetime(2019, 3, 15)], name="time"))

    def test_ndarray(self):
        # numpy array of datetime64 values.
        results = tp.time.get_time_from_data(self.expected.to_numpy())
        pd.testing.assert_index_equal(results, self.expected)

    def test_datetimeindex(self):
        # A DatetimeIndex passes straight through.
        results = tp.time.get_time_from_data(self.expected)
        pd.testing.assert_index_equal(results, self.expected)

    def test_multiindex(self):
        # A MultiIndex containing a 'time' level.
        results = tp.time.get_time_from_data(self.df.index)
        pd.testing.assert_index_equal(results, self.expected)

    def test_missing_from_multiindex(self):
        # Asking for a level name the MultiIndex does not have raises ValueError.
        with self.assertRaises(ValueError):
            tp.time.get_time_from_data(self.df.index, time_dim="date")

    def test_invalid_datatype(self):
        # Unsupported input types raise TypeError.
        with self.assertRaises(TypeError):
            tp.time.get_time_from_data(1)
class TestGroupbyTime(unittest.TestCase):
    """groupby_time: grouping by calendar period on a 15-step daily dataset."""

    # On-disk fixture: 2x2 spatial grid, 15 daily time steps (the expected group
    # keys below imply dates in Feb/Mar 2019).
    ds = xr.open_dataset(TEST_DATA_PATH / "lat_2_lon_2_time_15_D_test_data.nc")

    def test_dataset_groupby_week(self):
        # Groups keyed by week number -> positional time indices.
        result = tp.time.groupby_time(self.ds, grouping="week")
        self.assertEqual(result.groups, {8: [0, 1, 2, 3, 4], 9: [5, 6, 7, 8, 9, 10, 11], 10: [12, 13, 14]})

    def test_dataarray_groupby_week(self):
        # Same grouping applied to a single DataArray.
        result = tp.time.groupby_time(self.ds["var"], grouping="week")
        self.assertEqual(result.groups, {8: [0, 1, 2, 3, 4], 9: [5, 6, 7, 8, 9, 10, 11], 10: [12, 13, 14]})

    def test_dataset_groupby_month(self):
        result = tp.time.groupby_time(self.ds, grouping="month")
        self.assertEqual(result.groups, {2: [0, 1, 2, 3, 4, 5, 6, 7, 8], 3: [9, 10, 11, 12, 13, 14]})

    def test_dataframe_groupby_pentad(self):
        # pandas path: compare per-group sums instead of group indices.
        result = tp.time.groupby_time(self.ds.to_dataframe(), grouping="pentad")
        np.testing.assert_almost_equal(result.sum().values, np.array([[272.59223017], [257.44398154], [295.72954042]]))

    def test_series_groupby_dekad(self):
        result = tp.time.groupby_time(self.ds.to_dataframe()["var"], grouping="dekad")
        np.testing.assert_almost_equal(result.sum().values, np.array([80.23334597, 412.85991647, 332.67248968]))

    def test_groupby_year(self):
        # All 15 steps fall in 2019.
        result = tp.time.groupby_time(self.ds, grouping="year")
        self.assertEqual(result.groups, {2019: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]})

    def test_groupby_dayofyear(self):
        result = tp.time.groupby_time(self.ds, grouping="dayofyear")
        self.assertEqual(result.groups[55], [4])

    def test_invalid_grouping(self):
        # Unknown grouping strings are rejected.
        with self.assertRaises(ValueError):
            tp.time.groupby_time(self.ds, grouping="10d")

    def test_groupby_other_keys(self):
        # Extra coordinate names are appended to the group key.
        result = tp.time.groupby_time(self.ds.to_dataframe(), grouping="year", other_grouping_keys="lon").groups[
            (2019, 30)
        ][0]
        self.assertEqual(result, (pd.Timestamp("2019-02-20 00:00:00"), 2, 30))

    def test_groupby_multiple_other_keys(self):
        # Multiple extra keys form a composite (year, lat, lon) group key.
        result = tp.time.groupby_time(
            self.ds.to_dataframe(), grouping="year", other_grouping_keys=["lat", "lon"]
        ).groups[(2019, 2, 30)][0]
        self.assertEqual(result, (pd.Timestamp("2019-02-20 00:00:00"), 2, 30))
@freeze_time("2012-01-14 11:12:13")
class TestDailyDateRange(unittest.TestCase):
    """daily_date_range with 'now' frozen at 2012-01-14 11:12:13."""

    def test_date_today(self):
        # Default range: just today at midnight.
        result = tp.time.daily_date_range()
        self.assertEqual(result, [dt.datetime(2012, 1, 14, 0, 0)])

    def test_delta_days(self):
        # Negative delta_days extends the range backwards from today.
        result = tp.time.daily_date_range(delta_days=-1)
        self.assertEqual(result, [dt.datetime(2012, 1, 13, 0, 0), dt.datetime(2012, 1, 14, 0, 0)])

    def test_delta_days_future(self):
        # Positive delta_days extends forwards.
        result = tp.time.daily_date_range(delta_days=1)
        self.assertEqual(result, [dt.datetime(2012, 1, 14, 0, 0), dt.datetime(2012, 1, 15, 0, 0)])

    def test_future_dates(self):
        # Explicit end_time with reset_time and an hours offset: every entry at 06:00.
        result = tp.time.daily_date_range(end_time=dt.datetime(2012, 1, 16), reset_time=True, hours=6)
        self.assertEqual(
            result, [dt.datetime(2012, 1, 14, 6, 0), dt.datetime(2012, 1, 15, 6, 0), dt.datetime(2012, 1, 16, 6, 0)]
        )
@freeze_time("2012-01-14 11:12:13")
class TestMonthlyDateRange(unittest.TestCase):
    """monthly_date_range with 'now' frozen at 2012-01-14 11:12:13."""

    def test_date_today_ignore_days(self):
        # Default: current month snapped to the 1st at midnight.
        result = tp.time.monthly_date_range()
        self.assertEqual(result, [dt.datetime(2012, 1, 1, 0, 0)])

    def test_date_today_include_days(self):
        # reset_time=False keeps the exact current timestamp.
        result = tp.time.monthly_date_range(reset_time=False)
        self.assertEqual(result, [dt.datetime(2012, 1, 14, 11, 12, 13)])

    def test_zero_delta_months(self):
        # delta_months=0 behaves like the default single-month range.
        result = tp.time.monthly_date_range(delta_months=0)
        self.assertEqual(result, [dt.datetime(2012, 1, 1, 0, 0)])

    def test_delta_months(self):
        # Negative delta_months walks backwards month by month.
        result = tp.time.monthly_date_range(delta_months=-2)
        self.assertEqual(
            result,
            [
                dt.datetime(2011, 11, 1, 0, 0),
                dt.datetime(2011, 12, 1, 0, 0),
                dt.datetime(2012, 1, 1, 0, 0),
            ],
        )
class TestAddDayOfYearVariable(unittest.TestCase):
    """add_day_of_year_variable on a 366-step daily dataset."""

    # 2x2 grid with 366 daily steps -- presumably spanning a leap year; TODO
    # confirm against the data file.
    ds = xr.open_dataset(TEST_DATA_PATH / "lat_2_lon_2_time_366_D_test_data.nc")

    def test_dataset_modify_ordinal_days(self):
        # Default modifies ordinal days: the expected slice shows day 60
        # repeated (leap-day handling).
        result = tp.time.add_day_of_year_variable(self.ds)
        np.testing.assert_equal(result["dayofyear"].values[58:62], np.array([59, 60, 60, 61]))

    def test_dataset_no_modify_ordinal_days(self):
        # Without modification, plain ordinal day-of-year values are kept.
        result = tp.time.add_day_of_year_variable(self.ds, modify_ordinal_days=False)
        np.testing.assert_equal(result["dayofyear"].values[58:62], np.array([59, 60, 61, 62]))

    def test_dataarray(self):
        # Same behaviour for a DataArray input.
        result = tp.time.add_day_of_year_variable(self.ds["var"], modify_ordinal_days=False)
        np.testing.assert_equal(result["dayofyear"].values[58:62], np.array([59, 60, 61, 62]))
class TestCheckStartEndTimeValidity(unittest.TestCase):
    """check_start_end_time_validity: True iff the start precedes the end."""

    def test_datetime64_with_datetime64(self):
        result = tp.time.check_start_end_time_validity(
            np.datetime64("2013-04-05 07:12:34.056789"), np.datetime64("2013-04-05 07:12:35")
        )
        self.assertTrue(result)

    def test_datetime64_with_datetime64_invalid(self):
        # Swapped order (end before start) -> False.
        result = tp.time.check_start_end_time_validity(
            np.datetime64("2013-04-05 07:12:35"), np.datetime64("2013-04-05 07:12:34.056789")
        )
        self.assertFalse(result)

    def test_datetime_with_datetime64(self):
        # Mixed dt.datetime / np.datetime64 arguments are accepted.
        result = tp.time.check_start_end_time_validity(
            dt.datetime(2013, 4, 5, 7, 12, 34, 56789), np.datetime64("2013-04-05 07:12:35")
        )
        self.assertTrue(result)

    @pytest.fixture(autouse=True)
    def capfd(self, capfd):
        # Bridge pytest's output-capture fixture onto this unittest-style class.
        self.capfd = capfd

    def test_verbose_warning(self):
        # verbose=True prints a warning when end < start.
        result = tp.time.check_start_end_time_validity(dt.datetime(2014, 1, 2), dt.datetime(2014, 1, 1), verbose=True)
        out, err = self.capfd.readouterr()
        self.assertFalse(result)
        assert out == "Warning: End time 2014-01-01 00:00:00 before start time 2014-01-02 00:00:00\n"

    def test_missing_time(self):
        # A missing (None) time must raise ValueError.
        with self.assertRaises(ValueError):
            tp.time.check_start_end_time_validity(None, dt.datetime(2014, 1, 2))
class TestGetDayOfYear(unittest.TestCase):
    """get_day_of_year across xarray, pandas, and scalar inputs."""

    ds = xr.open_dataset(TEST_DATA_PATH / "lat_2_lon_2_time_366_D_test_data.nc")

    def test_dataset_ordinal_days(self):
        result = tp.time.get_day_of_year(self.ds, modify_ordinal_days=False)[58:62]
        np.testing.assert_equal(result, np.array([59, 60, 61, 62]))

    def test_dataset_modify_ordinal_days(self):
        # Modified ordinal days repeat 60 in the expected slice (leap-day handling).
        result = tp.time.get_day_of_year(self.ds, modify_ordinal_days=True)[58:62]
        np.testing.assert_equal(result, np.array([59, 60, 60, 61]))

    def test_dataarray_ordinal_days(self):
        result = tp.time.get_day_of_year(self.ds["var"], modify_ordinal_days=False)[58:62]
        np.testing.assert_equal(result, np.array([59, 60, 61, 62]))

    def test_dataframe_ordinal_days(self):
        # The flattened DataFrame has 4 rows (2 lat x 2 lon) per day, so the
        # day-of-year sequence repeats within each day -- hence [15, 15, 16, 16].
        result = tp.time.get_day_of_year(self.ds.to_dataframe(), time_dim="time", modify_ordinal_days=False)[58:62]
        np.testing.assert_equal(result, np.array([15, 15, 16, 16]))

    def test_series_ordinal_days(self):
        result = tp.time.get_day_of_year(self.ds.to_dataframe()["var"].index, modify_ordinal_days=False)[58:62]
        np.testing.assert_equal(result, np.array([15, 15, 16, 16]))

    def test_datetime_ordinal_days(self):
        # 2004-03-01 is ordinal day 61 of a leap year.
        result = tp.time.get_day_of_year(dt.datetime(2004, 3, 1), modify_ordinal_days=False)
        np.testing.assert_equal(result, np.array([61]))
class TestTimeToLocalTime(unittest.TestCase):
    """time_to_local_time argument validation."""

    def test_invalid_string(self):
        # A non-string timezone_name must raise TypeError.
        with self.assertRaises(TypeError):
            tp.time.time_to_local_time(dt.datetime(2019, 3, 15, 1, 0), timezone_name=1)
class TestDataToLocalTime(unittest.TestCase):
    """data_to_local_time: shift times into a named timezone across input types.

    The fixture times start at 2019-03-15 00:00 and CET is UTC+1 on that date,
    so every expected value is one hour later than the input.
    """

    expected = pd.DatetimeIndex(
        ["2019-03-15 01:00:00", "2019-03-15 02:00:00"], dtype="datetime64[ns]", name="time", freq=None
    )
    # Hourly fixture with a ('time', 'id') MultiIndex.
    df = pd.DataFrame(
        {"time": pd.date_range("2019-03-15", freq="h", periods=2), "id": ["a", "b"], "val": [1, 2]}
    ).set_index(["time", "id"])

    def test_dataframe_multiindex(self):
        results = tp.time.data_to_local_time(self.df.copy(), "CET").index.get_level_values("time")
        pd.testing.assert_index_equal(results, self.expected)

    def test_dataframe(self):
        # Plain single-level time index.
        test_df = self.df.copy().reset_index(drop=False).set_index("time")
        results = tp.time.data_to_local_time(test_df, "CET").index.get_level_values("time")
        pd.testing.assert_index_equal(results, self.expected)

    def test_dataframe_column(self):
        # 'time' held as an ordinary column rather than the index.
        results = tp.time.data_to_local_time(self.df.reset_index(drop=False).copy(), "CET", time_dim="time")["time"]
        np.testing.assert_equal(results.values, self.expected.values)

    def test_series(self):
        results = tp.time.data_to_local_time(self.df.copy()["val"], "CET").index.get_level_values("time")
        pd.testing.assert_index_equal(results, self.expected)

    def test_series_values(self):
        # Datetime *values* in a Series are shifted too.
        results = tp.time.data_to_local_time(pd.Series(self.expected), "CET").values
        np.testing.assert_equal(results, pd.Series(self.expected) + pd.Timedelta("1h"))

    def test_dataset(self):
        results = tp.time.data_to_local_time(self.df.copy().to_xarray(), "CET").indexes["time"]
        pd.testing.assert_index_equal(results, self.expected)

    def test_dataarray(self):
        results = tp.time.data_to_local_time(self.df.copy().to_xarray(), "CET")["val"].indexes["time"]
        pd.testing.assert_index_equal(results, self.expected)

    def test_datetime(self):
        results = tp.time.data_to_local_time(dt.datetime(2019, 3, 15, 1, 0), "CET")[0]
        # NOTE(review): assertTrue only checks the truthiness of its first
        # argument (the second is treated as the failure message); assertEqual
        # was probably intended here -- verify.
        self.assertTrue(results, dt.datetime(2019, 3, 15, 2, 0))

    def test_missing_timezone(self):
        with self.assertRaises(ValueError):
            tp.time.data_to_local_time(dt.datetime(2019, 3, 15, 1, 0), None)

    def test_datetimeindex(self):
        results = tp.time.data_to_local_time(self.expected - pd.Timedelta("1h"), "CET")
        pd.testing.assert_index_equal(results, self.expected)

    def test_ndarray(self):
        results = tp.time.data_to_local_time(self.expected.values - np.timedelta64(1, "h"), "CET")
        pd.testing.assert_index_equal(results, self.expected)

    def test_list(self):
        results = tp.time.data_to_local_time([dt.datetime(2019, 3, 15, 1, 0)], "CET")
        self.assertEqual(results.to_pydatetime(), [dt.datetime(2019, 3, 15, 2, 0)])

    @pytest.fixture(autouse=True)
    def capfd(self, capfd):
        # Bridge pytest's output-capture fixture onto this unittest-style class.
        self.capfd = capfd

    def test_invalid_data_type(self):
        with self.assertRaises(TypeError):
            # NOTE(review): readouterr() runs *before* the raising call, and the
            # lines after the call are unreachable once TypeError propagates --
            # so this test only verifies that TypeError is raised, never the
            # printed message.  Compare test_verbose_warning's ordering; verify
            # the intended output check.
            out, err = self.capfd.readouterr()
            _ = tp.time.data_to_local_time(1, "CET")
            assert out == "Data type of int not implemented"
class TestListTimezones(unittest.TestCase):
    """list_timezones returns the available timezone names as a set."""

    def test_dict_type(self):
        self.assertIsInstance(tp.time.list_timezones(), set)
class TestEnsureDatetimeIndex(unittest.TestCase):
    """_ensure_datetimeindex: normalise scalars/lists into a 'time'-named DatetimeIndex."""

    def test_datetime(self):
        # A scalar datetime becomes a 1-element index named 'time'.
        result = tp.time._ensure_datetimeindex(dt.datetime(2021, 4, 5))
        expected = pd.DatetimeIndex(["2021-04-05 00:00:00"], dtype="datetime64[ns]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)

    def test_list_of_datetimes(self):
        result = tp.time._ensure_datetimeindex([dt.datetime(2021, 4, 5), dt.datetime(2021, 4, 6)])
        expected = pd.DatetimeIndex(
            ["2021-04-05 00:00:00", "2021-04-06 00:00:00"], dtype="datetime64[ns]", name="time", freq=None
        )
        pd.testing.assert_index_equal(result, expected)

    def test_datetimeindex(self):
        # An existing DatetimeIndex is returned unchanged.
        expected = pd.DatetimeIndex(["2021-04-05 00:00:00"], dtype="datetime64[ns]", name="time", freq=None)
        result = tp.time._ensure_datetimeindex(expected)
        pd.testing.assert_index_equal(result, expected)
class TestDatetimeToUTC(unittest.TestCase):
    """_datetime_to_UTC: naive datetimes are treated as UTC; aware ones are converted."""

    def test_no_timezone(self):
        result = tp.time._datetime_to_UTC(dt.datetime(2021, 4, 5))
        expected = pd.DatetimeIndex(["2021-04-05 00:00:00+00:00"], dtype="datetime64[ns, UTC]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)

    def test_timezone_set(self):
        # CET midnight is 22:00 UTC the previous day.
        result = tp.time._datetime_to_UTC(dt.datetime(2021, 4, 5, tzinfo=zoneinfo.ZoneInfo("CET")))
        expected = pd.DatetimeIndex(["2021-04-04 22:00:00+00:00"], dtype="datetime64[ns, UTC]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)
class TestDatetimeindexToLocalTimeTzAware(unittest.TestCase):
    """_datetimeindex_to_local_time_tz_aware: convert to a target zone, keeping tz info."""

    def test_no_timezone(self):
        # No target zone: a naive datetime comes back as UTC.
        result = tp.time._datetimeindex_to_local_time_tz_aware(dt.datetime(2021, 4, 5))
        expected = pd.DatetimeIndex(["2021-04-05 00:00:00+00:00"], dtype="datetime64[ns, UTC]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)

    def test_timezone_set(self):
        # CET midnight converted to EST (UTC-5) is 17:00 the previous day.
        result = tp.time._datetimeindex_to_local_time_tz_aware(
            dt.datetime(2021, 4, 5, tzinfo=zoneinfo.ZoneInfo("CET")), "EST"
        )
        expected = pd.DatetimeIndex(["2021-04-04 17:00:00-05:00"], dtype="datetime64[ns, EST]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)
class TestDatetimeindexToLocalTimeTzNaive(unittest.TestCase):
    """_datetimeindex_to_local_time_tz_naive: same conversion but dropping tz info."""

    def test_no_timezone(self):
        result = tp.time._datetimeindex_to_local_time_tz_naive(dt.datetime(2021, 4, 5))
        expected = pd.DatetimeIndex(["2021-04-05"], dtype="datetime64[ns]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)

    def test_timezone_set(self):
        # CET midnight -> 22:00 UTC the previous day, returned tz-naive.
        result = tp.time._datetimeindex_to_local_time_tz_naive(dt.datetime(2021, 4, 5, tzinfo=zoneinfo.ZoneInfo("CET")))
        expected = pd.DatetimeIndex(["2021-04-04 22:00:00"], dtype="datetime64[ns]", name="time", freq=None)
        pd.testing.assert_index_equal(result, expected)
class TestSetTimeInData(unittest.TestCase):
    """_set_time_in_data: replacing, flooring, and shifting the time index."""

    # Two daily timestamps at 06:00 with a ('time', 'id') MultiIndex.
    df = pd.DataFrame(
        {"time": pd.date_range("2019-03-15 06:00", freq="D", periods=2), "id": ["a", "b"], "val": [1, 2]}
    ).set_index(["time", "id"])

    def test_replace_times(self):
        # new_times wholesale replaces the index values.
        results = tp.time._set_time_in_data(
            self.df, new_times=pd.date_range("2021-01-1 06:00", freq="D", periods=2)
        ).index.get_level_values("time")
        expected = pd.DatetimeIndex(
            ["2021-01-01 06:00:00", "2021-01-02 06:00:00"], dtype="datetime64[ns]", name="time", freq=None
        )
        pd.testing.assert_index_equal(results, expected)

    def test_set_times_to_midnight(self):
        # NOTE(review): the fixture dates are 2019-03-15/16, yet 2021-01-01/02 is
        # expected here.  That only adds up if test_replace_times (which runs
        # first alphabetically) mutated the shared class-level df in place -- a
        # hidden test-order dependency.  Verify and consider per-test copies.
        results = tp.time._set_time_in_data(
            self.df, set_time_to_midnight=True, hours_to_subtract=None
        ).index.get_level_values("time")
        expected = pd.DatetimeIndex(["2021-01-01", "2021-01-02"], dtype="datetime64[ns]", name="time", freq=None)
        pd.testing.assert_index_equal(results, expected)

    def test_subtract_hours(self):
        # NOTE(review): same test-order concern as above (expected dates are in
        # 2020/2021 while the fixture starts in 2019).
        results = tp.time._set_time_in_data(
            self.df, set_time_to_midnight=True, hours_to_subtract=5
        ).index.get_level_values("time")
        expected = pd.DatetimeIndex(
            ["2020-12-31 19:00:00", "2021-01-01 19:00:00"], dtype="datetime64[ns]", name="time", freq=None
        )
        pd.testing.assert_index_equal(results, expected)

    def test_no_modification(self):
        # No options set: the time index is returned untouched.
        results = tp.time._set_time_in_data(self.df).index.get_level_values("time")
        expected = self.df.index.get_level_values("time")
        pd.testing.assert_index_equal(results, expected)
class TestUTCOffsetInHours(unittest.TestCase):
    """utc_offset_in_hours: offset of a named zone (Asia/Kolkata is UTC+5:30)."""

    def test_datetimeindex_no_timezone(self):
        result = tp.time.utc_offset_in_hours(pd.date_range("2019-01-02", freq="6H", periods=2), "Asia/Kolkata")
        self.assertEqual(result, 5.5)

    def test_datetime_no_timezone(self):
        # A scalar datetime input works the same way.
        result = tp.time.utc_offset_in_hours(dt.datetime(2021, 4, 5), "Asia/Kolkata")
        self.assertEqual(result, 5.5)

    def test_datetimeindex_timezone_set(self):
        # Input already tz-aware (CET); the offset reported is for the target zone.
        result = tp.time.utc_offset_in_hours(
            pd.date_range("2019-01-02", freq="6H", periods=2, tz="CET"), "Asia/Kolkata"
        )
        self.assertEqual(result, 5.5)

    def test_datetimeindex_return_multiple_offsets(self):
        # return_single_value=False yields one offset per timestamp.
        result = tp.time.utc_offset_in_hours(
            pd.date_range("2019-01-02", freq="6H", periods=2), "Asia/Kolkata", return_single_value=False
        )
        self.assertEqual(result, [5.5, 5.5])
class TestGroupbyFreq(unittest.TestCase):
    """groupby_freq: resample-style grouping by pandas frequency strings."""

    # 100 daily ones starting 2022-01-01 on a 1x1 grid.
    ds = xr.Dataset(
        data_vars={"var": (("lat", "lon", "time"), np.ones((1, 1, 100)))},
        coords={"lat": [1], "lon": [2], "time": pd.date_range("2022-01-01", periods=100)},
    )
    # 100 hourly values 0..99 on a 1x1 grid.
    ds_hourly = xr.Dataset(
        data_vars={"var": (("lat", "lon", "time"), np.arange(100)[np.newaxis, np.newaxis, :])},
        coords={"lat": [1], "lon": [2], "time": pd.date_range("2022-01-01", periods=100, freq="h")},
    )
    # The same 0..99 hourly series broadcast over a 2x2 grid.
    ds_hourly_multicoord = xr.Dataset(
        data_vars={"var": (("lat", "lon", "time"), np.full((2, 2, 100), np.arange(100)))},
        coords={"lat": [1, 2], "lon": [3, 4], "time": pd.date_range("2022-01-01", periods=100, freq="h")},
    )

    def test_dataset(self):
        # Monthly sums of the 100 daily ones.
        result = tp.time.groupby_freq(self.ds, freq="M").sum()["var"].values.flatten()
        expected = np.array([30.0, 28.0, 31.0, 11.0])
        np.testing.assert_array_equal(result, expected)

    def test_dataarray(self):
        result = tp.time.groupby_freq(self.ds["var"], freq="M").sum().values.flatten()
        expected = np.array([30.0, 28.0, 31.0, 11.0])
        np.testing.assert_array_equal(result, expected)

    def test_dataframe(self):
        result = tp.time.groupby_freq(self.ds["var"].to_dataframe(), freq="M").sum()["var"].values
        expected = np.array([30.0, 28.0, 31.0, 11.0])
        np.testing.assert_array_equal(result, expected)

    def test_series(self):
        result = tp.time.groupby_freq(self.ds["var"].to_series(), freq="M").sum().values
        expected = np.array([30.0, 28.0, 31.0, 11.0])
        np.testing.assert_array_equal(result, expected)

    def test_dataset_hourly_to_daily(self):
        # Days start at 06:00: first group is hours 0-5 (sum 15), then 6-29
        # (sum 420), and so on.
        result = tp.time.groupby_freq(self.ds_hourly, freq="D", day_start_hour=6).sum()["var"].values.flatten()
        expected = np.array([15, 420, 996, 1572, 1947])
        np.testing.assert_array_equal(result, expected)

    def test_dataframe_hourly_to_daily(self):
        result = tp.time.groupby_freq(self.ds_hourly.to_dataframe(), freq="D", day_start_hour=6).sum()["var"]
        expected = np.array([15, 420, 996, 1572, 1947])
        np.testing.assert_array_equal(result.values, expected)
        self.assertEqual(result.index.names, ["time", "lat", "lon"])

    def test_series_hourly_to_daily_single_index(self):
        # Drop the spatial levels so the Series has a plain time index.
        result = tp.time.groupby_freq(
            self.ds_hourly["var"].to_series().reset_index(drop=True, level=["lat", "lon"]), freq="D", day_start_hour=6
        ).sum()
        expected = np.array([15, 420, 996, 1572, 1947])
        np.testing.assert_array_equal(result.values, expected)
        self.assertEqual(result.index.name, "time")

    def test_dataframe_no_time_dim(self):
        # A time_dim not present in the frame raises ValueError.
        with self.assertRaises(ValueError):
            tp.time.groupby_freq(self.ds["var"].to_dataframe(), freq="M", time_dim="x")

    def test_series_no_time_dim(self):
        with self.assertRaises(ValueError):
            tp.time.groupby_freq(self.ds["var"].to_series(), freq="M", time_dim="x")

    def test_dataframe_time_column(self):
        # 'time' as an ordinary column rather than an index level.
        result = (
            tp.time.groupby_freq(self.ds["var"].to_dataframe().reset_index(drop=False), freq="M").sum()["var"].values
        )
        expected = np.array([30.0, 28.0, 31.0, 11.0])
        np.testing.assert_array_equal(result, expected)

    def test_dataframe_time_column_other_cols(self):
        # Additional grouping columns participate in the group key; the
        # assertion checks the summed 'lon' column per (day, lat) group.
        result = (
            tp.time.groupby_freq(
                self.ds_hourly_multicoord.to_dataframe().reset_index(drop=False), freq="D", other_grouping_columns="lat"
            )
            .sum()["lon"]
            .values
        )
        expected = np.array([168, 168, 168, 168, 168, 168, 168, 168, 28, 28])
        np.testing.assert_array_equal(result, expected)

    def test_invalid_datatype(self):
        # Unsupported input types raise TypeError.
        with self.assertRaises(TypeError):
            tp.time.groupby_freq([1])
class TestResampleTime(unittest.TestCase):
    """Tests for tp.time.resample_time aggregation methods."""

    # 100 hourly values 0..99 starting 2022-01-01 on a 1x1 grid.
    ds_hourly = xr.Dataset(
        data_vars={"var": (("lat", "lon", "time"), np.arange(100)[np.newaxis, np.newaxis, :])},
        coords={"lat": [1], "lon": [2], "time": pd.date_range("2022-01-01", periods=100, freq="h")},
    )

    def test_dataset_sum(self):
        """Daily sums with the day starting at 06:00."""
        resampled = tp.time.resample_time(self.ds_hourly, freq="D", day_start_hour=6, resample_method="sum")
        self.assertEqual(resampled["time"].values[0], np.datetime64("2021-12-31T06:00:00.00"))
        np.testing.assert_array_equal(
            resampled["var"].values.flatten(), np.array([15, 420, 996, 1572, 1947])
        )

    def test_dataset_mean(self):
        values = tp.time.resample_time(self.ds_hourly, resample_method="mean")["var"].values.flatten()
        np.testing.assert_array_equal(values, np.array([11.5, 35.5, 59.5, 83.5, 97.5]))

    def test_dataset_max(self):
        values = tp.time.resample_time(self.ds_hourly, resample_method="max")["var"].values.flatten()
        np.testing.assert_array_equal(values, np.array([23, 47, 71, 95, 99]))

    def test_dataset_min(self):
        values = tp.time.resample_time(self.ds_hourly, resample_method="min")["var"].values.flatten()
        np.testing.assert_array_equal(values, np.array([0, 24, 48, 72, 96]))

    def test_method_not_implemented(self):
        """An unknown resample_method raises ValueError."""
        with self.assertRaises(ValueError):
            tp.time.resample_time(self.ds_hourly, resample_method="foobar")
| 43.262887
| 120
| 0.663092
| 3,646
| 25,179
| 4.370817
| 0.085573
| 0.042169
| 0.044428
| 0.058233
| 0.803715
| 0.770833
| 0.746925
| 0.702372
| 0.664157
| 0.612575
| 0
| 0.075769
| 0.184916
| 25,179
| 581
| 121
| 43.337349
| 0.700726
| 0
| 0
| 0.364045
| 0
| 0.002247
| 0.070535
| 0.005004
| 0
| 0
| 0
| 0
| 0.226966
| 1
| 0.220225
| false
| 0
| 0.022472
| 0
| 0.314607
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
05f9dbcdf439cc82456bb49424264d138e6ec2cf
| 20,660
|
py
|
Python
|
scoreprobability/crawl.py
|
htpauleta/ScoreProbability
|
eae1d76e3607bb4cb19eff6a7ad72d5b2c20f4f7
|
[
"Apache-2.0"
] | 7
|
2018-12-29T06:39:28.000Z
|
2021-10-12T01:42:37.000Z
|
scoreprobability/crawl.py
|
htpauleta/ScoreProbability
|
eae1d76e3607bb4cb19eff6a7ad72d5b2c20f4f7
|
[
"Apache-2.0"
] | 2
|
2021-03-31T18:56:32.000Z
|
2021-06-01T23:30:22.000Z
|
scoreprobability/crawl.py
|
htpauleta/ScoreProbability
|
eae1d76e3607bb4cb19eff6a7ad72d5b2c20f4f7
|
[
"Apache-2.0"
] | null | null | null |
"""
@Project : ScoreProbability
@Module : crawl.py
@Author : HjwGivenLyy [1752929469@qq.com]
@Created : 12/17/18 4:54 PM
@Desc : crawl data from qtw net
"""
import datetime
import re
import loguru
import requests
import yaml
from bs4 import BeautifulSoup
from pymongo import MongoClient
from base import SUPPORT_LEAGUE_NAME_ID, SERVER_FILE_PATH
# Bookmaker ids tried, in order, when fetching odds from qtw.
QTW_COMPANY_BET = [8, 23, 24, 31]

# Handicap (yp) odds-change detail page.
YP_URL = "http://vip.win007.com/changeDetail/handicap.aspx?" \
         "id={qtw_match_id}&companyID={company_id}&l=0"
# Over/under (dxq) odds-change detail page.
DXQ_URL = "http://vip.win007.com/changeDetail/overunder.aspx?" \
          "id={qtw_match_id}&companyID={company_id}&l=0"
# League home page, fetched first to establish a session.
LEAGUE_URL = "http://zq.win007.com/cn/SubLeague/{league_id}.html"
# Season match-result JS for leagues addressed by a plain id.
SEASON_URL = "http://zq.win007.com/jsData/matchResult/2018-2019/" \
             "s{id}.js?version={value}"
# Leagues whose season file uses a compound "s<league>_<season>" path.
SPECIAL_SEASON_URL = {
    'fy': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s12_1778.js?version={value}",
    'dy': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s9_132.js?version={value}",
    'yg': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s37_87.js?version={value}",
    'bj': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s5_114.js?version={value}",
    'pc': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s23_1123.js?version={value}",
    'ac': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s273_462.js?version={value}",
    'xy': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s33_546.js?version={value}",
    'yy': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s40_261.js?version={value}",
    'hj': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s16_98.js?version={value}",
    'hy': "http://zq.win007.com/jsData/matchResult/2018-2019/"
          "s17_94.js?version={value}",
}

# Module-wide loguru logger.
logger = loguru.logger
def get_latest_odds_by_qtw_match_id(qtw_match_id: int, odd_type: str):
    """
    Fetch the latest odds for a qtw match, trying each bookmaker in
    QTW_COMPANY_BET in order until one returns a value.

    :param qtw_match_id: qtw match identifier
    :param odd_type: "yp" (handicap) or "dxq" (over/under)
    :return: odds text, an error-message string, or None when every
             bookmaker request failed
    """

    def get_latest_odd_by_company_id(match_id: int, company_id: int, odd: str):
        """Fetch one bet company's latest odd; None on request failure."""
        if odd == "yp":
            url = YP_URL.format(qtw_match_id=match_id, company_id=company_id)
        elif odd == "dxq":
            url = DXQ_URL.format(qtw_match_id=match_id, company_id=company_id)
        else:
            return "odds type must be in ['yp', 'dxq']"
        headers1 = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; '
                          'rv:2.0.1) Gecko/20100101 Firefox/4.0.1'
        }
        try:
            content = requests.get(url, headers=headers1)
            page = BeautifulSoup(content.content, "lxml")
            odds2_lst = page.find_all(id="odds2")
            td_lst = odds2_lst[0].find_all("tr")[1].find_all("td")
            # BUG FIX: the original compared bytes to str
            # (text.encode("utf-8") != "滚"), which is always True on
            # Python 3, so the "match has begun" branch was unreachable.
            if td_lst[-1].text != "滚":
                return td_lst[3].text
            return "match have begin!"
        except Exception as e:
            logger.exception(e)
            logger.error(
                "get qtw_match_id = {0} odds failure".format(qtw_match_id))
            return None

    # BUG FIX: the original returned None immediately after the first
    # company failed; now every company is tried before giving up.
    for company_id_value in QTW_COMPANY_BET:
        rtn = get_latest_odd_by_company_id(
            match_id=qtw_match_id, company_id=company_id_value, odd=odd_type)
        if rtn is not None:
            return rtn
    return None
def save_match_info_to_mongodb(db_client: MongoClient, league_name: str):
    """
    save qtw match information data to mongodb

    Inserts matches seen for the first time and refreshes the status /
    score of known, not-yet-finished matches.

    :param db_client: mongodb client
    :param league_name: league name --> "yc", "dj", "fj", "xj", "yj"
    :return: run insert into
    """
    tb = db_client["xscore"]["match_info"]
    page_text = spare_url(league_name)
    # Each "jh... = [...]" assignment in the crawled JS is one game week.
    pattern = re.compile('jh.* = .*]')
    match_lst = re.findall(pattern, page_text)
    game_week = 0
    for match_str in match_lst:
        game_week += 1
        # Pad empty consecutive fields so the list literal stays parseable.
        match_info_str = match_str.replace(",,,", ",'','',")
        # HACK: eval of crawled text is a security hazard if the site is
        # ever hostile — consider ast.literal_eval instead.
        match_info_lst = eval(str(match_info_str).split(" = ")[1])
        for match_information in match_info_lst:
            qtw_match_id = int(match_information[0])
            result = tb.find_one(
                filter={"qtw_match_id": qtw_match_id},
                projection={'status': 1}
            )
            if result is not None and result["status"] == 2:
                # Already stored as played; nothing can change any more.
                continue
            elif result is not None and result["status"] != 2:
                # Known, unfinished match: refresh time/status/score.
                # Field [2] == -1 means finished, -14 means delayed
                # (mapping follows the status_text assignments below).
                if match_information[2] == -1:
                    new_status = 2
                    new_status_text = "Played"
                    score_lst = match_information[6].split("-")
                    # NOTE(review): Collection.update is deprecated in
                    # pymongo 3 and removed in 4 — prefer update_one.
                    tb.update(
                        {"qtw_match_id": qtw_match_id},
                        {
                            "$set": {
                                "match_time": match_information[3],
                                "status": new_status,
                                "status_text": new_status_text,
                                "home_score": int(score_lst[0]),
                                "away_score": int(score_lst[1])
                            }
                        }
                    )
                elif match_information[2] == -14:
                    new_status = 3
                    new_status_text = "Delay"
                    tb.update(
                        {"qtw_match_id": qtw_match_id},
                        {
                            "$set": {
                                "match_time": match_information[3],
                                "status": new_status,
                                "status_text": new_status_text
                            }
                        }
                    )
                else:
                    # NOTE(review): "Fixture" is stored with status 3 here
                    # but with status 1 on first insert below — confirm
                    # which status code is canonical.
                    new_status = 3
                    new_status_text = "Fixture"
                    tb.update(
                        {"qtw_match_id": qtw_match_id},
                        {
                            "$set": {
                                "match_time": match_information[3],
                                "status": new_status,
                                "status_text": new_status_text
                            }
                        }
                    )
                logger.info(
                    "qtw_match_id = {0} have finished update!".format(
                        qtw_match_id))
            else:
                # First sighting of this match: build a full record.
                result_dict = dict()
                result_dict["qtw_match_id"] = qtw_match_id
                result_dict["qtw_league_id"] = int(match_information[1])
                result_dict["match_time"] = match_information[3]
                result_dict["home_id"] = int(match_information[4])
                result_dict["away_id"] = int(match_information[5])
                result_dict["game_week"] = int(game_week)
                if match_information[2] == -1:
                    score_lst = match_information[6].split("-")
                    result_dict["home_score"] = int(score_lst[0])
                    result_dict["away_score"] = int(score_lst[1])
                    result_dict["status"] = 2
                    result_dict["status_text"] = "Played"
                elif match_information[2] == -14:
                    result_dict["home_score"] = -1
                    result_dict["away_score"] = -1
                    result_dict["status"] = 3
                    result_dict["status_text"] = "Delay"
                else:
                    result_dict["home_score"] = -1
                    result_dict["away_score"] = -1
                    result_dict["status"] = 1
                    result_dict["status_text"] = "Fixture"
                logger.info("result_dict = {0}".format(result_dict))
                tb.insert_one(result_dict)
def save_team_info_to_mongodb(db_client: MongoClient, league_name: str):
    """
    save qtw team information data to mongodb

    :param db_client: mongodb client
    :param league_name: league name --> "yc", "dj", "fj", "xj", "yj"
    :return: run insert into
    """
    tb = db_client["xscore"]["team_info"]
    league_id = int(SUPPORT_LEAGUE_NAME_ID[league_name])
    page_text = spare_url(league_name)
    # "arrTeam = [...]" in the crawled JS lists all teams of the league.
    pattern = re.compile("arrTeam = .*]")
    match_str = re.search(pattern, page_text)
    match_group = match_str.group()
    # HACK: eval of crawled text is a security hazard if the site is ever
    # hostile — consider ast.literal_eval instead.
    team_information_lst = eval(str(match_group).split(" = ")[1])
    for team_information in team_information_lst:
        team_id = int(team_information[0])
        result = tb.find_one(
            filter={
                "league_id": league_id,
                "team_id": team_id
            },
            projection={'league_id': 1}
        )
        if result is not None:
            logger.info("team_id = {0} exist in mongodb".format(
                team_information[0]))
        else:
            result_dict = dict()
            result_dict["league_id"] = league_id
            result_dict["league_name"] = league_name
            result_dict['team_id'] = team_id
            result_dict["team_cn_name"] = team_information[1]
            result_dict["team_en_name"] = team_information[3]
            # BUG FIX: the original called logger.info("result_dict = ",
            # result_dict); with no placeholder in the message the second
            # argument was dropped, so the record was never logged.
            logger.info("result_dict = {0}".format(result_dict))
            tb.insert_one(result_dict)
def save_league_info_to_mongodb(db_client: MongoClient):
    """save qtw league information data to mongodb"""
    tb = db_client["xscore"]["league_info"]
    pending = []
    for league_name, league_id in SUPPORT_LEAGUE_NAME_ID.items():
        # Skip leagues already present in the collection.
        known = tb.find_one(
            filter={"league_id": int(league_id)},
            projection={'league_id': 1}
        )
        if known is not None:
            logger.info("league_id = {0} exist in mongodb".format(league_id))
            continue
        pending.append({
            "league_name": league_name,
            "league_id": int(league_id),
        })
    if pending:
        tb.insert_many(pending)
    else:
        logger.info("no league info need update !!!")
def spare_url(league_name: str):
    """Fetch the season-result JS page for *league_name*; return its text."""
    # version=YYYYMMDDHH acts as an hourly cache-buster for the JS file.
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    version_value = stamp.split(":")[0].replace("-", "").replace(" ", "")
    league_id = SUPPORT_LEAGUE_NAME_ID[league_name]
    plain_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) '
                      'Gecko/20100101 Firefox/4.0.1'
    }
    data_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) '
                      'Gecko/20100101 Firefox/4.0.1',
        'Host': 'zq.win007.com',
        'Connection': 'keep-alive',
        'Accept': '*/*',
        'Referer': 'http://zq.win007.com/cn/SubLeague/37.html',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9'
    }
    session = requests.Session()
    # Visit the league home page first so the session is warmed up.
    session.get(LEAGUE_URL.format(league_id=league_id), headers=plain_headers)
    if league_name in ['yc', 'dj', 'fj', 'xj', 'yj', 'sc']:
        season_url = SEASON_URL.format(id=league_id, value=version_value)
    else:
        season_url = SPECIAL_SEASON_URL[league_name].format(value=version_value)
    response = session.get(season_url, headers=data_headers)
    return BeautifulSoup(response.text, "lxml").p.text
# Another way of code implementation
class CrawlDataBase:
    """Base class for qtw crawlers: owns the mongodb client and page fetch."""

    def __init__(self, league_name: str):
        """
        Initialization parameters
        :param league_name: league name --> "yc", "dj", "fj", "xj", "yj"
        """
        self._db_client = self._get_db_client()
        self._league_name = league_name

    def __del__(self):
        # Best-effort cleanup; __del__ invocation timing is
        # interpreter-dependent.
        self._db_client.close()

    @staticmethod
    def _get_db_client():
        """Build a MongoClient from the server yaml config file."""
        # BUG FIX: the original used yaml.load(open(...)) — the file handle
        # leaked and yaml.load without an explicit Loader is unsafe and
        # deprecated; a context manager plus safe_load closes both gaps.
        with open(SERVER_FILE_PATH, encoding="utf-8") as config_file:
            config = yaml.safe_load(config_file)
        config_dct = config.get("mongodb")
        client = MongoClient(
            "mongodb://{user}:{pwd}@{host}:{port}/{db}"
            "?readPreference=primary".format(
                user=config_dct.get("user"),
                pwd=config_dct.get("pwd"),
                host=config_dct.get("host"),
                port=config_dct.get("port"),
                db=config_dct.get("db")))
        return client

    def spare_url(self):
        """Fetch the season-result JS page for this league; return its text."""
        # version=YYYYMMDDHH acts as an hourly cache-buster for the JS file.
        now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        version_value = now.split(":")[0].replace("-", "").replace(" ", "")
        league_id = SUPPORT_LEAGUE_NAME_ID[self._league_name]
        headers1 = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; '
                          'rv:2.0.1) Gecko/20100101 Firefox/4.0.1'
        }
        headers2 = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; '
                          'rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
            'Host': 'zq.win007.com',
            'Connection': 'keep-alive',
            'Accept': '*/*',
            'Referer': 'http://zq.win007.com/cn/SubLeague/37.html',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.9'
        }
        session = requests.Session()
        # Visit the league home page first so the session is warmed up.
        league_home_page_url = LEAGUE_URL.format(
            league_id=SUPPORT_LEAGUE_NAME_ID[self._league_name]
        )
        session.get(league_home_page_url, headers=headers1)
        if self._league_name in ['yc', 'dj', 'fj', 'xj', 'yj', 'sc']:
            season_home_page_url = SEASON_URL.format(
                id=league_id, value=version_value)
        else:
            season_home_page_url = SPECIAL_SEASON_URL[
                self._league_name].format(
                value=version_value)
        chi = session.get(season_home_page_url, headers=headers2)
        page = BeautifulSoup(chi.text, "lxml")
        page_text = page.p.text
        return page_text

    def save_info_to_mongodb(self):
        """Subclasses implement the concrete crawl-and-store step."""
        raise NotImplementedError
class LeagueInfo(CrawlDataBase):
    """Stores the supported-league lookup table in mongodb."""

    def save_info_to_mongodb(self):
        """save qtw league information data to mongodb"""
        tb = self._db_client["xscore"]["league_info"]
        rows = [
            {"league_name": name, "league_id": int(league_id)}
            for name, league_id in SUPPORT_LEAGUE_NAME_ID.items()
        ]
        tb.insert_many(rows)
class MatchInfo(CrawlDataBase):
    """Crawls and stores match information for the configured league."""

    def save_info_to_mongodb(self):
        """save qtw match information data to mongodb"""
        tb = self._db_client["xscore"]["match_info"]
        page_text = self.spare_url()
        # Each "jh... = [...]" assignment in the crawled JS is one game week.
        pattern = re.compile('jh.* = .*]')
        match_lst = re.findall(pattern, page_text)
        game_week = 0
        for match_str in match_lst:
            game_week += 1
            # Pad empty consecutive fields so the list literal stays parseable.
            match_info_str = match_str.replace(",,,", ",'','',")
            # HACK: eval of crawled text is a security hazard if the site
            # is ever hostile — consider ast.literal_eval instead.
            match_info_lst = eval(str(match_info_str).split(" = ")[1])
            for match_information in match_info_lst:
                qtw_match_id = int(match_information[0])
                result = tb.find_one(
                    filter={"qtw_match_id": qtw_match_id},
                    projection={'status': 1}
                )
                if result is not None and result["status"] == 2:
                    # Already stored as played; nothing can change any more.
                    continue
                elif result is not None and result["status"] != 2:
                    # Known, unfinished match: refresh time/status/score.
                    # Field [2] == -1 means finished, -14 means delayed
                    # (mapping follows the status_text assignments below).
                    if match_information[2] == -1:
                        new_status = 2
                        new_status_text = "Played"
                        score_lst = match_information[6].split("-")
                        # NOTE(review): Collection.update is deprecated in
                        # pymongo 3 and removed in 4 — prefer update_one.
                        tb.update(
                            {"qtw_match_id": qtw_match_id},
                            {
                                "$set": {
                                    "match_time": match_information[3],
                                    "status": new_status,
                                    "status_text": new_status_text,
                                    "home_score": int(score_lst[0]),
                                    "away_score": int(score_lst[1])
                                }
                            }
                        )
                    elif match_information[2] == -14:
                        new_status = 3
                        new_status_text = "Delay"
                        tb.update(
                            {"qtw_match_id": qtw_match_id},
                            {
                                "$set": {
                                    "match_time": match_information[3],
                                    "status": new_status,
                                    "status_text": new_status_text
                                }
                            }
                        )
                    else:
                        # NOTE(review): "Fixture" is stored with status 3
                        # here but with status 1 on first insert below —
                        # confirm which status code is canonical.
                        new_status = 3
                        new_status_text = "Fixture"
                        tb.update(
                            {"qtw_match_id": qtw_match_id},
                            {
                                "$set": {
                                    "match_time": match_information[3],
                                    "status": new_status,
                                    "status_text": new_status_text
                                }
                            }
                        )
                    logger.info(
                        "qtw_match_id = {0} have finished update!".format(
                            qtw_match_id))
                else:
                    # First sighting of this match: build a full record.
                    result_dict = dict()
                    result_dict["qtw_match_id"] = qtw_match_id
                    result_dict["qtw_league_id"] = int(match_information[1])
                    result_dict["match_time"] = match_information[3]
                    result_dict["home_id"] = int(match_information[4])
                    result_dict["away_id"] = int(match_information[5])
                    result_dict["game_week"] = int(game_week)
                    if match_information[2] == -1:
                        score_lst = match_information[6].split("-")
                        result_dict["home_score"] = int(score_lst[0])
                        result_dict["away_score"] = int(score_lst[1])
                        result_dict["status"] = 2
                        result_dict["status_text"] = "Played"
                    elif match_information[2] == -14:
                        result_dict["home_score"] = -1
                        result_dict["away_score"] = -1
                        result_dict["status"] = 3
                        result_dict["status_text"] = "Delay"
                    else:
                        result_dict["home_score"] = -1
                        result_dict["away_score"] = -1
                        result_dict["status"] = 1
                        result_dict["status_text"] = "Fixture"
                    logger.info("result_dict = {0}".format(result_dict))
                    tb.insert_one(result_dict)
class TeamInfo(CrawlDataBase):
    """Crawls and stores team information for the configured league."""

    def save_info_to_mongodb(self):
        """save qtw team information data to mongodb"""
        tb = self._db_client["xscore"]["team_info"]
        league_id = int(SUPPORT_LEAGUE_NAME_ID[self._league_name])
        page_text = self.spare_url()
        # "arrTeam = [...]" in the crawled JS lists all teams of the league.
        pattern = re.compile("arrTeam = .*]")
        match_str = re.search(pattern, page_text)
        match_group = match_str.group()
        # HACK: eval of crawled text is a security hazard if the site is
        # ever hostile — consider ast.literal_eval instead.
        team_information_lst = eval(str(match_group).split(" = ")[1])
        for team_information in team_information_lst:
            result = tb.find_one(
                filter={
                    "league_id": league_id,
                    "team_id": int(team_information[0])
                },
                projection={'league_id': 1}
            )
            if result is not None:
                logger.info("team_id = {0} exist in mongodb".format(
                    team_information[0]))
            else:
                result_dict = dict()
                result_dict["league_id"] = league_id
                result_dict["league_name"] = self._league_name
                result_dict['team_id'] = int(team_information[0])
                result_dict["team_cn_name"] = team_information[1]
                result_dict["team_en_name"] = team_information[3]
                # CONSISTENCY FIX: was a bare print(); every other crawler
                # class reports through the module-level loguru logger.
                logger.info("result_dict = {0}".format(result_dict))
                tb.insert_one(result_dict)
if __name__ == "__main__":
    # Manual smoke test: fetch the latest handicap (yp) odds for one match.
    yp_odd = get_latest_odds_by_qtw_match_id(1585238, "yp")
    print(yp_odd)
| 37.225225
| 79
| 0.512391
| 2,302
| 20,660
| 4.31364
| 0.124674
| 0.062437
| 0.038268
| 0.016918
| 0.808258
| 0.802719
| 0.774824
| 0.757402
| 0.703021
| 0.667271
| 0
| 0.034554
| 0.366844
| 20,660
| 554
| 80
| 37.292419
| 0.724562
| 0.046515
| 0
| 0.63109
| 0
| 0.00464
| 0.182214
| 0.022207
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032483
| false
| 0
| 0.018561
| 0
| 0.081207
| 0.00464
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
af2b91feb5701de094d9cc85ec6cd74a6e2eb062
| 56
|
py
|
Python
|
mcunet/tinynas/elastic_nn/modules/__init__.py
|
1999michael/tinyml
|
e8a5c9baef3d8a4890bb7ddbed4f5655cb4fa535
|
[
"MIT"
] | 306
|
2021-01-15T07:49:40.000Z
|
2022-03-31T03:13:20.000Z
|
tinynas/elastic_nn/modules/__init__.py
|
liuyy3364/mcunet
|
f53f9e20e8e912bdb111b4c32da75e71e9a59597
|
[
"Apache-2.0"
] | 9
|
2021-02-04T00:58:33.000Z
|
2022-03-29T06:19:55.000Z
|
tinynas/elastic_nn/modules/__init__.py
|
liuyy3364/mcunet
|
f53f9e20e8e912bdb111b4c32da75e71e9a59597
|
[
"Apache-2.0"
] | 65
|
2021-01-18T06:06:09.000Z
|
2022-03-25T01:42:15.000Z
|
from .dynamic_layers import *
from .dynamic_op import *
| 18.666667
| 29
| 0.785714
| 8
| 56
| 5.25
| 0.625
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 30
| 28
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
af4f1f9be162bc1f3cd196c4f0cf856e79bcf19d
| 7,662
|
py
|
Python
|
SZR/apps/groups/tests/test_tasks.py
|
Alek96/SZR
|
6c736cded0c6de88b6e4fc5a207273ec1024365b
|
[
"MIT"
] | 1
|
2019-04-04T17:02:24.000Z
|
2019-04-04T17:02:24.000Z
|
SZR/apps/groups/tests/test_tasks.py
|
Alek96/SZR
|
6c736cded0c6de88b6e4fc5a207273ec1024365b
|
[
"MIT"
] | 1
|
2019-03-30T13:32:14.000Z
|
2019-03-30T13:32:14.000Z
|
SZR/apps/groups/tests/test_tasks.py
|
Alek96/SZR
|
6c736cded0c6de88b6e4fc5a207273ec1024365b
|
[
"MIT"
] | null | null | null |
import json
from GitLabApi import mock_all_gitlab_url
from GitLabApi.MockUrls import mock_all_urls_and_raise_error
from GitLabApi.exceptions import GitlabGetError
from core.models import GitlabUser
from core.tests.test_view import LoginMethods
from groups import tasks
from groups.tests.models import AddMemberCreateMethods, AddSubgroupCreateMethods, AddProjectCreateMethods
from httmock import HTTMock
class CreateSubgroup(LoginMethods):
    """Tests for tasks.create_subgroup against a fully mocked GitLab API."""

    @LoginMethods.create_user_wrapper
    @mock_all_gitlab_url
    def test_create_subgroup(self):
        """Only the expected group-create URL is allowed to be hit."""
        from GitLabApi.tests.test_gitlab_api import TestGitLabGroupsApi

        expected_args = {
            'name': 'name',
            'path': 'path',
            'parent_id': 1,
        }
        create_mock = TestGitLabGroupsApi().get_mock_for_create_url(args=expected_args)
        with HTTMock(mock_all_urls_and_raise_error), HTTMock(create_mock):
            self.assertTrue(tasks.create_subgroup(
                user_id=self.user.id,
                name='name',
                path='path',
                group_id=1
            ))
class CreateProject(LoginMethods):
    """Tests for tasks.create_project against a fully mocked GitLab API."""

    @LoginMethods.create_user_wrapper
    @mock_all_gitlab_url
    def test_create_project(self):
        """Only the expected project-create URL is allowed to be hit."""
        from GitLabApi.tests.test_gitlab_api import TestGitLabProjectsApi

        expected_args = {
            'name': 'name',
            'path': 'path',
            'namespace_id': 1,
        }
        create_mock = TestGitLabProjectsApi().get_mock_for_create_url(args=expected_args)
        with HTTMock(mock_all_urls_and_raise_error), HTTMock(create_mock):
            self.assertTrue(tasks.create_project(
                user_id=self.user.id,
                name='name',
                path='path',
                group_id=1
            ))
class AddOrUpdateMemberTests(LoginMethods):
    """Tests for tasks.add_or_update_member (update vs. create paths)."""

    @LoginMethods.create_user_wrapper
    @mock_all_gitlab_url
    def test_update_user(self):
        # Member lookup succeeds -> the existing member's access level is
        # saved through the member-save URL.
        from GitLabApi.tests.test_gitlab_api import TestGitLabGroupsApi, TestGitLabUsersApi, TestGitLabGroupMembersApi, \
            TestGitLabGroupMemberObjApi
        # NOTE(review): the layering assumes inner HTTMock registrations
        # take precedence over the catch-all raise-error mock — confirm
        # against httmock's documented matching order.
        with HTTMock(mock_all_urls_and_raise_error):
            with HTTMock(TestGitLabGroupsApi().get_mock_for_get_url()):
                with HTTMock(TestGitLabUsersApi().get_mock_for_list_url()):
                    with HTTMock(TestGitLabGroupMembersApi().get_mock_for_get_url()):
                        with HTTMock(TestGitLabGroupMemberObjApi().get_mock_for_save_url(args={'access_level': 10})):
                            self.assertTrue(tasks.add_or_update_member(
                                user_id=self.user.id,
                                group_id=1,
                                username='name',
                                access_level=10
                            ))

    @LoginMethods.create_user_wrapper
    @mock_all_gitlab_url
    def test_create_user(self):
        # Member lookup raises GitlabGetError -> a new member is created
        # through the member-create URL with these args.
        from GitLabApi.tests.test_gitlab_api import TestGitLabGroupsApi, TestGitLabUsersApi, TestGitLabGroupMembersApi
        args_dict = {
            'user_id': self.user.id,
            'access_level': 10,
        }
        with HTTMock(mock_all_urls_and_raise_error):
            with HTTMock(TestGitLabGroupsApi().get_mock_for_get_url()):
                with HTTMock(TestGitLabUsersApi().get_mock_for_list_url()):
                    with HTTMock(TestGitLabGroupMembersApi().get_mock_for_get_url(raise_error=GitlabGetError())):
                        with HTTMock(TestGitLabGroupMembersApi().get_mock_for_create_url(args=args_dict)):
                            self.assertTrue(tasks.add_or_update_member(
                                user_id=self.user.id,
                                group_id=1,
                                username='name',
                                access_level=10
                            ))
class AddSubgroupTaskTests(LoginMethods):
    """Tests for the AddSubgroupTask celery task."""

    @LoginMethods.create_user_wrapper
    def setUp(self):
        owner = GitlabUser.objects.get(user_social_auth=self.user_social_auth)
        self.task_model = AddSubgroupCreateMethods().create_task(owner=owner)
        self.gitlab_group = self.task_model.gitlab_group

    def get_run_args(self):
        """Return the kwargs the celery task was scheduled with."""
        return json.loads(self.task_model.celery_task.kwargs)

    @mock_all_gitlab_url
    def test_gitlab_group_does_not_have_gitlab_id(self):
        self.gitlab_group.gitlab_id = None
        self.gitlab_group.save()

        tasks.AddSubgroupTask().run(**self.get_run_args())

        self.task_model.refresh_from_db()
        self.assertEqual(self.task_model.status, self.task_model.FAILED)
        self.assertNotEqual(self.task_model.error_msg, "")

    @mock_all_gitlab_url
    def test_run_correctly(self):
        self.gitlab_group.gitlab_id = 2
        self.gitlab_group.save()

        tasks.AddSubgroupTask().run(**self.get_run_args())

        self.task_model.refresh_from_db()
        self.task_model.new_gitlab_group.refresh_from_db()
        self.assertEqual(self.task_model.error_msg, None)
        self.assertNotEqual(self.task_model.new_gitlab_group.gitlab_id, None)
        self.assertEqual(self.task_model.status, self.task_model.SUCCEED)
class AddProjectTaskTests(LoginMethods):
    """Tests for the AddProjectTask celery task."""

    @LoginMethods.create_user_wrapper
    def setUp(self):
        self.task_model = AddProjectCreateMethods().create_task(
            owner=GitlabUser.objects.get(user_social_auth=self.user_social_auth)
        )
        self.gitlab_group = self.task_model.gitlab_group

    def get_run_args(self):
        # kwargs the celery task was scheduled with, decoded from JSON.
        return json.loads(self.task_model.celery_task.kwargs)

    @mock_all_gitlab_url
    def test_gitlab_group_does_not_have_gitlab_id(self):
        # A group without a gitlab_id makes the task fail with an error msg.
        self.gitlab_group.gitlab_id = None
        self.gitlab_group.save()
        tasks.AddProjectTask().run(**self.get_run_args())
        self.task_model.refresh_from_db()
        self.assertEqual(self.task_model.status, self.task_model.FAILED)
        self.assertNotEqual(self.task_model.error_msg, "")

    @mock_all_gitlab_url
    def test_run_correctly(self):
        # With a valid gitlab_id the task succeeds and records the new project.
        self.gitlab_group.gitlab_id = 2
        self.gitlab_group.save()
        tasks.AddProjectTask().run(**self.get_run_args())
        self.task_model.refresh_from_db()
        self.task_model.new_gitlab_project.refresh_from_db()
        self.assertEqual(self.task_model.error_msg, None)
        self.assertNotEqual(self.task_model.new_gitlab_project.gitlab_id, None)
        self.assertEqual(self.task_model.status, self.task_model.SUCCEED)
class AddMemberTaskTests(LoginMethods):
    """Tests for the AddMemberTask celery task."""

    @LoginMethods.create_user_wrapper
    def setUp(self):
        self.task_model = AddMemberCreateMethods().create_task(
            owner=GitlabUser.objects.get(user_social_auth=self.user_social_auth)
        )
        self.gitlab_group = self.task_model.gitlab_group

    def get_run_args(self):
        # kwargs the celery task was scheduled with, decoded from JSON.
        return json.loads(self.task_model.celery_task.kwargs)

    @mock_all_gitlab_url
    def test_gitlab_group_does_not_have_gitlab_id(self):
        # A group without a gitlab_id makes the task fail with an error msg.
        self.gitlab_group.gitlab_id = None
        self.gitlab_group.save()
        tasks.AddMemberTask().run(**self.get_run_args())
        self.task_model.refresh_from_db()
        self.assertEqual(self.task_model.status, self.task_model.FAILED)
        self.assertNotEqual(self.task_model.error_msg, "")

    @mock_all_gitlab_url
    def test_run_correctly(self):
        # With a valid gitlab_id the task succeeds and records the new user.
        self.gitlab_group.gitlab_id = 1
        self.gitlab_group.save()
        tasks.AddMemberTask().run(**self.get_run_args())
        self.task_model.refresh_from_db()
        self.assertEqual(self.task_model.error_msg, None)
        self.assertNotEqual(self.task_model.new_gitlab_user, None)
        self.assertEqual(self.task_model.status, self.task_model.SUCCEED)
| 37.194175
| 121
| 0.663534
| 888
| 7,662
| 5.367117
| 0.113739
| 0.063785
| 0.103651
| 0.036928
| 0.844524
| 0.841167
| 0.81347
| 0.811162
| 0.80256
| 0.789971
| 0
| 0.002958
| 0.249804
| 7,662
| 205
| 122
| 37.37561
| 0.8262
| 0
| 0
| 0.68323
| 0
| 0
| 0.014096
| 0
| 0
| 0
| 0
| 0
| 0.118012
| 1
| 0.099379
| false
| 0
| 0.080745
| 0.018634
| 0.236025
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
af52cb7f5173a99cc3a216bec32ed523f43d8f2e
| 219
|
py
|
Python
|
angr/engines/soot/exceptions.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 6,132
|
2015-08-06T23:24:47.000Z
|
2022-03-31T21:49:34.000Z
|
angr/engines/soot/exceptions.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 2,272
|
2015-08-10T08:40:07.000Z
|
2022-03-31T23:46:44.000Z
|
angr/engines/soot/exceptions.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 1,155
|
2015-08-06T23:37:39.000Z
|
2022-03-31T05:54:11.000Z
|
# Exception types for the Soot engine; raised/handled by callers elsewhere
# in the package (intended semantics follow the class names).
class BlockTerminationNotice(Exception):
    pass


class IncorrectLocationException(Exception):
    pass


class SootMethodNotLoadedException(Exception):
    pass


class SootFieldNotLoadedException(Exception):
    pass
| 16.846154
| 46
| 0.799087
| 16
| 219
| 10.9375
| 0.4375
| 0.297143
| 0.308571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146119
| 219
| 12
| 47
| 18.25
| 0.935829
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
af6d63ddb4db62f035b31c6a5fd6f0572098d530
| 357
|
py
|
Python
|
synthesis/__init__.py
|
kunheek/style-aware-discriminator
|
d6ddd7c735d6c162f2a3c942d5cba1e0457f8c39
|
[
"MIT"
] | 21
|
2022-03-30T06:58:50.000Z
|
2022-03-31T16:38:48.000Z
|
synthesis/__init__.py
|
kunheek/style-aware-discriminator
|
d6ddd7c735d6c162f2a3c942d5cba1e0457f8c39
|
[
"MIT"
] | null | null | null |
synthesis/__init__.py
|
kunheek/style-aware-discriminator
|
d6ddd7c735d6c162f2a3c942d5cba1e0457f8c39
|
[
"MIT"
] | null | null | null |
from .base_synthesizer import BaseSynthesizer
from .interpolation_synthesizer import InterpolationSynthesizer
from .local_translation_synthesizer import LocalTranslationSynthesizer
from .prototype_synthesizer import PrototypeSynthesizer
from .swap_synthesizer import SwapSynthesizer
from .transplantation_synthesizer import TransplantationSynthesizer
| 51
| 71
| 0.89916
| 31
| 357
| 10.129032
| 0.516129
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084034
| 357
| 6
| 72
| 59.5
| 0.960245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
afd216efcec70b183f2a5f6a0f3fa85787c79bbb
| 57
|
py
|
Python
|
main.py
|
theexplorerdude/Password-manager
|
0743c9e9e29d1034e554fd69267ae95cd5ba2317
|
[
"MIT"
] | null | null | null |
main.py
|
theexplorerdude/Password-manager
|
0743c9e9e29d1034e554fd69267ae95cd5ba2317
|
[
"MIT"
] | null | null | null |
main.py
|
theexplorerdude/Password-manager
|
0743c9e9e29d1034e554fd69267ae95cd5ba2317
|
[
"MIT"
] | null | null | null |
# Placeholder entry point from the initial commit; no manager logic yet.
print("its the first commit of simple password manager")
| 28.5
| 56
| 0.789474
| 9
| 57
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 57
| 1
| 57
| 57
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0.824561
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
|
0
| 6
|
bb816d1f987a0a364530c5c9df802d502b830786
| 37
|
py
|
Python
|
vulnapp/crawler/models/__init__.py
|
Fufuhu/VulnAppSample
|
d6a9ab667b2b5628a649d57cd97b1979bdd986f8
|
[
"Apache-2.0"
] | 1
|
2021-09-06T07:07:47.000Z
|
2021-09-06T07:07:47.000Z
|
vulnapp/crawler/models/__init__.py
|
Fufuhu/VulnAppSample
|
d6a9ab667b2b5628a649d57cd97b1979bdd986f8
|
[
"Apache-2.0"
] | null | null | null |
vulnapp/crawler/models/__init__.py
|
Fufuhu/VulnAppSample
|
d6a9ab667b2b5628a649d57cd97b1979bdd986f8
|
[
"Apache-2.0"
] | null | null | null |
from .crawled_page import CrawledPage
| 37
| 37
| 0.891892
| 5
| 37
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a59c64d642d602351d197587631217f0b410910b
| 207
|
py
|
Python
|
juon/errors/missing_dependency_error.py
|
joocer/seren
|
1563f84015b3460d766c71fbe108f17bc2b72181
|
[
"Apache-2.0"
] | null | null | null |
juon/errors/missing_dependency_error.py
|
joocer/seren
|
1563f84015b3460d766c71fbe108f17bc2b72181
|
[
"Apache-2.0"
] | 287
|
2021-05-14T21:25:26.000Z
|
2022-03-30T12:02:51.000Z
|
juon/errors/missing_dependency_error.py
|
joocer/juon
|
1563f84015b3460d766c71fbe108f17bc2b72181
|
[
"Apache-2.0"
] | 1
|
2021-04-29T18:18:20.000Z
|
2021-04-29T18:18:20.000Z
|
# nodoc - don't add to the documentation wiki
"""
This exception should be used when a lazy import fails
"""
from .base_exception import BaseException
class MissingDependencyError(BaseException):
pass
| 20.7
| 54
| 0.772947
| 27
| 207
| 5.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164251
| 207
| 9
| 55
| 23
| 0.919075
| 0.478261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
a5a488f8bbeb1fe0b5ec321de21c13f59115e9d6
| 40
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/template/data/_indicator.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/template/data/_indicator.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/template/data/_indicator.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
from plotly.graph_objs import Indicator
| 20
| 39
| 0.875
| 6
| 40
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
36fd90f74793e87abdc3f8fbbf7152a9ff72d737
| 257
|
py
|
Python
|
catnip/forms.py
|
ObjectifLibre/catnip
|
5d89c92de0396b1e912bb498af88687dd046718d
|
[
"Apache-2.0"
] | 2
|
2020-03-13T12:45:10.000Z
|
2020-04-01T12:04:49.000Z
|
catnip/forms.py
|
ObjectifLibre/catnip
|
5d89c92de0396b1e912bb498af88687dd046718d
|
[
"Apache-2.0"
] | 1
|
2020-07-24T21:54:08.000Z
|
2020-07-24T21:54:08.000Z
|
catnip/forms.py
|
ObjectifLibre/catnip
|
5d89c92de0396b1e912bb498af88687dd046718d
|
[
"Apache-2.0"
] | 1
|
2020-05-11T19:19:12.000Z
|
2020-05-11T19:19:12.000Z
|
from django import forms
class AuthForm(forms.Form):
domain = forms.CharField(label='Your domain', max_length=250)
username = forms.CharField(label='Your name', max_length=250)
password = forms.CharField(label='Your password', max_length=250)
| 32.125
| 69
| 0.743191
| 35
| 257
| 5.371429
| 0.485714
| 0.223404
| 0.303191
| 0.367021
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040541
| 0.136187
| 257
| 7
| 70
| 36.714286
| 0.806306
| 0
| 0
| 0
| 0
| 0
| 0.128405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
b1c430b5f856c375fc892fb7876debb5dee14d60
| 149
|
py
|
Python
|
office365/teams/schedulingGroup.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | null | null | null |
office365/teams/schedulingGroup.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | null | null | null |
office365/teams/schedulingGroup.py
|
wreiner/Office365-REST-Python-Client
|
476bbce4f5928a140b4f5d33475d0ac9b0783530
|
[
"MIT"
] | null | null | null |
from office365.entity import Entity
class SchedulingGroup(Entity):
"""A logical grouping of users in a schedule (usually by role)."""
pass
| 21.285714
| 70
| 0.724832
| 20
| 149
| 5.4
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0.187919
| 149
| 6
| 71
| 24.833333
| 0.867769
| 0.402685
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
b1cc2ef42369ef8954e94c6f25717ef123a02305
| 29
|
py
|
Python
|
coincap/__init__.py
|
nlnsaoadc/py-coincap
|
d70718744593d97655c09db897fbdedf315dc432
|
[
"MIT"
] | null | null | null |
coincap/__init__.py
|
nlnsaoadc/py-coincap
|
d70718744593d97655c09db897fbdedf315dc432
|
[
"MIT"
] | null | null | null |
coincap/__init__.py
|
nlnsaoadc/py-coincap
|
d70718744593d97655c09db897fbdedf315dc432
|
[
"MIT"
] | null | null | null |
from .coincap import CoinCap
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
593ee71ed69d6ba6dbd4ae036c4b4272d4bf81d5
| 153,066
|
py
|
Python
|
pointCollection/CS2_wfm/data.py
|
tsutterley/pointCollection
|
04e4359e463ff8a556e0d078373578bd96390151
|
[
"MIT"
] | null | null | null |
pointCollection/CS2_wfm/data.py
|
tsutterley/pointCollection
|
04e4359e463ff8a556e0d078373578bd96390151
|
[
"MIT"
] | null | null | null |
pointCollection/CS2_wfm/data.py
|
tsutterley/pointCollection
|
04e4359e463ff8a556e0d078373578bd96390151
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 12 2020
Class to read and manipulate CryoSat-2 waveform data
Reads CryoSat Level-1b data products from baselines A, B and C
Reads CryoSat Level-1b netCDF4 data products from baseline D
Supported CryoSat Modes: LRM, SAR, SARin, FDM, SID, GDR
INPUTS:
full_filename: full path of CryoSat .DBL or .nc file
PYTHON DEPENDENCIES:
numpy: Scientific Computing Tools For Python
http://www.numpy.org
http://www.scipy.org/NumPy_for_Matlab_Users
netCDF4: Python interface to the netCDF C library
https://unidata.github.io/netcdf4-python/netCDF4/index.html
UPDATE HISTORY:
Updated 08/2020: flake8 compatible binary regular expression strings
Forked 02/2020 from read_cryosat_L1b.py
Updated 11/2019: empty placeholder dictionary for baseline D DSD headers
Updated 09/2019: added netCDF4 read function for baseline D
Updated 04/2019: USO correction signed 32 bit int
Updated 10/2018: updated header read functions for python3
Updated 05/2016: using __future__ print and division functions
Written 03/2016
"""
from __future__ import print_function
from __future__ import division
import numpy as np
import pointCollection as pc
import netCDF4
import re
import os
class data(pc.data):
np.seterr(invalid='ignore')
def __default_field_dict__(self):
"""
Define the default fields that get read from the CryoSat-2 file
"""
field_dict = {}
field_dict['Location'] = ['days_J2k','Day','Second','Micsec','USO_Corr',
'Mode_ID','SSC','Inst_config','Rec_Count','Lat','Lon','Alt','Alt_rate',
'Sat_velocity','Real_beam','Baseline','ST_ID','Roll','Pitch','Yaw','MCD']
field_dict['Data'] = ['TD', 'H_0','COR2','LAI','FAI','AGC_CH1','AGC_CH2',
'TR_gain_CH1','TR_gain_CH2','TX_Power','Doppler_range','TR_inst_range',
'R_inst_range','TR_inst_gain','R_inst_gain','Internal_phase',
'External_phase','Noise_power','Phase_slope']
field_dict['Geometry'] = ['dryTrop','wetTrop','InvBar','DAC','Iono_GIM',
'Iono_model','ocTideElv','lpeTideElv','olTideElv','seTideElv','gpTideElv',
'Surf_type','Corr_status','Corr_error']
field_dict['Waveform_20Hz'] = ['Waveform','Linear_Wfm_Multiplier',
'Power2_Wfm_Multiplier','N_avg_echoes']
field_dict['METADATA'] = ['MPH','SPH']
return field_dict
def from_dbl(self, full_filename, field_dict=None, unpack=False, verbose=False):
"""
Read CryoSat Level-1b data from binary formats
"""
# file basename and file extension of input file
fileBasename,fileExtension=os.path.splitext(os.path.basename(full_filename))
# CryoSat file class
# OFFL (Off Line Processing/Systematic)
# NRT_ (Near Real Time)
# RPRO (ReProcessing)
# TEST (Testing)
# TIxx (Stand alone IPF1 testing)
# LTA_ (Long Term Archive)
regex_class = 'OFFL|NRT_|RPRO|TEST|TIxx|LTA_'
# CryoSat mission products
# SIR1SAR_FR: Level 1 FBR SAR Mode (Rx1 Channel)
# SIR2SAR_FR: Level 1 FBR SAR Mode (Rx2 Channel)
# SIR_SIN_FR: Level 1 FBR SARin Mode
# SIR_LRM_1B: Level-1 Product Low Rate Mode
# SIR_FDM_1B: Level-1 Product Fast Delivery Marine Mode
# SIR_SAR_1B: Level-1 SAR Mode
# SIR_SIN_1B: Level-1 SARin Mode
# SIR1LRC11B: Level-1 CAL1 Low Rate Mode (Rx1 Channel)
# SIR2LRC11B: Level-1 CAL1 Low Rate Mode (Rx2 Channel)
# SIR1SAC11B: Level-1 CAL1 SAR Mode (Rx1 Channel)
# SIR2SAC11B: Level-1 CAL1 SAR Mode (Rx2 Channel)
# SIR_SIC11B: Level-1 CAL1 SARin Mode
# SIR_SICC1B: Level-1 CAL1 SARIN Exotic Data
# SIR1SAC21B: Level-1 CAL2 SAR Mode (Rx1 Channel)
# SIR2SAC21B: Level-1 CAL2 SAR Mode (Rx2 Channel)
# SIR1SIC21B: Level-1 CAL2 SARin Mode (Rx1 Channel)
# SIR2SIC21B: Level-1 CAL2 SARin Mode (Rx1 Channel)
# SIR1LRM_0M: LRM and TRK Monitoring Data from Rx 1 Channel
# SIR2LRM_0M: LRM and TRK Monitoring Data from Rx 2 Channel
# SIR1SAR_0M: SAR Monitoring Data from Rx 1 Channel
# SIR2SAR_0M: SAR Monitoring Data from Rx 1 Channel
# SIR_SIN_0M: SARIN Monitoring Data
# SIR_SIC40M: CAL4 Monitoring Data
regex_products = ('SIR1SAR_FR|SIR2SAR_FR|SIR_SIN_FR|SIR_LRM_1B|SIR_FDM_1B|'
'SIR_SAR_1B|SIR_SIN_1B|SIR1LRC11B|SIR2LRC11B|SIR1SAC11B|SIR2SAC11B|'
'SIR_SIC11B|SIR_SICC1B|SIR1SAC21B|SIR2SAC21B|SIR1SIC21B|SIR2SIC21B|'
'SIR1LRM_0M|SIR2LRM_0M|SIR1SAR_0M|SIR2SAR_0M|SIR_SIN_0M|SIR_SIC40M')
# CRYOSAT LEVEL-1b PRODUCTS NAMING RULES
# Mission Identifier
# File Class
# File Product
# Validity Start Date and Time
# Validity Stop Date and Time
# Baseline Identifier
# Version Number
regex_pattern = r'(.*?)_({0})_({1})_(\d+T?\d+)_(\d+T?\d+)_(.*?)(\d+)'
rx = re.compile(regex_pattern.format(regex_class,regex_products),re.VERBOSE)
# extract file information from filename
MI,CLASS,PRODUCT,START,STOP,BASELINE,VERSION=rx.findall(fileBasename).pop()
# CryoSat-2 Mode record sizes
i_size_timestamp = 12
n_SARIN_BC_RW = 1024
n_SARIN_RW = 512
n_SAR_BC_RW = 256
n_SAR_RW = 125
n_LRM_RW = 128
n_blocks = 20
n_BeamBehaviourParams = 50
# check baseline from file to set i_record_size and allocation function
if (BASELINE == 'C'):
# calculate total record sizes of each dataset group
i_size_timegroup = i_size_timestamp + 4 + 2*2 + 6*4 + 3*3*4 + 3*2 + 4*4
i_size_measuregroup = 8 + 4*17 + 8
i_size_external_corr = 4*13 + 12
i_size_1Hz_LRM = i_size_timestamp + 3*4 + 8 + n_LRM_RW*2 + 2*4 + 2*2
i_size_1Hz_SAR = i_size_timestamp + 4*3 + 8 + n_SAR_RW*2 + 4 + 4 + 2 + 2
i_size_1Hz_SARIN = i_size_timestamp + 4*3 + 8 + n_SARIN_RW*2 + 4 + 4 + 2 + 2
i_size_LRM_waveform = n_LRM_RW*2 + 4 + 4 + 2 + 2
i_size_SAR_waveform = n_SAR_BC_RW*2 + 4 + 4 + 2 + 2 + n_BeamBehaviourParams*2
i_size_SARIN_waveform = n_SARIN_BC_RW*2 + 4 + 4 + 2 + 2 + n_SARIN_BC_RW*2 + \
n_SARIN_BC_RW*4 + n_BeamBehaviourParams*2
# Low-Resolution Mode Record Size
i_record_size_LRM_L1b = n_blocks * (i_size_timegroup + \
i_size_measuregroup + i_size_LRM_waveform) + i_size_external_corr + \
i_size_1Hz_LRM
# SAR Mode Record Size
i_record_size_SAR_L1b = n_blocks * (i_size_timegroup + \
i_size_measuregroup + i_size_SAR_waveform) + i_size_external_corr + \
i_size_1Hz_SAR
# SARIN Mode Record Size
i_record_size_SARIN_L1b = n_blocks * (i_size_timegroup + \
i_size_measuregroup + i_size_SARIN_waveform) + i_size_external_corr + \
i_size_1Hz_SARIN
# set read function for Baseline C
read_cryosat_variables = self.cryosat_baseline_C
else:
# calculate total record sizes of each dataset group
i_size_timegroup = i_size_timestamp + 4 + 2*2+ 6*4 + 3*3*4 + 4
i_size_measuregroup = 8 + 4*17 + 8
i_size_external_corr = 4*13 + 12
i_size_1Hz_LRM = i_size_timestamp + 3*4 + 8 + n_LRM_RW*2 + 2*4 + 2*2
i_size_1Hz_SAR = i_size_timestamp + 4*3 + 8 + n_SAR_RW*2 + 4 + 4 + 2 + 2
i_size_1Hz_SARIN = i_size_timestamp + 4*3 + 8 + n_SARIN_RW*2 + 4 + 4 + 2 + 2
i_size_LRM_waveform = n_LRM_RW*2 + 4 + 4 + 2 + 2
i_size_SAR_waveform = n_SAR_RW*2 + 4 + 4 + 2 + 2 + n_BeamBehaviourParams*2
i_size_SARIN_waveform = n_SARIN_RW*2 + 4 + 4 + 2 + 2 + n_SARIN_RW*2 + \
n_SARIN_RW*4 + n_BeamBehaviourParams*2
# Low-Resolution Mode Record Size
i_record_size_LRM_L1b = n_blocks * (i_size_timegroup + \
i_size_measuregroup + i_size_LRM_waveform) + i_size_external_corr + \
i_size_1Hz_LRM
# SAR Mode Record Size
i_record_size_SAR_L1b = n_blocks * (i_size_timegroup + \
i_size_measuregroup + i_size_SAR_waveform) + i_size_external_corr + \
i_size_1Hz_SAR
# SARIN Mode Record Size
i_record_size_SARIN_L1b = n_blocks * (i_size_timegroup + \
i_size_measuregroup + i_size_SARIN_waveform) + i_size_external_corr + \
i_size_1Hz_SARIN
# set read function for Baselines A and B
read_cryosat_variables = self.cryosat_baseline_AB
# get dataset MODE from PRODUCT portion of file name
# set record sizes and DS_TYPE for read_DSD function
self.MODE = re.findall('(LRM|SAR|SIN)', PRODUCT).pop()
if (self.MODE == 'LRM'):
i_record_size = i_record_size_LRM_L1b
DS_TYPE = 'CS_L1B'
elif (self.MODE == 'SAR'):
i_record_size = i_record_size_SAR_L1b
DS_TYPE = 'CS_L1B'
elif (self.MODE == 'SIN'):
i_record_size = i_record_size_SARIN_L1b
DS_TYPE = 'CS_L1B'
# read the input file to get file information
fid = os.open(os.path.expanduser(full_filename),os.O_RDONLY)
file_info = os.fstat(fid)
os.close(fid)
# num DSRs from SPH
j_num_DSR = np.int32(file_info.st_size//i_record_size)
# print file information
if verbose:
print(full_filename)
print('{0:d} {1:d} {2:d}'.format(j_num_DSR,file_info.st_size,i_record_size))
# Check if MPH/SPH/DSD headers
if (j_num_DSR*i_record_size == file_info.st_size):
print('No Header on file')
print('The number of DSRs is: {0:d}'.format(j_num_DSR))
else:
print('Header on file')
# Check if MPH/SPH/DSD headers
if (j_num_DSR*i_record_size != file_info.st_size):
# If there are MPH/SPH/DSD headers
s_MPH_fields = self.read_MPH(full_filename)
j_sph_size = np.int32(re.findall(r'[-+]?\d+',s_MPH_fields['SPH_SIZE']).pop())
s_SPH_fields = self.read_SPH(full_filename, j_sph_size)
# extract information from DSD fields
s_DSD_fields = self.read_DSD(full_filename, DS_TYPE=DS_TYPE)
# extract DS_OFFSET
j_DS_start = np.int32(re.findall(r'[-+]?\d+',s_DSD_fields['DS_OFFSET']).pop())
# extract number of DSR in the file
j_num_DSR = np.int32(re.findall(r'[-+]?\d+',s_DSD_fields['NUM_DSR']).pop())
# check the record size
j_DSR_size = np.int32(re.findall(r'[-+]?\d+',s_DSD_fields['DSR_SIZE']).pop())
# minimum size is start of the read plus number of records to read
j_check_size = j_DS_start + (j_DSR_size*j_num_DSR)
if verbose:
print('The offset of the DSD is: {0:d} bytes'.format(j_DS_start))
print('The number of DSRs is {0:d}'.format(j_num_DSR))
print('The size of the DSR is {0:d}'.format(j_DSR_size))
# check if invalid file size
if (j_check_size > file_info.st_size):
raise IOError('File size error')
# extract binary data from input CryoSat data file (skip headers)
fid = open(os.path.expanduser(full_filename), 'rb')
cryosat_header = fid.read(j_DS_start)
# iterate through CryoSat file and fill output variables
CS_L1b_mds = read_cryosat_variables(fid, j_num_DSR)
# add headers to output dictionary as METADATA
CS_L1b_mds['METADATA'] = {}
CS_L1b_mds['METADATA']['MPH'] = s_MPH_fields
CS_L1b_mds['METADATA']['SPH'] = s_SPH_fields
CS_L1b_mds['METADATA']['DSD'] = s_DSD_fields
# close the input CryoSat binary file
fid.close()
else:
# If there are not MPH/SPH/DSD headers
# extract binary data from input CryoSat data file
fid = open(os.path.expanduser(full_filename), 'rb')
# iterate through CryoSat file and fill output variables
CS_L1b_mds = read_cryosat_variables(fid, j_num_DSR)
# close the input CryoSat binary file
fid.close()
# if unpacking the units
if unpack:
CS_l1b_scale = self.cryosat_scaling_factors()
# for each dictionary key
for group in CS_l1b_scale.keys():
# for each variable
for key,val in CS_L1b_mds[group].items():
# check if val is the 20Hz waveform beam variables
if isinstance(val, dict):
# for each waveform beam variable
for k,v in val.items():
# scale variable
CS_L1b_mds[group][key][k] = CS_l1b_scale[group][key][k]*v.copy()
else:
# scale variable
CS_L1b_mds[group][key] = CS_l1b_scale[group][key]*val.copy()
# calculate GPS time of CryoSat data (seconds since Jan 6, 1980 00:00:00)
# from TAI time since Jan 1, 2000 00:00:00
GPS_Time = self.calc_GPS_time(CS_L1b_mds['Location']['Day'],
CS_L1b_mds['Location']['Second'], CS_L1b_mds['Location']['Micsec'])
# leap seconds for converting from GPS time to UTC time
leap_seconds = self.count_leap_seconds(GPS_Time)
# calculate dates as J2000 days (UTC)
CS_L1b_mds['Location']['days_J2k'] = (GPS_Time - leap_seconds)/86400.0 - 7300.0
# parameters to extract
if field_dict is None:
field_dict = self.__default_field_dict__()
# extract fields of interest using field dict keys
for group,variables in field_dict.items():
for field in variables:
if field not in self.fields:
self.fields.append(field)
setattr(self, field, CS_L1b_mds[group][field])
# update size and shape of input data
self.__update_size_and_shape__()
# return the data and header text
return self
def from_nc(self, full_filename, field_dict=None, unpack=False, verbose=False):
"""
Read CryoSat Level-1b data from netCDF4 format data
"""
# file basename and file extension of input file
fileBasename,fileExtension=os.path.splitext(os.path.basename(full_filename))
# CryoSat file class
# OFFL (Off Line Processing/Systematic)
# NRT_ (Near Real Time)
# RPRO (ReProcessing)
# TEST (Testing)
# TIxx (Stand alone IPF1 testing)
# LTA_ (Long Term Archive)
regex_class = 'OFFL|NRT_|RPRO|TEST|TIxx|LTA_'
# CryoSat mission products
# SIR1SAR_FR: Level 1 FBR SAR Mode (Rx1 Channel)
# SIR2SAR_FR: Level 1 FBR SAR Mode (Rx2 Channel)
# SIR_SIN_FR: Level 1 FBR SARin Mode
# SIR_LRM_1B: Level-1 Product Low Rate Mode
# SIR_FDM_1B: Level-1 Product Fast Delivery Marine Mode
# SIR_SAR_1B: Level-1 SAR Mode
# SIR_SIN_1B: Level-1 SARin Mode
# SIR1LRC11B: Level-1 CAL1 Low Rate Mode (Rx1 Channel)
# SIR2LRC11B: Level-1 CAL1 Low Rate Mode (Rx2 Channel)
# SIR1SAC11B: Level-1 CAL1 SAR Mode (Rx1 Channel)
# SIR2SAC11B: Level-1 CAL1 SAR Mode (Rx2 Channel)
# SIR_SIC11B: Level-1 CAL1 SARin Mode
# SIR_SICC1B: Level-1 CAL1 SARIN Exotic Data
# SIR1SAC21B: Level-1 CAL2 SAR Mode (Rx1 Channel)
# SIR2SAC21B: Level-1 CAL2 SAR Mode (Rx2 Channel)
# SIR1SIC21B: Level-1 CAL2 SARin Mode (Rx1 Channel)
# SIR2SIC21B: Level-1 CAL2 SARin Mode (Rx1 Channel)
# SIR1LRM_0M: LRM and TRK Monitoring Data from Rx 1 Channel
# SIR2LRM_0M: LRM and TRK Monitoring Data from Rx 2 Channel
# SIR1SAR_0M: SAR Monitoring Data from Rx 1 Channel
# SIR2SAR_0M: SAR Monitoring Data from Rx 1 Channel
# SIR_SIN_0M: SARIN Monitoring Data
# SIR_SIC40M: CAL4 Monitoring Data
regex_products = ('SIR1SAR_FR|SIR2SAR_FR|SIR_SIN_FR|SIR_LRM_1B|SIR_FDM_1B|'
'SIR_SAR_1B|SIR_SIN_1B|SIR1LRC11B|SIR2LRC11B|SIR1SAC11B|SIR2SAC11B|'
'SIR_SIC11B|SIR_SICC1B|SIR1SAC21B|SIR2SAC21B|SIR1SIC21B|SIR2SIC21B|'
'SIR1LRM_0M|SIR2LRM_0M|SIR1SAR_0M|SIR2SAR_0M|SIR_SIN_0M|SIR_SIC40M')
# CRYOSAT LEVEL-1b PRODUCTS NAMING RULES
# Mission Identifier
# File Class
# File Product
# Validity Start Date and Time
# Validity Stop Date and Time
# Baseline Identifier
# Version Number
regex_pattern = r'(.*?)_({0})_({1})_(\d+T?\d+)_(\d+T?\d+)_(.*?)(\d+)'
rx = re.compile(regex_pattern.format(regex_class,regex_products),re.VERBOSE)
# extract file information from filename
MI,CLASS,PRODUCT,START,STOP,BASELINE,VERSION=rx.findall(fileBasename).pop()
print(full_filename) if verbose else None
# get dataset MODE from PRODUCT portion of file name
self.MODE = re.findall(r'(LRM|FDM|SAR|SIN)', PRODUCT).pop()
# read level-2 CryoSat-2 data from netCDF4 file
CS_L1b_mds = self.cryosat_baseline_D(full_filename, unpack=unpack)
# calculate GPS time of CryoSat data (seconds since Jan 6, 1980 00:00:00)
# from TAI time since Jan 1, 2000 00:00:00
GPS_Time = self.calc_GPS_time(CS_L1b_mds['Location']['Day'],
CS_L1b_mds['Location']['Second'], CS_L1b_mds['Location']['Micsec'])
# leap seconds for converting from GPS time to UTC time
leap_seconds = self.count_leap_seconds(GPS_Time)
# calculate dates as J2000 days (UTC)
CS_L1b_mds['Location']['days_J2k'] = (GPS_Time - leap_seconds)/86400.0 - 7300.0
# parameters to extract
if field_dict is None:
field_dict = self.__default_field_dict__()
# extract fields of interest using field dict keys
for group,variables in field_dict.items():
for field in variables:
if field not in self.fields:
self.fields.append(field)
setattr(self, field, CS_L1b_mds[group][field])
# update size and shape of input data
self.__update_size_and_shape__()
# return the data and header text
return self
def calc_GPS_time(self, day, second, micsec):
"""
Calculate the GPS time (seconds since Jan 6, 1980 00:00:00)
"""
# TAI time is ahead of GPS by 19 seconds
return (day + 7300.0)*86400.0 + second.astype('f') + micsec/1e6 - 19
def count_leap_seconds(self, GPS_Time):
"""
Count number of leap seconds that have passed for given GPS times
"""
# GPS times for leap seconds
leaps = [46828800, 78364801, 109900802, 173059203, 252028804, 315187205,
346723206, 393984007, 425520008, 457056009, 504489610, 551750411,
599184012, 820108813, 914803214, 1025136015, 1119744016, 1167264017]
# number of leap seconds prior to GPS_Time
n_leaps = np.zeros_like(GPS_Time)
for i,leap in enumerate(leaps):
count = np.count_nonzero(GPS_Time >= leap)
if (count > 0):
i_records,i_blocks = np.nonzero(GPS_Time >= leap)
n_leaps[i_records,i_blocks] += 1.0
return n_leaps
def read_MPH(self, full_filename):
"""
Read ASCII Main Product Header (MPH) block from an ESA PDS file
"""
# read input data file
with open(os.path.expanduser(full_filename), 'rb') as fid:
file_contents = fid.read().splitlines()
# Define constant values associated with PDS file formats
# number of text lines in standard MPH
n_MPH_lines = 41
# check that first line of header matches PRODUCT
if not bool(re.match(br'PRODUCT\=\"(.*)(?=\")',file_contents[0])):
raise IOError('File does not start with a valid PDS MPH')
# read MPH header text
s_MPH_fields = {}
for i in range(n_MPH_lines):
# use regular expression operators to read headers
if bool(re.match(br'(.*?)\=\"(.*)(?=\")',file_contents[i])):
# data fields within quotes
field,value=re.findall(br'(.*?)\=\"(.*)(?=\")',file_contents[i]).pop()
s_MPH_fields[field.decode('utf-8')] = value.decode('utf-8').rstrip()
elif bool(re.match(br'(.*?)\=(.*)',file_contents[i])):
# data fields without quotes
field,value=re.findall(br'(.*?)\=(.*)',file_contents[i]).pop()
s_MPH_fields[field.decode('utf-8')] = value.decode('utf-8').rstrip()
# Return block name array to calling function
return s_MPH_fields
def read_SPH(self, full_filename, j_sph_size):
"""
Read ASCII Specific Product Header (SPH) block from a PDS file
"""
# read input data file
with open(os.path.expanduser(full_filename), 'rb') as fid:
file_contents = fid.read().splitlines()
# Define constant values associated with PDS file formats
# number of text lines in standard MPH
n_MPH_lines = 41
# compile regular expression operator for reading headers
rx = re.compile(br'(.*?)\=\"?(.*)',re.VERBOSE)
# check first line of header matches SPH_DESCRIPTOR
if not bool(re.match(br'SPH\_DESCRIPTOR\=',file_contents[n_MPH_lines+1])):
raise IOError('File does not have a valid PDS DSD')
# read SPH header text (no binary control characters)
s_SPH_lines = [li for li in file_contents[n_MPH_lines+1:] if rx.match(li)
and not re.search(br'[^\x20-\x7e]+',li)]
# extract SPH header text
s_SPH_fields = {}
c = 0
while (c < len(s_SPH_lines)):
# check if line is within DS_NAME portion of SPH header
if bool(re.match(br'DS_NAME',s_SPH_lines[c])):
# add dictionary for DS_NAME
field,value=re.findall(br'(.*?)\=\"(.*)(?=\")',s_SPH_lines[c]).pop()
key = value.decode('utf-8').rstrip()
s_SPH_fields[key] = {}
for line in s_SPH_lines[c+1:c+7]:
if bool(re.match(br'(.*?)\=\"(.*)(?=\")',line)):
# data fields within quotes
dsfield,dsvalue=re.findall(br'(.*?)\=\"(.*)(?=\")',line).pop()
s_SPH_fields[key][dsfield.decode('utf-8')] = dsvalue.decode('utf-8').rstrip()
elif bool(re.match(br'(.*?)\=(.*)',line)):
# data fields without quotes
dsfield,dsvalue=re.findall(br'(.*?)\=(.*)',line).pop()
s_SPH_fields[key][dsfield.decode('utf-8')] = dsvalue.decode('utf-8').rstrip()
# add 6 to counter to go to next entry
c += 6
# use regular expression operators to read headers
elif bool(re.match(br'(.*?)\=\"(.*)(?=\")',s_SPH_lines[c])):
# data fields within quotes
field,value=re.findall(br'(.*?)\=\"(.*)(?=\")',s_SPH_lines[c]).pop()
s_SPH_fields[field.decode('utf-8')] = value.decode('utf-8').rstrip()
elif bool(re.match(br'(.*?)\=(.*)',s_SPH_lines[c])):
# data fields without quotes
field,value=re.findall(br'(.*?)\=(.*)',s_SPH_lines[c]).pop()
s_SPH_fields[field.decode('utf-8')] = value.decode('utf-8').rstrip()
# add 1 to counter to go to next line
c += 1
# Return block name array to calling function
return s_SPH_fields
def read_DSD(self, full_filename, DS_TYPE=None):
"""
Read ASCII Data Set Descriptors (DSD) block from a PDS file
"""
# read input data file
with open(os.path.expanduser(full_filename), 'rb') as fid:
file_contents = fid.read().splitlines()
# Define constant values associated with PDS file formats
# number of text lines in standard MPH
n_MPH_lines = 41
# number of text lines in a DSD header
n_DSD_lines = 8
# Level-1b CryoSat DS_NAMES within files
regex_patterns = []
if (DS_TYPE == 'CS_L1B'):
regex_patterns.append(br'DS_NAME\="SIR_L1B_LRM[\s+]*"')
regex_patterns.append(br'DS_NAME\="SIR_L1B_SAR[\s+]*"')
regex_patterns.append(br'DS_NAME\="SIR_L1B_SARIN[\s+]*"')
elif (DS_TYPE == 'SIR_L1B_FDM'):
regex_patterns.append(br'DS_NAME\="SIR_L1B_FDM[\s+]*"')
# find the DSD starting line within the SPH header
c = 0
Flag = False
while ((Flag is False) and (c < len(regex_patterns))):
# find indice within
indice = [i for i,line in enumerate(file_contents[n_MPH_lines+1:]) if
re.search(regex_patterns[c],line)]
if indice:
Flag = True
else:
c+=1
# check that valid indice was found within header
if not indice:
raise IOError('Can not find correct DSD field')
# extract s_DSD_fields info
DSD_START = n_MPH_lines + indice[0] + 1
s_DSD_fields = {}
for i in range(DSD_START,DSD_START+n_DSD_lines):
# use regular expression operators to read headers
if bool(re.match(br'(.*?)\=\"(.*)(?=\")',file_contents[i])):
# data fields within quotes
field,value=re.findall(br'(.*?)\=\"(.*)(?=\")',file_contents[i]).pop()
s_DSD_fields[field.decode('utf-8')] = value.decode('utf-8').rstrip()
elif bool(re.match(br'(.*?)\=(.*)',file_contents[i])):
# data fields without quotes
field,value=re.findall(br'(.*?)\=(.*)',file_contents[i]).pop()
s_DSD_fields[field.decode('utf-8')] = value.decode('utf-8').rstrip()
# Return block name array to calling function
return s_DSD_fields
def cryosat_baseline_AB(self, fid, n_records):
    """
    Read L1b MDS variables for CryoSat Baselines A and B

    Parameters
    ----------
    fid: obj
        Open binary file object positioned at the start of the
        L1b Measurement Data Set (MDS) records
    n_records: int
        Number of MDS records to read from the file

    Returns
    -------
    CS_l1b_mds: dict
        L1b variables arranged in 'Location', 'Data', 'Geometry',
        'Waveform_1Hz' and 'Waveform_20Hz' groups.  Mode-dependent
        arrays are sized by self.MODE ('LRM', 'SAR' or 'SIN').
    """
    # mode-specific range window lengths (number of waveform bins)
    n_SARIN_RW = 512
    n_SAR_RW = 128
    n_LRM_RW = 128
    # number of 20 Hz measurement blocks within each MDS record
    n_blocks = 20
    # total number of beam behaviour parameters (5 named + spares)
    n_BeamBehaviourParams = 50
    # Bind all the variables of the l1b_mds together into a single dictionary
    CS_l1b_mds = {}
    # CryoSat-2 Time and Orbit Group
    CS_l1b_mds['Location'] = {}
    # Time: day part
    # fill_value=0 is used below to flag records that were never read
    CS_l1b_mds['Location']['Day'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32,fill_value=0)
    # Time: second part
    CS_l1b_mds['Location']['Second'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # Time: microsecond part
    CS_l1b_mds['Location']['Micsec'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # USO correction factor
    CS_l1b_mds['Location']['USO_Corr'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Mode ID
    CS_l1b_mds['Location']['Mode_ID'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint16)
    # Source sequence counter
    CS_l1b_mds['Location']['SSC'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint16)
    # Instrument configuration
    CS_l1b_mds['Location']['Inst_config'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # Record Counter
    CS_l1b_mds['Location']['Rec_Count'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Lat'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Lon'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Alt: packed units (mm, 1e-3 m)
    # Altitude of COG above reference ellipsoid (interpolated value)
    CS_l1b_mds['Location']['Alt'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instantaneous altitude rate derived from orbit: packed units (mm/s, 1e-3 m/s)
    CS_l1b_mds['Location']['Alt_rate'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Satellite velocity vector. In ITRF: packed units (mm/s, 1e-3 m/s)
    # ITRF= International Terrestrial Reference Frame
    CS_l1b_mds['Location']['Sat_velocity'] = np.ma.zeros((n_records,n_blocks,3),dtype=np.int32)
    # Real beam direction vector. In CRF: packed units (micro-m, 1e-6 m)
    # CRF= CryoSat Reference Frame.
    CS_l1b_mds['Location']['Real_beam'] = np.ma.zeros((n_records,n_blocks,3),dtype=np.int32)
    # Interferometric baseline vector. In CRF: packed units (micro-m, 1e-6 m)
    CS_l1b_mds['Location']['Baseline'] = np.ma.zeros((n_records,n_blocks,3),dtype=np.int32)
    # Measurement Confidence Data Flags
    # Generally the MCD flags indicate problems when set
    # If MCD is 0 then no problems or non-nominal conditions were detected
    # Serious errors are indicated by setting bit 31
    CS_l1b_mds['Location']['MCD'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # CryoSat-2 Measurement Group
    # Derived from instrument measurement parameters
    CS_l1b_mds['Data'] = {}
    # Window Delay reference (two-way) corrected for instrument delays
    CS_l1b_mds['Data']['TD'] = np.ma.zeros((n_records,n_blocks),dtype=np.int64)
    # H0 Initial Height Word from telemetry
    CS_l1b_mds['Data']['H_0'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # COR2 Height Rate: on-board tracker height rate over the radar cycle
    CS_l1b_mds['Data']['COR2'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Coarse Range Word (LAI) derived from telemetry
    CS_l1b_mds['Data']['LAI'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Fine Range Word (FAI) derived from telemetry
    CS_l1b_mds['Data']['FAI'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Automatic Gain Control Channel 1: AGC gain applied on Rx channel 1.
    # Gain calibration corrections are applied (Sum of AGC stages 1 and 2
    # plus the corresponding corrections) (dB/100)
    CS_l1b_mds['Data']['AGC_CH1'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Automatic Gain Control Channel 2: AGC gain applied on Rx channel 2.
    # Gain calibration corrections are applied (dB/100)
    CS_l1b_mds['Data']['AGC_CH2'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Total Fixed Gain On Channel 1: gain applied by the RF unit. (dB/100)
    CS_l1b_mds['Data']['TR_gain_CH1'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Total Fixed Gain On Channel 2: gain applied by the RF unit. (dB/100)
    CS_l1b_mds['Data']['TR_gain_CH2'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Transmit Power in microWatts
    CS_l1b_mds['Data']['TX_Power'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Doppler range correction: Radial component (mm)
    # computed for the component of satellite velocity in the nadir direction
    CS_l1b_mds['Data']['Doppler_range'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Range Correction: transmit-receive antenna (mm)
    # Calibration correction to range on channel 1 computed from CAL1.
    CS_l1b_mds['Data']['TR_inst_range'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Range Correction: receive-only antenna (mm)
    # Calibration correction to range on channel 2 computed from CAL1.
    CS_l1b_mds['Data']['R_inst_range'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Gain Correction: transmit-receive antenna (dB/100)
    # Calibration correction to gain on channel 1 computed from CAL1
    CS_l1b_mds['Data']['TR_inst_gain'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Gain Correction: receive-only (dB/100)
    # Calibration correction to gain on channel 2 computed from CAL1
    CS_l1b_mds['Data']['R_inst_gain'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Internal Phase Correction (microradians)
    CS_l1b_mds['Data']['Internal_phase'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # External Phase Correction (microradians)
    CS_l1b_mds['Data']['External_phase'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Noise Power measurement (dB/100): converted from telemetry units to be
    # the noise floor of FBR measurement echoes.
    # Set to -9999.99 when the telemetry contains zero.
    CS_l1b_mds['Data']['Noise_power'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Phase slope correction (microradians)
    # Computed from the CAL-4 packets during the azimuth impulse response
    # amplitude (SARIN only). Set from the latest available CAL-4 packet.
    CS_l1b_mds['Data']['Phase_slope'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    CS_l1b_mds['Data']['Spares1'] = np.ma.zeros((n_records,n_blocks,4),dtype=np.int8)
    # CryoSat-2 External Corrections Group
    # one value per record (1 Hz), not per 20 Hz block
    CS_l1b_mds['Geometry'] = {}
    # Dry Tropospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['dryTrop'] = np.ma.zeros((n_records),dtype=np.int32)
    # Wet Tropospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['wetTrop'] = np.ma.zeros((n_records),dtype=np.int32)
    # Inverse Barometric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['InvBar'] = np.ma.zeros((n_records),dtype=np.int32)
    # Delta Inverse Barometric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['DAC'] = np.ma.zeros((n_records),dtype=np.int32)
    # GIM Ionospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['Iono_GIM'] = np.ma.zeros((n_records),dtype=np.int32)
    # Model Ionospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['Iono_model'] = np.ma.zeros((n_records),dtype=np.int32)
    # Ocean tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['ocTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Long period equilibrium ocean tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['lpeTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Ocean loading tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['olTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Solid Earth tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['seTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Geocentric Polar tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['gpTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Surface Type: enumerated key to classify surface at nadir
    # 0 = Open Ocean
    # 1 = Closed Sea
    # 2 = Continental Ice
    # 3 = Land
    CS_l1b_mds['Geometry']['Surf_type'] = np.ma.zeros((n_records),dtype=np.uint32)
    CS_l1b_mds['Geometry']['Spare1'] = np.ma.zeros((n_records,4),dtype=np.int8)
    # Corrections Status Flag
    CS_l1b_mds['Geometry']['Corr_status'] = np.ma.zeros((n_records),dtype=np.uint32)
    # Correction Error Flag
    CS_l1b_mds['Geometry']['Corr_error'] = np.ma.zeros((n_records),dtype=np.uint32)
    CS_l1b_mds['Geometry']['Spare2'] = np.ma.zeros((n_records,4),dtype=np.int8)
    # CryoSat-2 Average Waveforms Groups
    # plain (unmasked) arrays; layout depends on the instrument mode
    CS_l1b_mds['Waveform_1Hz'] = {}
    if (self.MODE == 'LRM'):
        # Low-Resolution Mode
        # Data Record Time (MDSR Time Stamp)
        CS_l1b_mds['Waveform_1Hz']['Day'] = np.zeros((n_records),dtype=np.int32)
        CS_l1b_mds['Waveform_1Hz']['Second'] = np.zeros((n_records),dtype=np.uint32)
        CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.zeros((n_records),dtype=np.uint32)
        # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lat'] = np.zeros((n_records),dtype=np.int32)
        # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lon'] = np.zeros((n_records),dtype=np.int32)
        # Alt: packed units (mm, 1e-3 m)
        # Altitude of COG above reference ellipsoid (interpolated value)
        CS_l1b_mds['Waveform_1Hz']['Alt'] = np.zeros((n_records),dtype=np.int32)
        # Window Delay (two-way) corrected for instrument delays
        CS_l1b_mds['Waveform_1Hz']['TD'] = np.zeros((n_records),dtype=np.int64)
        # 1 Hz Averaged Power Echo Waveform
        CS_l1b_mds['Waveform_1Hz']['Waveform'] = np.zeros((n_records,n_LRM_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Echo Scale Power (a power of 2)
        CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = np.zeros((n_records),dtype=np.uint16)
        CS_l1b_mds['Waveform_1Hz']['Flags'] = np.zeros((n_records),dtype=np.uint16)
    elif (self.MODE == 'SAR'):
        # SAR Mode
        # Data Record Time (MDSR Time Stamp)
        CS_l1b_mds['Waveform_1Hz']['Day'] = np.zeros((n_records),dtype=np.int32)
        CS_l1b_mds['Waveform_1Hz']['Second'] = np.zeros((n_records),dtype=np.uint32)
        CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.zeros((n_records),dtype=np.uint32)
        # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lat'] = np.zeros((n_records),dtype=np.int32)
        # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lon'] = np.zeros((n_records),dtype=np.int32)
        # Alt: packed units (mm, 1e-3 m)
        # Altitude of COG above reference ellipsoid (interpolated value)
        CS_l1b_mds['Waveform_1Hz']['Alt'] = np.zeros((n_records),dtype=np.int32)
        # Window Delay (two-way) corrected for instrument delays
        CS_l1b_mds['Waveform_1Hz']['TD'] = np.zeros((n_records),dtype=np.int64)
        # 1 Hz Averaged Power Echo Waveform
        CS_l1b_mds['Waveform_1Hz']['Waveform'] = np.zeros((n_records,n_SAR_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Echo Scale Power (a power of 2)
        CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = np.zeros((n_records),dtype=np.uint16)
        CS_l1b_mds['Waveform_1Hz']['Flags'] = np.zeros((n_records),dtype=np.uint16)
    elif (self.MODE == 'SIN'):
        # SARIN Mode
        # Same as the LRM/SAR groups but the waveform array is 512 bins instead of
        # 128 and the number of echoes averaged is different.
        # Data Record Time (MDSR Time Stamp)
        CS_l1b_mds['Waveform_1Hz']['Day'] = np.zeros((n_records),dtype=np.int32)
        CS_l1b_mds['Waveform_1Hz']['Second'] = np.zeros((n_records),dtype=np.uint32)
        CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.zeros((n_records),dtype=np.uint32)
        # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lat'] = np.zeros((n_records),dtype=np.int32)
        # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lon'] = np.zeros((n_records),dtype=np.int32)
        # Alt: packed units (mm, 1e-3 m)
        # Altitude of COG above reference ellipsoid (interpolated value)
        CS_l1b_mds['Waveform_1Hz']['Alt'] = np.zeros((n_records),dtype=np.int32)
        # Window Delay (two-way) corrected for instrument delays
        CS_l1b_mds['Waveform_1Hz']['TD'] = np.zeros((n_records),dtype=np.int64)
        # 1 Hz Averaged Power Echo Waveform
        CS_l1b_mds['Waveform_1Hz']['Waveform'] = np.zeros((n_records,n_SARIN_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Echo Scale Power (a power of 2)
        CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = np.zeros((n_records),dtype=np.uint16)
        CS_l1b_mds['Waveform_1Hz']['Flags'] = np.zeros((n_records),dtype=np.uint16)
    # CryoSat-2 Waveforms Groups
    # Beam Behavior Parameters
    # shared dict: attached to Waveform_20Hz for SAR and SIN modes only
    Beam_Behavior = {}
    # Standard Deviation of Gaussian fit to range integrated stack power.
    Beam_Behavior['SD'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # Stack Center: Mean of Gaussian fit to range integrated stack power.
    Beam_Behavior['Center'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # Stack amplitude parameter scaled in dB/100.
    Beam_Behavior['Amplitude'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # 3rd moment: providing the degree of asymmetry of the range integrated
    # stack power distribution.
    Beam_Behavior['Skewness'] = np.zeros((n_records,n_blocks),dtype=np.int16)
    # 4th moment: Measure of peakiness of range integrated stack power distribution.
    Beam_Behavior['Kurtosis'] = np.zeros((n_records,n_blocks),dtype=np.int16)
    Beam_Behavior['Spare'] = np.zeros((n_records,n_blocks,n_BeamBehaviourParams-5),dtype=np.int16)
    # CryoSat-2 mode specific waveforms
    CS_l1b_mds['Waveform_20Hz'] = {}
    if (self.MODE == 'LRM'):
        # Low-Resolution Mode
        # Averaged Power Echo Waveform [128]
        CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.zeros((n_records,n_blocks,n_LRM_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Echo Scale Power (a power of 2 to scale echo to Watts)
        CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        CS_l1b_mds['Waveform_20Hz']['Flags'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    elif (self.MODE == 'SAR'):
        # SAR Mode
        # Averaged Power Echo Waveform [128]
        CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.zeros((n_records,n_blocks,n_SAR_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Echo Scale Power (a power of 2 to scale echo to Watts)
        CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        CS_l1b_mds['Waveform_20Hz']['Flags'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        # Beam behaviour parameters
        CS_l1b_mds['Waveform_20Hz']['Beam'] = Beam_Behavior
    elif (self.MODE == 'SIN'):
        # SARIN Mode
        # Averaged Power Echo Waveform [512]
        CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.zeros((n_records,n_blocks,n_SARIN_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Echo Scale Power (a power of 2 to scale echo to Watts)
        CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        CS_l1b_mds['Waveform_20Hz']['Flags'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        # Beam behaviour parameters
        CS_l1b_mds['Waveform_20Hz']['Beam'] = Beam_Behavior
        # Coherence [512]: packed units (1/1000)
        CS_l1b_mds['Waveform_20Hz']['Coherence'] = np.zeros((n_records,n_blocks,n_SARIN_RW),dtype=np.int16)
        # Phase Difference [512]: packed units (microradians)
        CS_l1b_mds['Waveform_20Hz']['Phase_diff'] = np.zeros((n_records,n_blocks,n_SARIN_RW),dtype=np.int32)
    # for each record in the CryoSat file
    # NOTE: the read order below mirrors the big-endian on-disk record
    # layout exactly; do not reorder these np.fromfile calls
    for r in range(n_records):
        # CryoSat-2 Time and Orbit Group
        for b in range(n_blocks):
            # Day is written through .data so the fill_value comparison
            # after the read loop still flags any unread blocks
            CS_l1b_mds['Location']['Day'].data[r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Second'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Micsec'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['USO_Corr'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Mode_ID'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Location']['SSC'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Location']['Inst_config'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Rec_Count'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Lat'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Lon'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Alt'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Alt_rate'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Sat_velocity'][r,b,:] = np.fromfile(fid,dtype='>i4',count=3)
            CS_l1b_mds['Location']['Real_beam'][r,b,:] = np.fromfile(fid,dtype='>i4',count=3)
            CS_l1b_mds['Location']['Baseline'][r,b,:] = np.fromfile(fid,dtype='>i4',count=3)
            CS_l1b_mds['Location']['MCD'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
        # CryoSat-2 Measurement Group
        # Derived from instrument measurement parameters
        for b in range(n_blocks):
            CS_l1b_mds['Data']['TD'][r,b] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Data']['H_0'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['COR2'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['LAI'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['FAI'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['AGC_CH1'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['AGC_CH2'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_gain_CH1'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_gain_CH2'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TX_Power'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Doppler_range'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_inst_range'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['R_inst_range'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_inst_gain'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['R_inst_gain'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Internal_phase'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['External_phase'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Noise_power'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Phase_slope'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Spares1'][r,b,:] = np.fromfile(fid,dtype='>i1',count=4)
        # CryoSat-2 External Corrections Group
        CS_l1b_mds['Geometry']['dryTrop'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['wetTrop'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['InvBar'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['DAC'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['Iono_GIM'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['Iono_model'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['ocTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['lpeTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['olTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['seTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['gpTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['Surf_type'][r] = np.fromfile(fid,dtype='>u4',count=1)
        CS_l1b_mds['Geometry']['Spare1'][r,:] = np.fromfile(fid,dtype='>i1',count=4)
        CS_l1b_mds['Geometry']['Corr_status'][r] = np.fromfile(fid,dtype='>u4',count=1)
        CS_l1b_mds['Geometry']['Corr_error'][r] = np.fromfile(fid,dtype='>u4',count=1)
        CS_l1b_mds['Geometry']['Spare2'][r,:] = np.fromfile(fid,dtype='>i1',count=4)
        # CryoSat-2 Average Waveforms Groups
        if (self.MODE == 'LRM'):
            # Low-Resolution Mode
            CS_l1b_mds['Waveform_1Hz']['Day'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Second'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Micsec'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lat'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lon'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Alt'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['TD'][r] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Waveform_1Hz']['Waveform'][r,:] = np.fromfile(fid,dtype='>u2',count=n_LRM_RW)
            CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'][r] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Waveform_1Hz']['Flags'][r] = np.fromfile(fid,dtype='>u2',count=1)
        elif (self.MODE == 'SAR'):
            # SAR Mode
            CS_l1b_mds['Waveform_1Hz']['Day'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Second'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Micsec'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lat'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lon'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Alt'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['TD'][r] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Waveform_1Hz']['Waveform'][r,:] = np.fromfile(fid,dtype='>u2',count=n_SAR_RW)
            CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'][r] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Waveform_1Hz']['Flags'][r] = np.fromfile(fid,dtype='>u2',count=1)
        elif (self.MODE == 'SIN'):
            # SARIN Mode
            CS_l1b_mds['Waveform_1Hz']['Day'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Second'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Micsec'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lat'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lon'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Alt'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['TD'][r] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Waveform_1Hz']['Waveform'][r,:] = np.fromfile(fid,dtype='>u2',count=n_SARIN_RW)
            CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'][r] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Waveform_1Hz']['Flags'][r] = np.fromfile(fid,dtype='>u2',count=1)
        # CryoSat-2 Waveforms Groups
        if (self.MODE == 'LRM'):
            # Low-Resolution Mode
            for b in range(n_blocks):
                CS_l1b_mds['Waveform_20Hz']['Waveform'][r,b,:] = np.fromfile(fid,dtype='>u2',count=n_LRM_RW)
                CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Flags'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
        elif (self.MODE == 'SAR'):
            # SAR Mode
            for b in range(n_blocks):
                CS_l1b_mds['Waveform_20Hz']['Waveform'][r,b,:] = np.fromfile(fid,dtype='>u2',count=n_SAR_RW)
                CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Flags'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['SD'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Center'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Spare'][r,b,:] = np.fromfile(fid,dtype='>i2',count=(n_BeamBehaviourParams-5))
        elif (self.MODE == 'SIN'):
            # SARIN Mode
            for b in range(n_blocks):
                CS_l1b_mds['Waveform_20Hz']['Waveform'][r,b,:] = np.fromfile(fid,dtype='>u2',count=n_SARIN_RW)
                CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Flags'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['SD'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Center'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Spare'][r,b,:] = np.fromfile(fid,dtype='>i2',count=(n_BeamBehaviourParams-5))
                CS_l1b_mds['Waveform_20Hz']['Coherence'][r,b,:] = np.fromfile(fid,dtype='>i2',count=n_SARIN_RW)
                CS_l1b_mds['Waveform_20Hz']['Phase_diff'][r,b,:] = np.fromfile(fid,dtype='>i4',count=n_SARIN_RW)
    # set the mask from day variables
    # blocks whose Day still equals the fill value (0) were never read
    mask_20Hz = CS_l1b_mds['Location']['Day'].data == CS_l1b_mds['Location']['Day'].fill_value
    Location_keys = [key for key in CS_l1b_mds['Location'].keys() if not re.search(r'Spare',key)]
    Data_keys = [key for key in CS_l1b_mds['Data'].keys() if not re.search(r'Spare',key)]
    # NOTE(review): the Geometry/Waveform key lists below are computed but
    # never used -- only the Location and Data groups receive the mask here.
    # Confirm whether masking of the other groups was intended.
    Geometry_keys = [key for key in CS_l1b_mds['Geometry'].keys() if not re.search(r'Spare',key)]
    Wfm_1Hz_keys = [key for key in CS_l1b_mds['Waveform_1Hz'].keys() if not re.search(r'Spare',key)]
    Wfm_20Hz_keys = [key for key in CS_l1b_mds['Waveform_20Hz'].keys() if not re.search(r'Spare',key)]
    for key in Location_keys:
        CS_l1b_mds['Location'][key].mask = mask_20Hz.copy()
    for key in Data_keys:
        CS_l1b_mds['Data'][key].mask = mask_20Hz.copy()
    # return the output dictionary
    return CS_l1b_mds
def cryosat_baseline_C(self, fid, n_records):
    """
    Read L1b MDS variables for CryoSat Baseline C

    Parameters
    ----------
    fid: obj
        Open file object for the binary L1b product, positioned at the
        start of the Measurement Data Set (MDS) records.  All reads are
        big-endian and strictly sequential.
    n_records: int
        Number of 1Hz records within the file

    Returns
    -------
    CS_l1b_mds: dict
        L1b variables grouped into 'Location', 'Data', 'Geometry',
        'Waveform_1Hz' and 'Waveform_20Hz' dictionaries
    """
    # Baseline C range window lengths (number of bins per waveform)
    # *_BC_* are the extended Baseline-C 20Hz window sizes
    n_SARIN_BC_RW = 1024
    n_SARIN_RW = 512
    n_SAR_BC_RW = 256
    n_SAR_RW = 128
    n_LRM_RW = 128
    # number of 20Hz measurement blocks within each 1Hz record
    n_blocks = 20
    # total number of beam behaviour parameters in the stack data
    n_BeamBehaviourParams = 50
    # Bind all the variables of the l1b_mds together into a single dictionary
    CS_l1b_mds = {}
    # CryoSat-2 Time and Orbit Group
    CS_l1b_mds['Location'] = {}
    # Time: day part
    # fill_value=0 is compared against below to mask out unread blocks
    CS_l1b_mds['Location']['Day'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32,fill_value=0)
    # Time: second part
    CS_l1b_mds['Location']['Second'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # Time: microsecond part
    CS_l1b_mds['Location']['Micsec'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # USO correction factor
    CS_l1b_mds['Location']['USO_Corr'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Mode ID
    CS_l1b_mds['Location']['Mode_ID'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint16)
    # Source sequence counter
    CS_l1b_mds['Location']['SSC'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint16)
    # Instrument configuration
    CS_l1b_mds['Location']['Inst_config'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # Record Counter
    CS_l1b_mds['Location']['Rec_Count'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Lat'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Lon'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Alt: packed units (mm, 1e-3 m)
    # Altitude of COG above reference ellipsoid (interpolated value)
    CS_l1b_mds['Location']['Alt'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instantaneous altitude rate derived from orbit: packed units (mm/s, 1e-3 m/s)
    CS_l1b_mds['Location']['Alt_rate'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Satellite velocity vector. In ITRF: packed units (mm/s, 1e-3 m/s)
    # ITRF= International Terrestrial Reference Frame
    CS_l1b_mds['Location']['Sat_velocity'] = np.ma.zeros((n_records,n_blocks,3),dtype=np.int32)
    # Real beam direction vector. In CRF: packed units (micro-m/s, 1e-6 m/s)
    # CRF= CryoSat Reference Frame.
    CS_l1b_mds['Location']['Real_beam'] = np.ma.zeros((n_records,n_blocks,3),dtype=np.int32)
    # Interferometric baseline vector. In CRF: packed units (micro-m/s, 1e-6 m/s)
    CS_l1b_mds['Location']['Baseline'] = np.ma.zeros((n_records,n_blocks,3),dtype=np.int32)
    # Star Tracker ID
    CS_l1b_mds['Location']['ST_ID'] = np.ma.zeros((n_records,n_blocks),dtype=np.int16)
    # Antenna Bench Roll Angle (Derived from star trackers)
    # packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Roll'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Antenna Bench Pitch Angle (Derived from star trackers)
    # packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Pitch'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Antenna Bench Yaw Angle (Derived from star trackers)
    # packed units (0.1 micro-degree, 1e-7 degrees)
    CS_l1b_mds['Location']['Yaw'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Measurement Confidence Data Flags
    # Generally the MCD flags indicate problems when set
    # If MCD is 0 then no problems or non-nominal conditions were detected
    # Serious errors are indicated by setting bit 31
    CS_l1b_mds['Location']['MCD'] = np.ma.zeros((n_records,n_blocks),dtype=np.uint32)
    CS_l1b_mds['Location']['Spares'] = np.ma.zeros((n_records,n_blocks,2),dtype=np.int16)
    # CryoSat-2 Measurement Group
    # Derived from instrument measurement parameters
    CS_l1b_mds['Data'] = {}
    # Window Delay reference (two-way) corrected for instrument delays
    CS_l1b_mds['Data']['TD'] = np.ma.zeros((n_records,n_blocks),dtype=np.int64)
    # H0 Initial Height Word from telemetry
    CS_l1b_mds['Data']['H_0'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # COR2 Height Rate: on-board tracker height rate over the radar cycle
    CS_l1b_mds['Data']['COR2'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Coarse Range Word (LAI) derived from telemetry
    CS_l1b_mds['Data']['LAI'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Fine Range Word (FAI) derived from telemetry
    CS_l1b_mds['Data']['FAI'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Automatic Gain Control Channel 1: AGC gain applied on Rx channel 1.
    # Gain calibration corrections are applied (Sum of AGC stages 1 and 2
    # plus the corresponding corrections) (dB/100)
    CS_l1b_mds['Data']['AGC_CH1'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Automatic Gain Control Channel 2: AGC gain applied on Rx channel 2.
    # Gain calibration corrections are applied (dB/100)
    CS_l1b_mds['Data']['AGC_CH2'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Total Fixed Gain On Channel 1: gain applied by the RF unit. (dB/100)
    CS_l1b_mds['Data']['TR_gain_CH1'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Total Fixed Gain On Channel 2: gain applied by the RF unit. (dB/100)
    CS_l1b_mds['Data']['TR_gain_CH2'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Transmit Power in microWatts
    CS_l1b_mds['Data']['TX_Power'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Doppler range correction: Radial component (mm)
    # computed for the component of satellite velocity in the nadir direction
    CS_l1b_mds['Data']['Doppler_range'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Range Correction: transmit-receive antenna (mm)
    # Calibration correction to range on channel 1 computed from CAL1.
    CS_l1b_mds['Data']['TR_inst_range'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Range Correction: receive-only antenna (mm)
    # Calibration correction to range on channel 2 computed from CAL1.
    CS_l1b_mds['Data']['R_inst_range'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Gain Correction: transmit-receive antenna (dB/100)
    # Calibration correction to gain on channel 1 computed from CAL1
    CS_l1b_mds['Data']['TR_inst_gain'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Instrument Gain Correction: receive-only (dB/100)
    # Calibration correction to gain on channel 2 computed from CAL1
    CS_l1b_mds['Data']['R_inst_gain'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Internal Phase Correction (microradians)
    CS_l1b_mds['Data']['Internal_phase'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # External Phase Correction (microradians)
    CS_l1b_mds['Data']['External_phase'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Noise Power measurement (dB/100)
    CS_l1b_mds['Data']['Noise_power'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    # Phase slope correction (microradians)
    # Computed from the CAL-4 packets during the azimuth impulse response
    # amplitude (SARIN only). Set from the latest available CAL-4 packet.
    CS_l1b_mds['Data']['Phase_slope'] = np.ma.zeros((n_records,n_blocks),dtype=np.int32)
    CS_l1b_mds['Data']['Spares1'] = np.ma.zeros((n_records,n_blocks,4),dtype=np.int8)
    # CryoSat-2 External Corrections Group (1Hz)
    CS_l1b_mds['Geometry'] = {}
    # Dry Tropospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['dryTrop'] = np.ma.zeros((n_records),dtype=np.int32)
    # Wet Tropospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['wetTrop'] = np.ma.zeros((n_records),dtype=np.int32)
    # Inverse Barometric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['InvBar'] = np.ma.zeros((n_records),dtype=np.int32)
    # Delta Inverse Barometric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['DAC'] = np.ma.zeros((n_records),dtype=np.int32)
    # GIM Ionospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['Iono_GIM'] = np.ma.zeros((n_records),dtype=np.int32)
    # Model Ionospheric Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['Iono_model'] = np.ma.zeros((n_records),dtype=np.int32)
    # Ocean tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['ocTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Long period equilibrium ocean tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['lpeTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Ocean loading tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['olTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Solid Earth tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['seTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Geocentric Polar tide Correction packed units (mm, 1e-3 m)
    CS_l1b_mds['Geometry']['gpTideElv'] = np.ma.zeros((n_records),dtype=np.int32)
    # Surface Type: enumerated key to classify surface at nadir
    # 0 = Open Ocean
    # 1 = Closed Sea
    # 2 = Continental Ice
    # 3 = Land
    CS_l1b_mds['Geometry']['Surf_type'] = np.ma.zeros((n_records),dtype=np.uint32)
    CS_l1b_mds['Geometry']['Spare1'] = np.ma.zeros((n_records,4),dtype=np.int8)
    # Corrections Status Flag
    CS_l1b_mds['Geometry']['Corr_status'] = np.ma.zeros((n_records),dtype=np.uint32)
    # Correction Error Flag
    CS_l1b_mds['Geometry']['Corr_error'] = np.ma.zeros((n_records),dtype=np.uint32)
    CS_l1b_mds['Geometry']['Spare2'] = np.ma.zeros((n_records,4),dtype=np.int8)
    # CryoSat-2 Average Waveforms Groups
    # waveform length depends on the instrument mode
    CS_l1b_mds['Waveform_1Hz'] = {}
    if (self.MODE == 'LRM'):
        # Low-Resolution Mode
        # Data Record Time (MDSR Time Stamp)
        CS_l1b_mds['Waveform_1Hz']['Day'] = np.zeros((n_records),dtype=np.int32)
        CS_l1b_mds['Waveform_1Hz']['Second'] = np.zeros((n_records),dtype=np.uint32)
        CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.zeros((n_records),dtype=np.uint32)
        # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lat'] = np.zeros((n_records),dtype=np.int32)
        # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lon'] = np.zeros((n_records),dtype=np.int32)
        # Alt: packed units (mm, 1e-3 m)
        # Altitude of COG above reference ellipsoid (interpolated value)
        CS_l1b_mds['Waveform_1Hz']['Alt'] = np.zeros((n_records),dtype=np.int32)
        # Window Delay (two-way) corrected for instrument delays
        CS_l1b_mds['Waveform_1Hz']['TD'] = np.zeros((n_records),dtype=np.int64)
        # 1 Hz Averaged Power Echo Waveform
        CS_l1b_mds['Waveform_1Hz']['Waveform'] = np.zeros((n_records,n_LRM_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Echo Scale Power (a power of 2 to scale echo to Watts)
        CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = np.zeros((n_records),dtype=np.uint16)
        CS_l1b_mds['Waveform_1Hz']['Flags'] = np.zeros((n_records),dtype=np.uint16)
    elif (self.MODE == 'SAR'):
        # SAR Mode
        # Data Record Time (MDSR Time Stamp)
        CS_l1b_mds['Waveform_1Hz']['Day'] = np.zeros((n_records),dtype=np.int32)
        CS_l1b_mds['Waveform_1Hz']['Second'] = np.zeros((n_records),dtype=np.uint32)
        CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.zeros((n_records),dtype=np.uint32)
        # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lat'] = np.zeros((n_records),dtype=np.int32)
        # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lon'] = np.zeros((n_records),dtype=np.int32)
        # Alt: packed units (mm, 1e-3 m)
        # Altitude of COG above reference ellipsoid (interpolated value)
        CS_l1b_mds['Waveform_1Hz']['Alt'] = np.zeros((n_records),dtype=np.int32)
        # Window Delay (two-way) corrected for instrument delays
        CS_l1b_mds['Waveform_1Hz']['TD'] = np.zeros((n_records),dtype=np.int64)
        # 1 Hz Averaged Power Echo Waveform
        CS_l1b_mds['Waveform_1Hz']['Waveform'] = np.zeros((n_records,n_SAR_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Echo Scale Power (a power of 2 to scale echo to Watts)
        CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = np.zeros((n_records),dtype=np.uint16)
        CS_l1b_mds['Waveform_1Hz']['Flags'] = np.zeros((n_records),dtype=np.uint16)
    elif (self.MODE == 'SIN'):
        # SARIN Mode
        # Same as the LRM/SAR groups but the waveform array is 512 bins instead of
        # 128 and the number of echoes averaged is different.
        # Data Record Time (MDSR Time Stamp)
        CS_l1b_mds['Waveform_1Hz']['Day'] = np.zeros((n_records),dtype=np.int32)
        CS_l1b_mds['Waveform_1Hz']['Second'] = np.zeros((n_records),dtype=np.uint32)
        CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.zeros((n_records),dtype=np.uint32)
        # Lat: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lat'] = np.zeros((n_records),dtype=np.int32)
        # Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        CS_l1b_mds['Waveform_1Hz']['Lon'] = np.zeros((n_records),dtype=np.int32)
        # Alt: packed units (mm, 1e-3 m)
        # Altitude of COG above reference ellipsoid (interpolated value)
        CS_l1b_mds['Waveform_1Hz']['Alt'] = np.zeros((n_records),dtype=np.int32)
        # Window Delay (two-way) corrected for instrument delays
        CS_l1b_mds['Waveform_1Hz']['TD'] = np.zeros((n_records),dtype=np.int64)
        # 1 Hz Averaged Power Echo Waveform
        CS_l1b_mds['Waveform_1Hz']['Waveform'] = np.zeros((n_records,n_SARIN_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Echo Scale Power (a power of 2 to scale echo to Watts)
        CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = np.zeros((n_records),dtype=np.uint16)
        CS_l1b_mds['Waveform_1Hz']['Flags'] = np.zeros((n_records),dtype=np.uint16)
    # CryoSat-2 Waveforms Groups
    # Beam Behavior Parameters (only attached for SAR/SARIN modes below)
    Beam_Behavior = {}
    # Standard Deviation of Gaussian fit to range integrated stack power.
    Beam_Behavior['SD'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # Stack Center: Mean of Gaussian fit to range integrated stack power.
    Beam_Behavior['Center'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # Stack amplitude parameter scaled in dB/100.
    Beam_Behavior['Amplitude'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # 3rd moment: providing the degree of asymmetry of the range integrated
    # stack power distribution.
    Beam_Behavior['Skewness'] = np.zeros((n_records,n_blocks),dtype=np.int16)
    # 4th moment: Measure of peakiness of range integrated stack power distribution.
    Beam_Behavior['Kurtosis'] = np.zeros((n_records,n_blocks),dtype=np.int16)
    # Standard deviation as a function of boresight angle (microradians)
    Beam_Behavior['SD_boresight_angle'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    # Stack Center angle as a function of boresight angle (microradians)
    Beam_Behavior['Center_boresight_angle'] = np.zeros((n_records,n_blocks),dtype=np.int16)
    # remaining of the 50 beam behaviour parameters are spares (7 are named)
    Beam_Behavior['Spare'] = np.zeros((n_records,n_blocks,n_BeamBehaviourParams-7),dtype=np.int16)
    # CryoSat-2 mode specific waveform variables
    CS_l1b_mds['Waveform_20Hz'] = {}
    if (self.MODE == 'LRM'):
        # Low-Resolution Mode
        # Averaged Power Echo Waveform [128]
        CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.zeros((n_records,n_blocks,n_LRM_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Echo Scale Power (a power of 2)
        CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        CS_l1b_mds['Waveform_20Hz']['Flags'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
    elif (self.MODE == 'SAR'):
        # SAR Mode
        # Averaged Power Echo Waveform [256]
        CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.zeros((n_records,n_blocks,n_SAR_BC_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Echo Scale Power (a power of 2)
        CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        CS_l1b_mds['Waveform_20Hz']['Flags'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        # Beam behaviour parameters
        CS_l1b_mds['Waveform_20Hz']['Beam'] = Beam_Behavior
    elif (self.MODE == 'SIN'):
        # SARIN Mode
        # Averaged Power Echo Waveform [1024]
        CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.zeros((n_records,n_blocks,n_SARIN_BC_RW),dtype=np.uint16)
        # Echo Scale Factor (to scale echo to watts)
        CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Echo Scale Power (a power of 2)
        CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.zeros((n_records,n_blocks),dtype=np.int32)
        # Number of echoes averaged
        CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        CS_l1b_mds['Waveform_20Hz']['Flags'] = np.zeros((n_records,n_blocks),dtype=np.uint16)
        # Beam behaviour parameters
        CS_l1b_mds['Waveform_20Hz']['Beam'] = Beam_Behavior
        # Coherence [1024]: packed units (1/1000)
        CS_l1b_mds['Waveform_20Hz']['Coherence'] = np.zeros((n_records,n_blocks,n_SARIN_BC_RW),dtype=np.int16)
        # Phase Difference [1024]: packed units (microradians)
        CS_l1b_mds['Waveform_20Hz']['Phase_diff'] = np.zeros((n_records,n_blocks,n_SARIN_BC_RW),dtype=np.int32)
    # for each record in the CryoSat file
    # NOTE: reads are strictly sequential; the order of np.fromfile calls
    # below encodes the Baseline C MDS record layout and must not change
    for r in range(n_records):
        # CryoSat-2 Time and Orbit Group
        for b in range(n_blocks):
            CS_l1b_mds['Location']['Day'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Second'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Micsec'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['USO_Corr'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Mode_ID'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Location']['SSC'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Location']['Inst_config'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Rec_Count'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Lat'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Lon'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Alt'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Alt_rate'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Sat_velocity'][r,b,:] = np.fromfile(fid,dtype='>i4',count=3)
            CS_l1b_mds['Location']['Real_beam'][r,b,:] = np.fromfile(fid,dtype='>i4',count=3)
            CS_l1b_mds['Location']['Baseline'][r,b,:] = np.fromfile(fid,dtype='>i4',count=3)
            CS_l1b_mds['Location']['ST_ID'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
            CS_l1b_mds['Location']['Roll'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Pitch'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['Yaw'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Location']['MCD'][r,b] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Location']['Spares'][r,b,:] = np.fromfile(fid,dtype='>i2',count=2)
        # CryoSat-2 Measurement Group
        # Derived from instrument measurement parameters
        for b in range(n_blocks):
            CS_l1b_mds['Data']['TD'][r,b] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Data']['H_0'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['COR2'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['LAI'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['FAI'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['AGC_CH1'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['AGC_CH2'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_gain_CH1'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_gain_CH2'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TX_Power'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Doppler_range'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_inst_range'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['R_inst_range'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['TR_inst_gain'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['R_inst_gain'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Internal_phase'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['External_phase'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Noise_power'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Phase_slope'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Data']['Spares1'][r,b,:] = np.fromfile(fid,dtype='>i1',count=4)
        # CryoSat-2 External Corrections Group
        CS_l1b_mds['Geometry']['dryTrop'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['wetTrop'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['InvBar'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['DAC'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['Iono_GIM'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['Iono_model'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['ocTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['lpeTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['olTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['seTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['gpTideElv'][r] = np.fromfile(fid,dtype='>i4',count=1)
        CS_l1b_mds['Geometry']['Surf_type'][r] = np.fromfile(fid,dtype='>u4',count=1)
        CS_l1b_mds['Geometry']['Spare1'][r,:] = np.fromfile(fid,dtype='>i1',count=4)
        CS_l1b_mds['Geometry']['Corr_status'][r] = np.fromfile(fid,dtype='>u4',count=1)
        CS_l1b_mds['Geometry']['Corr_error'][r] = np.fromfile(fid,dtype='>u4',count=1)
        CS_l1b_mds['Geometry']['Spare2'][r,:] = np.fromfile(fid,dtype='>i1',count=4)
        # CryoSat-2 Average Waveforms Groups
        if (self.MODE == 'LRM'):
            # Low-Resolution Mode
            CS_l1b_mds['Waveform_1Hz']['Day'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Second'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Micsec'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lat'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lon'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Alt'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['TD'][r] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Waveform_1Hz']['Waveform'][r,:] = np.fromfile(fid,dtype='>u2',count=n_LRM_RW)
            CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'][r] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Waveform_1Hz']['Flags'][r] = np.fromfile(fid,dtype='>u2',count=1)
        elif (self.MODE == 'SAR'):
            # SAR Mode
            CS_l1b_mds['Waveform_1Hz']['Day'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Second'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Micsec'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lat'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lon'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Alt'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['TD'][r] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Waveform_1Hz']['Waveform'][r,:] = np.fromfile(fid,dtype='>u2',count=n_SAR_RW)
            CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'][r] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Waveform_1Hz']['Flags'][r] = np.fromfile(fid,dtype='>u2',count=1)
        elif (self.MODE == 'SIN'):
            # SARIN Mode
            CS_l1b_mds['Waveform_1Hz']['Day'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Second'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Micsec'][r] = np.fromfile(fid,dtype='>u4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lat'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Lon'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Alt'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['TD'][r] = np.fromfile(fid,dtype='>i8',count=1)
            CS_l1b_mds['Waveform_1Hz']['Waveform'][r,:] = np.fromfile(fid,dtype='>u2',count=n_SARIN_RW)
            CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'][r] = np.fromfile(fid,dtype='>i4',count=1)
            CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'][r] = np.fromfile(fid,dtype='>u2',count=1)
            CS_l1b_mds['Waveform_1Hz']['Flags'][r] = np.fromfile(fid,dtype='>u2',count=1)
        # CryoSat-2 Waveforms Groups
        if (self.MODE == 'LRM'):
            # Low-Resolution Mode
            for b in range(n_blocks):
                CS_l1b_mds['Waveform_20Hz']['Waveform'][r,b,:] = np.fromfile(fid,dtype='>u2',count=n_LRM_RW)
                CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Flags'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
        elif (self.MODE == 'SAR'):
            # SAR Mode
            for b in range(n_blocks):
                CS_l1b_mds['Waveform_20Hz']['Waveform'][r,b,:] = np.fromfile(fid,dtype='>u2',count=n_SAR_BC_RW)
                CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Flags'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['SD'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Center'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['SD_boresight_angle'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Center_boresight_angle'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Spare'][r,b,:] = np.fromfile(fid,dtype='>i2',count=(n_BeamBehaviourParams-7))
        elif (self.MODE == 'SIN'):
            # SARIN Mode
            for b in range(n_blocks):
                CS_l1b_mds['Waveform_20Hz']['Waveform'][r,b,:] = np.fromfile(fid,dtype='>u2',count=n_SARIN_BC_RW)
                CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'][r,b] = np.fromfile(fid,dtype='>i4',count=1)
                CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Flags'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['SD'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Center'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['SD_boresight_angle'][r,b] = np.fromfile(fid,dtype='>u2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Center_boresight_angle'][r,b] = np.fromfile(fid,dtype='>i2',count=1)
                CS_l1b_mds['Waveform_20Hz']['Beam']['Spare'][r,b,:] = np.fromfile(fid,dtype='>i2',count=(n_BeamBehaviourParams-7))
                CS_l1b_mds['Waveform_20Hz']['Coherence'][r,b,:] = np.fromfile(fid,dtype='>i2',count=n_SARIN_BC_RW)
                CS_l1b_mds['Waveform_20Hz']['Phase_diff'][r,b,:] = np.fromfile(fid,dtype='>i4',count=n_SARIN_BC_RW)
    # set the mask from day variables
    # blocks never overwritten by a read keep the Day fill value (0)
    mask_20Hz = CS_l1b_mds['Location']['Day'].data == CS_l1b_mds['Location']['Day'].fill_value
    Location_keys = [key for key in CS_l1b_mds['Location'].keys() if not re.search(r'Spare',key)]
    Data_keys = [key for key in CS_l1b_mds['Data'].keys() if not re.search(r'Spare',key)]
    for key in Location_keys:
        CS_l1b_mds['Location'][key].mask = mask_20Hz.copy()
    for key in Data_keys:
        CS_l1b_mds['Data'][key].mask = mask_20Hz.copy()
    # return the output dictionary
    return CS_l1b_mds
def cryosat_baseline_D(self, full_filename, unpack=False):
"""
Read L1b MDS variables for CryoSat Baseline D (netCDF4)
"""
# open netCDF4 file for reading
fid = netCDF4.Dataset(os.path.expanduser(full_filename),'r')
# use original unscaled units unless unpack=True
fid.set_auto_scale(unpack)
# get dimensions
ind_first_meas_20hz_01 = fid.variables['ind_first_meas_20hz_01'][:].copy()
ind_meas_1hz_20_ku = fid.variables['ind_meas_1hz_20_ku'][:].copy()
n_records = len(ind_first_meas_20hz_01)
n_SARIN_D_RW = 1024
n_SARIN_RW = 512
n_SAR_D_RW = 256
n_SAR_RW = 128
n_LRM_RW = 128
n_blocks = 20
# Bind all the variables of the l1b_mds together into a single dictionary
CS_l1b_mds = {}
# CryoSat-2 Time and Orbit Group
CS_l1b_mds['Location'] = {}
# MDS Time
CS_l1b_mds['Location']['Time'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Time'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
time_20_ku = fid.variables['time_20_ku'][:].copy()
# Time: day part
CS_l1b_mds['Location']['Day'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Day'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
# Time: second part
CS_l1b_mds['Location']['Second'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Second'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
# Time: microsecond part
CS_l1b_mds['Location']['Micsec'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Micsec'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
# USO correction factor
CS_l1b_mds['Location']['USO_Corr'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['USO_Corr'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
uso_cor_20_ku = fid.variables['uso_cor_20_ku'][:].copy()
# Mode ID
CS_l1b_mds['Location']['Mode_ID'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Mode_ID'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_mode_op_20_ku =fid.variables['flag_instr_mode_op_20_ku'][:].copy()
# Mode Flags
CS_l1b_mds['Location']['Mode_flags'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Mode_flags'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_mode_flags_20_ku =fid.variables['flag_instr_mode_flags_20_ku'][:].copy()
# Platform attitude control mode
CS_l1b_mds['Location']['Att_control'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Att_control'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_mode_att_ctrl_20_ku =fid.variables['flag_instr_mode_att_ctrl_20_ku'][:].copy()
# Instrument configuration
CS_l1b_mds['Location']['Inst_config'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Inst_config'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_conf_rx_flags_20_ku = fid.variables['flag_instr_conf_rx_flags_20_ku'][:].copy()
# acquisition band
CS_l1b_mds['Location']['Inst_band'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Inst_band'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_conf_rx_bwdt_20_ku = fid.variables['flag_instr_conf_rx_bwdt_20_ku'][:].copy()
# instrument channel
CS_l1b_mds['Location']['Inst_channel'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Inst_channel'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_conf_rx_in_use_20_ku = fid.variables['flag_instr_conf_rx_in_use_20_ku'][:].copy()
# tracking mode
CS_l1b_mds['Location']['Tracking_mode'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Tracking_mode'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_conf_rx_trk_mode_20_ku = fid.variables['flag_instr_conf_rx_trk_mode_20_ku'][:].copy()
# Source sequence counter
CS_l1b_mds['Location']['SSC'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['SSC'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
seq_count_20_ku = fid.variables['seq_count_20_ku'][:].copy()
# Record Counter
CS_l1b_mds['Location']['Rec_Count'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Rec_Count'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
rec_count_20_ku = fid.variables['rec_count_20_ku'][:].copy()
# Lat: packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Location']['Lat'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Lat'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
lat_20_ku = fid.variables['lat_20_ku'][:].copy()
# Lon: packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Location']['Lon'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Lon'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
lon_20_ku = fid.variables['lon_20_ku'][:].copy()
# Alt: packed units (mm, 1e-3 m)
# Altitude of COG above reference ellipsoid (interpolated value)
CS_l1b_mds['Location']['Alt'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Alt'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
alt_20_ku = fid.variables['alt_20_ku'][:].copy()
# Instantaneous altitude rate derived from orbit: packed units (mm/s, 1e-3 m/s)
CS_l1b_mds['Location']['Alt_rate'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Alt_rate'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
orb_alt_rate_20_ku = fid.variables['orb_alt_rate_20_ku'][:].copy()
# Satellite velocity vector. In ITRF: packed units (mm/s, 1e-3 m/s)
# ITRF= International Terrestrial Reference Frame
CS_l1b_mds['Location']['Sat_velocity'] = np.ma.zeros((n_records,n_blocks,3))
CS_l1b_mds['Location']['Sat_velocity'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
sat_vel_vec_20_ku = fid.variables['sat_vel_vec_20_ku'][:].copy()
# Real beam direction vector. In CRF: packed units (micro-m/s, 1e-6 m/s)
# CRF= CryoSat Reference Frame.
CS_l1b_mds['Location']['Real_beam'] = np.ma.zeros((n_records,n_blocks,3))
CS_l1b_mds['Location']['Real_beam'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
beam_dir_vec_20_ku = fid.variables['beam_dir_vec_20_ku'][:].copy()
# Interferometric baseline vector. In CRF: packed units (micro-m/s, 1e-6 m/s)
CS_l1b_mds['Location']['Baseline'] = np.ma.zeros((n_records,n_blocks,3))
CS_l1b_mds['Location']['Baseline'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
inter_base_vec_20_ku = fid.variables['inter_base_vec_20_ku'][:].copy()
# Star Tracker ID
CS_l1b_mds['Location']['ST_ID'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['ST_ID'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_instr_conf_rx_str_in_use_20_ku = fid.variables['flag_instr_conf_rx_str_in_use_20_ku'][:].copy()
# Antenna Bench Roll Angle (Derived from star trackers)
# packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Location']['Roll'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Roll'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
off_nadir_roll_angle_str_20_ku = fid.variables['off_nadir_roll_angle_str_20_ku'][:].copy()
# Antenna Bench Pitch Angle (Derived from star trackers)
# packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Location']['Pitch'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Pitch'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
off_nadir_pitch_angle_str_20_ku = fid.variables['off_nadir_pitch_angle_str_20_ku'][:].copy()
# Antenna Bench Yaw Angle (Derived from star trackers)
# packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Location']['Yaw'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['Yaw'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
off_nadir_yaw_angle_str_20_ku = fid.variables['off_nadir_yaw_angle_str_20_ku'][:].copy()
# Measurement Confidence Data Flags
# Generally the MCD flags indicate problems when set
# If MCD is 0 then no problems or non-nominal conditions were detected
# Serious errors are indicated by setting bit 31
CS_l1b_mds['Location']['MCD'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Location']['MCD'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_mcd_20_ku = fid.variables['flag_mcd_20_ku'][:].copy()
# CryoSat-2 Measurement Group
# Derived from instrument measurement parameters
CS_l1b_mds['Data'] = {}
# Window Delay reference (two-way) corrected for instrument delays
CS_l1b_mds['Data']['TD'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['TD'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
window_del_20_ku = fid.variables['window_del_20_ku'][:].copy()
# H0 Initial Height Word from telemetry
CS_l1b_mds['Data']['H_0'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['H_0'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
h0_applied_20_ku = fid.variables['h0_applied_20_ku'][:].copy()
# COR2 Height Rate: on-board tracker height rate over the radar cycle
CS_l1b_mds['Data']['COR2'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['COR2'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
cor2_applied_20_ku = fid.variables['cor2_applied_20_ku'][:].copy()
# Coarse Range Word (LAI) derived from telemetry
CS_l1b_mds['Data']['LAI'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['LAI'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
h0_lai_word_20_ku = fid.variables['h0_lai_word_20_ku'][:].copy()
# Fine Range Word (FAI) derived from telemetry
CS_l1b_mds['Data']['FAI'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['FAI'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
h0_fai_word_20_ku = fid.variables['h0_fai_word_20_ku'][:].copy()
# Automatic Gain Control Channel 1: AGC gain applied on Rx channel 1.
# Gain calibration corrections are applied (Sum of AGC stages 1 and 2
# plus the corresponding corrections) (dB/100)
CS_l1b_mds['Data']['AGC_CH1'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['AGC_CH1'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
agc_ch1_20_ku = fid.variables['agc_ch1_20_ku'][:].copy()
# Automatic Gain Control Channel 2: AGC gain applied on Rx channel 2.
# Gain calibration corrections are applied (dB/100)
CS_l1b_mds['Data']['AGC_CH2'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['AGC_CH2'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
agc_ch2_20_ku = fid.variables['agc_ch2_20_ku'][:].copy()
# Total Fixed Gain On Channel 1: gain applied by the RF unit. (dB/100)
CS_l1b_mds['Data']['TR_gain_CH1'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['TR_gain_CH1'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
tot_gain_ch1_20_ku = fid.variables['tot_gain_ch1_20_ku'][:].copy()
# Total Fixed Gain On Channel 2: gain applied by the RF unit. (dB/100)
CS_l1b_mds['Data']['TR_gain_CH2'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['TR_gain_CH2'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
tot_gain_ch2_20_ku = fid.variables['tot_gain_ch2_20_ku'][:].copy()
# Transmit Power in microWatts
CS_l1b_mds['Data']['TX_Power'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['TX_Power'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
transmit_pwr_20_ku = fid.variables['transmit_pwr_20_ku'][:].copy()
# Doppler range correction: Radial component (mm)
# computed for the component of satellite velocity in the nadir direction
CS_l1b_mds['Data']['Doppler_range'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['Doppler_range'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
dop_cor_20_ku = fid.variables['dop_cor_20_ku'][:].copy()
# Value of Doppler Angle for the first single look echo (1e-7 radians)
CS_l1b_mds['Data']['Doppler_angle_start'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['Doppler_angle_start'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
dop_angle_start_20_ku = fid.variables['dop_angle_start_20_ku'][:].copy()
# Value of Doppler Angle for the last single look echo (1e-7 radians)
CS_l1b_mds['Data']['Doppler_angle_stop'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['Doppler_angle_stop'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
dop_angle_stop_20_ku = fid.variables['dop_angle_stop_20_ku'][:].copy()
# Instrument Range Correction: transmit-receive antenna (mm)
# Calibration correction to range on channel 1 computed from CAL1.
CS_l1b_mds['Data']['TR_inst_range'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['TR_inst_range'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
instr_cor_range_tx_rx_20_ku = fid.variables['instr_cor_range_tx_rx_20_ku'][:].copy()
# Instrument Range Correction: receive-only antenna (mm)
# Calibration correction to range on channel 2 computed from CAL1.
CS_l1b_mds['Data']['R_inst_range'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['R_inst_range'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
instr_cor_range_rx_20_ku = fid.variables['instr_cor_range_rx_20_ku'][:].copy()
# Instrument Gain Correction: transmit-receive antenna (dB/100)
# Calibration correction to gain on channel 1 computed from CAL1
CS_l1b_mds['Data']['TR_inst_gain'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['TR_inst_gain'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
instr_cor_gain_tx_rx_20_ku = fid.variables['instr_cor_gain_tx_rx_20_ku'][:].copy()
# Instrument Gain Correction: receive-only (dB/100)
# Calibration correction to gain on channel 2 computed from CAL1
CS_l1b_mds['Data']['R_inst_gain'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['R_inst_gain'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
instr_cor_gain_rx_20_ku = fid.variables['instr_cor_gain_rx_20_ku'][:].copy()
# Internal Phase Correction (microradians)
CS_l1b_mds['Data']['Internal_phase'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['Internal_phase'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
instr_int_ph_cor_20_ku = fid.variables['instr_int_ph_cor_20_ku'][:].copy()
# External Phase Correction (microradians)
CS_l1b_mds['Data']['External_phase'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['External_phase'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
instr_ext_ph_cor_20_ku = fid.variables['instr_ext_ph_cor_20_ku'][:].copy()
# Noise Power measurement (dB/100)
CS_l1b_mds['Data']['Noise_power'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['Noise_power'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
noise_power_20_ku = fid.variables['noise_power_20_ku'][:].copy()
# Phase slope correction (microradians)
# Computed from the CAL-4 packets during the azimuth impulse response
# amplitude (SARIN only). Set from the latest available CAL-4 packet.
CS_l1b_mds['Data']['Phase_slope'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Data']['Phase_slope'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
ph_slope_cor_20_ku = fid.variables['ph_slope_cor_20_ku'][:].copy()
# CryoSat-2 External Corrections Group
CS_l1b_mds['Geometry'] = {}
# Data Record Time (MDSR Time Stamp)
CS_l1b_mds['Geometry']['Time'] = fid.variables['time_cor_01'][:].copy()
# Dry Tropospheric Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['dryTrop'] = fid.variables['mod_dry_tropo_cor_01'][:].copy()
# Wet Tropospheric Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['wetTrop'] = fid.variables['mod_wet_tropo_cor_01'][:].copy()
# Inverse Barometric Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['InvBar'] = fid.variables['inv_bar_cor_01'][:].copy()
# Delta Inverse Barometric Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['DAC'] = fid.variables['hf_fluct_total_cor_01'][:].copy()
# GIM Ionospheric Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['Iono_GIM'] = fid.variables['iono_cor_gim_01'][:].copy()
# Model Ionospheric Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['Iono_model'] = fid.variables['iono_cor_01'][:].copy()
# Ocean tide Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['ocTideElv'] = fid.variables['ocean_tide_01'][:].copy()
# Long period equilibrium ocean tide Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['lpeTideElv'] = fid.variables['ocean_tide_eq_01'][:].copy()
# Ocean loading tide Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['olTideElv'] = fid.variables['load_tide_01'][:].copy()
# Solid Earth tide Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['seTideElv'] = fid.variables['solid_earth_tide_01'][:].copy()
# Geocentric Polar tide Correction packed units (mm, 1e-3 m)
CS_l1b_mds['Geometry']['gpTideElv'] = fid.variables['pole_tide_01'][:].copy()
# Surface Type: enumerated key to classify surface at nadir
# 0 = Open Ocean
# 1 = Closed Sea
# 2 = Continental Ice
# 3 = Land
CS_l1b_mds['Geometry']['Surf_type'] = fid.variables['surf_type_01'][:].copy()
# Corrections Status Flag
CS_l1b_mds['Geometry']['Corr_status'] = fid.variables['flag_cor_status_01'][:].copy()
# Correction Error Flag
CS_l1b_mds['Geometry']['Corr_error'] = fid.variables['flag_cor_err_01'][:].copy()
# Same as the LRM/SAR groups but the waveform array is 512 bins instead of
# 128 and the number of echoes averaged is different.
CS_l1b_mds['Waveform_1Hz'] = {}
# Data Record Time (MDSR Time Stamp)
# Time (seconds since 2000-01-01)
time_avg_01_ku = fid.variables['time_avg_01_ku'][:].copy()
CS_l1b_mds['Waveform_1Hz']['Time'] = time_avg_01_ku.copy()
# Time: day part
CS_l1b_mds['Waveform_1Hz']['Day'] = np.array(time_avg_01_ku/86400.0, dtype=np.int32)
# Time: second part
CS_l1b_mds['Waveform_1Hz']['Second'] = np.array(time_avg_01_ku -
CS_l1b_mds['Waveform_1Hz']['Day'][:]*86400.0, dtype=np.uint32)
# Time: microsecond part
CS_l1b_mds['Waveform_1Hz']['Micsec'] = np.array((time_avg_01_ku -
CS_l1b_mds['Waveform_1Hz']['Day'][:]*86400.0 -
CS_l1b_mds['Waveform_1Hz']['Second'][:])*1e6, dtype=np.uint32)
# Lat: packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Waveform_1Hz']['Lat'] = fid.variables['lat_avg_01_ku'][:].copy()
# Lon: packed units (0.1 micro-degree, 1e-7 degrees)
CS_l1b_mds['Waveform_1Hz']['Lon'] = fid.variables['lon_avg_01_ku'][:].copy()
# Alt: packed units (mm, 1e-3 m)
# Altitude of COG above reference ellipsoid (interpolated value)
CS_l1b_mds['Waveform_1Hz']['Alt'] = fid.variables['alt_avg_01_ku'][:].copy()
# Window Delay (two-way) corrected for instrument delays
CS_l1b_mds['Waveform_1Hz']['TD'] = fid.variables['window_del_avg_01_ku'][:].copy()
# 1 Hz Averaged Power Echo Waveform
CS_l1b_mds['Waveform_1Hz']['Waveform'] = fid.variables['pwr_waveform_avg_01_ku'][:].copy()
# Echo Scale Factor (to scale echo to watts)
CS_l1b_mds['Waveform_1Hz']['Linear_Wfm_Multiplier'] = fid.variables['echo_scale_factor_avg_01_ku'][:].copy()
# Echo Scale Power (a power of 2 to scale echo to Watts)
CS_l1b_mds['Waveform_1Hz']['Power2_Wfm_Multiplier'] = fid.variables['echo_scale_pwr_avg_01_ku'][:].copy()
# Number of echoes averaged
CS_l1b_mds['Waveform_1Hz']['N_avg_echoes'] = fid.variables['echo_numval_avg_01_ku'][:].copy()
CS_l1b_mds['Waveform_1Hz']['Flags'] = fid.variables['flag_echo_avg_01_ku'][:].copy()
# CryoSat-2 Waveforms Groups
CS_l1b_mds['Waveform_20Hz'] = {}
# Echo Scale Factor (to scale echo to watts)
CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
echo_scale_factor_20_ku = fid.variables['echo_scale_factor_20_ku'][:].copy()
# Echo Scale Power (a power of 2)
CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
echo_scale_pwr_20_ku = fid.variables['echo_scale_pwr_20_ku'][:].copy()
# Number of echoes averaged
CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
echo_numval_20_ku = fid.variables['echo_numval_20_ku'][:].copy()
# Flags for errors or information about 20Hz waveform
CS_l1b_mds['Waveform_20Hz']['Flags'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Flags'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
flag_echo_20_ku = fid.variables['flag_echo_20_ku'][:].copy()
# CryoSat-2 mode specific waveform variables
if (self.MODE == 'LRM'):
# Low-Resolution Mode
# Averaged Power Echo Waveform [128]
CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.ma.zeros((n_records,n_blocks,n_LRM_RW))
CS_l1b_mds['Waveform_20Hz']['Waveform'].mask = np.zeros((n_records,n_blocks,n_LRM_RW),dtype=np.bool)
pwr_waveform_20_ku = fid.variables['pwr_waveform_20_ku'][:].copy()
elif (self.MODE == 'SAR'):
# SAR Mode
# Averaged Power Echo Waveform [256]
CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.ma.zeros((n_records,n_blocks,n_SAR_D_RW))
CS_l1b_mds['Waveform_20Hz']['Waveform'].mask = np.zeros((n_records,n_blocks,n_SAR_D_RW),dtype=np.bool)
pwr_waveform_20_ku = fid.variables['pwr_waveform_20_ku'][:].copy()
elif (self.MODE == 'SIN'):
# SARIN Mode
# Averaged Power Echo Waveform [1024]
CS_l1b_mds['Waveform_20Hz']['Waveform'] = np.ma.zeros((n_records,n_blocks,n_SARIN_D_RW))
CS_l1b_mds['Waveform_20Hz']['Waveform'].mask = np.zeros((n_records,n_blocks,n_SARIN_D_RW),dtype=np.bool)
pwr_waveform_20_ku = fid.variables['pwr_waveform_20_ku'][:].copy()
# Coherence [1024]: packed units (1/1000)
CS_l1b_mds['Waveform_20Hz']['Coherence'] = np.ma.zeros((n_records,n_blocks,n_SARIN_D_RW))
CS_l1b_mds['Waveform_20Hz']['Coherence'].mask = np.zeros((n_records,n_blocks,n_SARIN_D_RW),dtype=np.bool)
coherence_waveform_20_ku = fid.variables['coherence_waveform_20_ku'][:].copy()
# Phase Difference [1024]: packed units (microradians)
CS_l1b_mds['Waveform_20Hz']['Phase_diff'] = np.ma.zeros((n_records,n_blocks,n_SARIN_D_RW))
CS_l1b_mds['Waveform_20Hz']['Phase_diff'].mask = np.zeros((n_records,n_blocks,n_SARIN_D_RW),dtype=np.bool)
ph_diff_waveform_20_ku = fid.variables['ph_diff_waveform_20_ku'][:].copy()
# Beam Behavior Parameters
if self.MODE in ('SAR','SIN'):
CS_l1b_mds['Waveform_20Hz']['Beam'] = {}
# Standard Deviation of Gaussian fit to range integrated stack power.
CS_l1b_mds['Waveform_20Hz']['Beam']['SD'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['SD'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_std_20_ku = fid.variables['stack_std_20_ku'][:].copy()
# Stack Center: Mean of Gaussian fit to range integrated stack power.
CS_l1b_mds['Waveform_20Hz']['Beam']['Center'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Center'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_centre_20_ku = fid.variables['stack_centre_20_ku'][:].copy()
# Stack amplitude parameter scaled in dB/100.
CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_scaled_amplitude_20_ku = fid.variables['stack_scaled_amplitude_20_ku'][:].copy()
# 3rd moment: providing the degree of asymmetry of the range integrated
# stack power distribution.
CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_skewness_20_ku = fid.variables['stack_skewness_20_ku'][:].copy()
# 4th moment: Measure of peakiness of range integrated stack power distribution.
CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_kurtosis_20_ku = fid.variables['stack_kurtosis_20_ku'][:].copy()
# Stack peakiness computed from the range integrated power of the single look echoes
CS_l1b_mds['Waveform_20Hz']['Beam']['Peakiness'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Peakiness'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_peakiness_20_ku = fid.variables['stack_peakiness_20_ku'][:].copy()
# Stack residuals of Gaussian that fits the range integrated power of the single look echoes
CS_l1b_mds['Waveform_20Hz']['Beam']['RMS'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['RMS'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_gaussian_fitting_residuals_20_ku = fid.variables['stack_gaussian_fitting_residuals_20_ku'][:].copy()
# Standard deviation as a function of boresight angle (microradians)
CS_l1b_mds['Waveform_20Hz']['Beam']['SD_boresight_angle'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['SD_boresight_angle'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_std_angle_20_ku = fid.variables['stack_std_angle_20_ku'][:].copy()
# Stack Center angle as a function of boresight angle (microradians)
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_boresight_angle'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_boresight_angle'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_centre_angle_20_ku = fid.variables['stack_centre_angle_20_ku'][:].copy()
# Stack Center angle as a function of look angle (microradians)
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_look_angle'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_look_angle'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_centre_look_angle_20_ku = fid.variables['stack_centre_look_angle_20_ku'][:].copy()
# Number of contributing beams in the stack before weighting
CS_l1b_mds['Waveform_20Hz']['Beam']['Number'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Number'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_number_before_weighting_20_ku = fid.variables['stack_number_before_weighting_20_ku'][:].copy()
# Number of contributing beams in the stack after weighting
CS_l1b_mds['Waveform_20Hz']['Beam']['Weighted_Number'] = np.ma.zeros((n_records,n_blocks))
CS_l1b_mds['Waveform_20Hz']['Beam']['Weighted_Number'].mask = np.zeros((n_records,n_blocks),dtype=np.bool)
stack_number_after_weighting_20_ku = fid.variables['stack_number_after_weighting_20_ku'][:].copy()
# for each record in the CryoSat file
for r in range(n_records):
# index for record r
idx = ind_first_meas_20hz_01[r]
# number of valid blocks in record r
cnt = np.count_nonzero(ind_meas_1hz_20_ku == r)
# CryoSat-2 Time and Orbit Group
CS_l1b_mds['Location']['Time'].data[r,:cnt] = time_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Time'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Day'].data[r,:cnt] = np.array(time_20_ku[idx:idx+cnt]/86400.0, dtype=np.int)
CS_l1b_mds['Location']['Day'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Second'].data[r,:cnt] = np.array(time_20_ku[idx:idx+cnt] -
CS_l1b_mds['Location']['Day'].data[r,:cnt]*86400.0, dtype=np.int)
CS_l1b_mds['Location']['Second'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Micsec'].data[r,:cnt] = np.array((time_20_ku[idx:idx+cnt] -
CS_l1b_mds['Location']['Day'].data[r,:cnt]*86400.0 -
CS_l1b_mds['Location']['Second'].data[r,:cnt])*1e6, dtype=np.uint32)
CS_l1b_mds['Location']['Micsec'].mask[r,:cnt] = False
CS_l1b_mds['Location']['USO_Corr'].data[r,:cnt] = uso_cor_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['USO_Corr'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Mode_ID'].data[r,:cnt] = flag_instr_mode_op_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Mode_ID'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Mode_flags'].data[r,:cnt] = flag_instr_mode_flags_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Mode_flags'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Att_control'].data[r,:cnt] = flag_instr_mode_att_ctrl_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Att_control'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Inst_config'].data[r,:cnt] = flag_instr_conf_rx_flags_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Inst_config'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Inst_band'].data[r,:cnt] = flag_instr_conf_rx_bwdt_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Inst_band'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Inst_channel'].data[r,:cnt] = flag_instr_conf_rx_in_use_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Inst_channel'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Tracking_mode'].data[r,:cnt] = flag_instr_conf_rx_trk_mode_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Tracking_mode'].mask[r,:cnt] = False
CS_l1b_mds['Location']['SSC'].data[r,:cnt] = seq_count_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['SSC'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Rec_Count'].data[r,:cnt] = rec_count_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Rec_Count'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Lat'].data[r,:cnt] = lat_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Lat'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Lon'].data[r,:cnt] = lon_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Lon'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Alt'].data[r,:cnt] = alt_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Alt'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Alt_rate'].data[r,:cnt] = orb_alt_rate_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Alt_rate'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Sat_velocity'].data[r,:cnt,:] = sat_vel_vec_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Sat_velocity'].mask[r,:cnt,:] = False
CS_l1b_mds['Location']['Real_beam'].data[r,:cnt,:] = beam_dir_vec_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Real_beam'].mask[r,:cnt,:] = False
CS_l1b_mds['Location']['Baseline'].data[r,:cnt,:] = inter_base_vec_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Baseline'].mask[r,:cnt,:] = False
CS_l1b_mds['Location']['ST_ID'].data[r,:cnt] = flag_instr_conf_rx_str_in_use_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['ST_ID'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Roll'].data[r,:cnt] = off_nadir_roll_angle_str_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Roll'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Pitch'].data[r,:cnt] = off_nadir_pitch_angle_str_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Pitch'].mask[r,:cnt] = False
CS_l1b_mds['Location']['Yaw'].data[r,:cnt] = off_nadir_yaw_angle_str_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['Yaw'].mask[r,:cnt] = False
CS_l1b_mds['Location']['MCD'].data[r,:cnt] = flag_mcd_20_ku[idx:idx+cnt]
CS_l1b_mds['Location']['MCD'].mask[r,:cnt] = False
# CryoSat-2 Measurement Group
# Derived from instrument measurement parameters
CS_l1b_mds['Data']['TD'].data[r,:cnt] = window_del_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['TD'].mask[r,:cnt] = False
CS_l1b_mds['Data']['H_0'].data[r,:cnt] = h0_applied_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['H_0'].mask[r,:cnt] = False
CS_l1b_mds['Data']['COR2'].data[r,:cnt] = cor2_applied_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['COR2'].mask[r,:cnt] = False
CS_l1b_mds['Data']['LAI'].data[r,:cnt] = h0_lai_word_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['LAI'].mask[r,:cnt] = False
CS_l1b_mds['Data']['FAI'].data[r,:cnt] = h0_fai_word_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['FAI'].mask[r,:cnt] = False
CS_l1b_mds['Data']['AGC_CH1'].data[r,:cnt] = agc_ch1_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['AGC_CH1'].mask[r,:cnt] = False
CS_l1b_mds['Data']['AGC_CH2'].data[r,:cnt] = agc_ch2_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['AGC_CH2'].mask[r,:cnt] = False
CS_l1b_mds['Data']['TR_gain_CH1'].data[r,:cnt] = tot_gain_ch1_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['TR_gain_CH1'].mask[r,:cnt] = False
CS_l1b_mds['Data']['TR_gain_CH2'].data[r,:cnt] = tot_gain_ch2_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['TR_gain_CH2'].mask[r,:cnt] = False
CS_l1b_mds['Data']['TX_Power'].data[r,:cnt] = transmit_pwr_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['TX_Power'].mask[r,:cnt] = False
CS_l1b_mds['Data']['Doppler_range'].data[r,:cnt] = dop_cor_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['Doppler_range'].mask[r,:cnt] = False
CS_l1b_mds['Data']['Doppler_angle_start'].data[r,:cnt] = dop_angle_start_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['Doppler_angle_start'].mask[r,:cnt] = False
CS_l1b_mds['Data']['Doppler_angle_stop'].data[r,:cnt] = dop_angle_stop_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['Doppler_angle_stop'].mask[r,:cnt] = False
CS_l1b_mds['Data']['TR_inst_range'].data[r,:cnt] = instr_cor_range_tx_rx_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['TR_inst_range'].mask[r,:cnt] = False
CS_l1b_mds['Data']['R_inst_range'].data[r,:cnt] = instr_cor_range_rx_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['R_inst_range'].mask[r,:cnt] = False
CS_l1b_mds['Data']['TR_inst_gain'].data[r,:cnt] = instr_cor_gain_tx_rx_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['TR_inst_gain'].mask[r,:cnt] = False
CS_l1b_mds['Data']['R_inst_gain'].data[r,:cnt] = instr_cor_gain_rx_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['R_inst_gain'].mask[r,:cnt] = False
CS_l1b_mds['Data']['Internal_phase'].data[r,:cnt] = instr_int_ph_cor_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['Internal_phase'].mask[r,:cnt] = False
CS_l1b_mds['Data']['External_phase'].data[r,:cnt] = instr_ext_ph_cor_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['External_phase'].mask[r,:cnt] = False
CS_l1b_mds['Data']['Noise_power'].data[r,:cnt] = noise_power_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['Noise_power'].mask[r,:cnt] = False
CS_l1b_mds['Data']['Phase_slope'].data[r,:cnt] = ph_slope_cor_20_ku[idx:idx+cnt]
CS_l1b_mds['Data']['Phase_slope'].mask[r,:cnt] = False
# CryoSat-2 Waveforms Groups
CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'].data[r,:cnt] = echo_scale_factor_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Linear_Wfm_Multiplier'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'].data[r,:cnt] = echo_scale_pwr_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Power2_Wfm_Multiplier'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'].data[r,:cnt] = echo_numval_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['N_avg_echoes'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Flags'].data[r,:cnt] = flag_echo_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Flags'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Waveform'].data[r,:cnt,:] = pwr_waveform_20_ku[idx:idx+cnt,:]
CS_l1b_mds['Waveform_20Hz']['Waveform'].mask[r,:cnt,:] = False
# SARIN Mode parameters
if (self.MODE == 'SIN'):
CS_l1b_mds['Waveform_20Hz']['Coherence'].data[r,:cnt,:] = coherence_waveform_20_ku[idx:idx+cnt,:]
CS_l1b_mds['Waveform_20Hz']['Coherence'].mask[r,:cnt,:] = False
CS_l1b_mds['Waveform_20Hz']['Phase_diff'].data[r,:cnt,:] = ph_diff_waveform_20_ku[idx:idx+cnt,:]
CS_l1b_mds['Waveform_20Hz']['Phase_diff'].mask[r,:cnt,:] = False
# SAR/SARIN waveform beam parameters
if self.MODE in ('SAR','SIN'):
CS_l1b_mds['Waveform_20Hz']['Beam']['SD'].data[r,:cnt] = stack_std_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['SD'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Center'].data[r,:cnt] = stack_centre_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Center'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'].data[r,:cnt] = stack_scaled_amplitude_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Amplitude'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'].data[r,:cnt] = stack_skewness_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Skewness'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'].data[r,:cnt] = stack_kurtosis_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Kurtosis'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Peakiness'].data[r,:cnt] = stack_peakiness_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Peakiness'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['RMS'].data[r,:cnt] = stack_gaussian_fitting_residuals_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['RMS'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['SD_boresight_angle'].data[r,:cnt] = stack_std_angle_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['SD_boresight_angle'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_boresight_angle'].data[r,:cnt] = stack_centre_angle_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_boresight_angle'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_look_angle'].data[r,:cnt] = stack_centre_look_angle_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Center_look_angle'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Number'].data[r,:cnt] = stack_number_before_weighting_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Number'].mask[r,:cnt] = False
CS_l1b_mds['Waveform_20Hz']['Beam']['Weighted_Number'].data[r,:cnt] = stack_number_after_weighting_20_ku[idx:idx+cnt]
CS_l1b_mds['Waveform_20Hz']['Beam']['Weighted_Number'].mask[r,:cnt] = False
# extract global attributes and assign as MPH and SPH metadata
CS_l1b_mds['METADATA'] = dict(MPH={},SPH={},DSD={})
# MPH attributes
CS_l1b_mds['METADATA']['MPH']['PRODUCT'] = fid.product_name
CS_l1b_mds['METADATA']['MPH']['DOI'] = fid.doi
CS_l1b_mds['METADATA']['MPH']['PROC_STAGE'] = fid.processing_stage
CS_l1b_mds['METADATA']['MPH']['REF_DOC'] = fid.reference_document
CS_l1b_mds['METADATA']['MPH']['ACQUISITION_STATION'] = fid.acquisition_station
CS_l1b_mds['METADATA']['MPH']['PROC_CENTER'] = fid.processing_centre
CS_l1b_mds['METADATA']['MPH']['PROC_TIME'] = fid.creation_time
CS_l1b_mds['METADATA']['MPH']['SOFTWARE_VER'] = fid.software_version
CS_l1b_mds['METADATA']['MPH']['SENSING_START'] = fid.sensing_start
CS_l1b_mds['METADATA']['MPH']['SENSING_STOP'] = fid.sensing_stop
CS_l1b_mds['METADATA']['MPH']['PHASE'] = fid.phase
CS_l1b_mds['METADATA']['MPH']['CYCLE'] = fid.cycle_number
CS_l1b_mds['METADATA']['MPH']['REL_ORBIT'] = fid.rel_orbit_number
CS_l1b_mds['METADATA']['MPH']['ABS_ORBIT'] = fid.abs_orbit_number
CS_l1b_mds['METADATA']['MPH']['STATE_VECTOR_TIME'] = fid.state_vector_time
CS_l1b_mds['METADATA']['MPH']['DELTA_UT1'] = fid.delta_ut1
CS_l1b_mds['METADATA']['MPH']['X_POSITION'] = fid.x_position
CS_l1b_mds['METADATA']['MPH']['Y_POSITION'] = fid.y_position
CS_l1b_mds['METADATA']['MPH']['Z_POSITION'] = fid.z_position
CS_l1b_mds['METADATA']['MPH']['X_VELOCITY'] = fid.x_velocity
CS_l1b_mds['METADATA']['MPH']['Y_VELOCITY'] = fid.y_velocity
CS_l1b_mds['METADATA']['MPH']['Z_VELOCITY'] = fid.z_velocity
CS_l1b_mds['METADATA']['MPH']['VECTOR_SOURCE'] = fid.vector_source
CS_l1b_mds['METADATA']['MPH']['LEAP_UTC'] = fid.leap_utc
CS_l1b_mds['METADATA']['MPH']['LEAP_SIGN'] = fid.leap_sign
CS_l1b_mds['METADATA']['MPH']['LEAP_ERR'] = fid.leap_err
CS_l1b_mds['METADATA']['MPH']['PRODUCT_ERR'] = fid.product_err
# SPH attributes
CS_l1b_mds['METADATA']['SPH']['START_RECORD_TAI_TIME'] = fid.first_record_time
CS_l1b_mds['METADATA']['SPH']['STOP_RECORD_TAI_TIME'] = fid.last_record_time
CS_l1b_mds['METADATA']['SPH']['ABS_ORBIT_START'] = fid.abs_orbit_start
CS_l1b_mds['METADATA']['SPH']['REL_TIME_ASC_NODE_START'] = fid.rel_time_acs_node_start
CS_l1b_mds['METADATA']['SPH']['ABS_ORBIT_STOP'] = fid.abs_orbit_stop
CS_l1b_mds['METADATA']['SPH']['REL_TIME_ASC_NODE_STOP'] = fid.rel_time_acs_node_stop
CS_l1b_mds['METADATA']['SPH']['EQUATOR_CROSS_TIME_UTC'] = fid.equator_cross_time
CS_l1b_mds['METADATA']['SPH']['EQUATOR_CROSS_LONG'] = fid.equator_cross_long
CS_l1b_mds['METADATA']['SPH']['ASCENDING_FLAG'] = fid.ascending_flag
CS_l1b_mds['METADATA']['SPH']['START_LAT'] = fid.first_record_lat
CS_l1b_mds['METADATA']['SPH']['START_LONG'] = fid.first_record_lon
CS_l1b_mds['METADATA']['SPH']['STOP_LAT'] = fid.last_record_lat
CS_l1b_mds['METADATA']['SPH']['STOP_LONG'] = fid.last_record_lon
CS_l1b_mds['METADATA']['SPH']['L0_PROC_FLAG'] = fid.l0_proc_flag
CS_l1b_mds['METADATA']['SPH']['L0_PROCESSING_QUALITY'] = fid.l0_processing_quality
CS_l1b_mds['METADATA']['SPH']['L0_PROC_THRESH'] = fid.l0_proc_thresh
CS_l1b_mds['METADATA']['SPH']['L0_GAPS_FLAG'] = fid.l0_gaps_flag
CS_l1b_mds['METADATA']['SPH']['L0_GAPS_NUM'] = fid.l0_gaps_num
CS_l1b_mds['METADATA']['SPH']['INSTR_ID'] = fid.instr_id
CS_l1b_mds['METADATA']['SPH']['OPEN_OCEAN_PERCENT'] = fid.open_ocean_percent
CS_l1b_mds['METADATA']['SPH']['CLOSE_SEA_PERCENT'] = fid.close_sea_percent
CS_l1b_mds['METADATA']['SPH']['CONTINENT_ICE_PERCENT'] = fid.continent_ice_percent
CS_l1b_mds['METADATA']['SPH']['LAND_PERCENT'] = fid.land_percent
CS_l1b_mds['METADATA']['SPH']['L1_PROD_STATUS'] = fid.l1b_prod_status
CS_l1b_mds['METADATA']['SPH']['L1_PROC_FLAG'] = fid.l1b_proc_flag
CS_l1b_mds['METADATA']['SPH']['L1_PROCESSING_QUALITY'] = fid.l1b_processing_quality
CS_l1b_mds['METADATA']['SPH']['L1_PROC_THRESH'] = fid.l1b_proc_thresh
CS_l1b_mds['METADATA']['SPH']['SIR_CONFIGURATION'] = fid.sir_configuration
CS_l1b_mds['METADATA']['SPH']['SIR_OP_MODE'] = fid.sir_op_mode
CS_l1b_mds['METADATA']['SPH']['ORBIT_FILE'] = fid.xref_orbit
CS_l1b_mds['METADATA']['SPH']['PROC_CONFIG_PARAMS_FILE'] = fid.xref_pconf
CS_l1b_mds['METADATA']['SPH']['CONSTANTS_FILE'] = fid.xref_constants
CS_l1b_mds['METADATA']['SPH']['IPF_RA_DATABASE_FILE'] = fid.xref_siral_characterisation
CS_l1b_mds['METADATA']['SPH']['DORIS_USO_DRIFT_FILE'] = fid.xref_uso
CS_l1b_mds['METADATA']['SPH']['STAR_TRACKER_ATTREF_FILE'] = fid.xref_star_tracker_attref
CS_l1b_mds['METADATA']['SPH']['SIRAL_LEVEL_0_FILE'] = fid.xref_siral_l0
CS_l1b_mds['METADATA']['SPH']['CALIBRATION_TYPE_1_FILE'] = fid.xref_cal1
CS_l1b_mds['METADATA']['SPH']['SIR_COMPLEX_CAL1_SARIN'] = fid.xref_cal1_sarin
CS_l1b_mds['METADATA']['SPH']['CALIBRATION_TYPE_2_FILE'] = fid.xref_cal2
CS_l1b_mds['METADATA']['SPH']['SURFACE_PRESSURE_FILE'] = fid.xref_surf_pressure
CS_l1b_mds['METADATA']['SPH']['MEAN_PRESSURE_FILE'] = fid.xref_mean_pressure
CS_l1b_mds['METADATA']['SPH']['WET_TROPOSPHERE_FILE'] = fid.xref_wet_trop
CS_l1b_mds['METADATA']['SPH']['U_WIND_FILE'] = fid.xref_u_wind
CS_l1b_mds['METADATA']['SPH']['V_WIND_FILE'] = fid.xref_v_wind
CS_l1b_mds['METADATA']['SPH']['METEO_GRID_DEF_FILE'] = fid.xref_meteo
CS_l1b_mds['METADATA']['SPH']['S1S2_PRESSURE_00H_MAP'] = fid.xref_s1s2_pressure_00h
CS_l1b_mds['METADATA']['SPH']['S1S2_PRESSURE_06H_MAP'] = fid.xref_s1s2_pressure_06h
CS_l1b_mds['METADATA']['SPH']['S1S2_PRESSURE_12H_MAP'] = fid.xref_s1s2_pressure_12h
CS_l1b_mds['METADATA']['SPH']['S1S2_PRESSURE_18H_MAP'] = fid.xref_s1s2_pressure_18h
CS_l1b_mds['METADATA']['SPH']['S1_TIDE_AMPLITUDE_MAP'] = fid.xref_s1_tide_amplitude
CS_l1b_mds['METADATA']['SPH']['S1_TIDE_PHASE_MAP'] = fid.xref_s1_tide_phase
CS_l1b_mds['METADATA']['SPH']['S2_TIDE_AMPLITUDE_MAP'] = fid.xref_s2_tide_amplitude
CS_l1b_mds['METADATA']['SPH']['S2_TIDE_PHASE_MAP'] = fid.xref_s2_tide_phase
CS_l1b_mds['METADATA']['SPH']['GPS_IONO_MAP'] = fid.xref_gim
CS_l1b_mds['METADATA']['SPH']['IONO_COEFFICENTS_FILE'] = fid.xref_iono_cor
CS_l1b_mds['METADATA']['SPH']['SAI_FILE'] = fid.xref_sai
CS_l1b_mds['METADATA']['SPH']['OCEAN_TIDE_FILE'] = fid.xref_ocean_tide
CS_l1b_mds['METADATA']['SPH']['TIDAL_LOADING_FILE'] = fid.xref_tidal_load
CS_l1b_mds['METADATA']['SPH']['EARTH_TIDE_FILE'] = fid.xref_earth_tide
CS_l1b_mds['METADATA']['SPH']['POLE_TIDE_FILE'] = fid.xref_pole_location
CS_l1b_mds['METADATA']['SPH']['SURFACE_TYPE_FILE'] = fid.xref_surf_type
# return the output dictionary
return CS_l1b_mds
def cryosat_scaling_factors(self):
    """
    Get scaling factors for converting original unpacked units in binary files

    Returns a nested dictionary mirroring the CryoSat-2 L1b record layout
    (Location, Data, Geometry, Waveform_1Hz, Waveform_20Hz); each leaf is
    the multiplier that converts the packed integer field to SI-style units.
    """
    # Beam behaviour parameters shared by the SAR/SARIN 20 Hz waveform group
    beam_behavior = {
        # Standard deviation of Gaussian fit to range integrated stack power
        'SD': 1e-2,
        # Stack centre: mean of Gaussian fit to range integrated stack power
        'Center': 1e-2,
        # Stack amplitude parameter scaled in dB/100
        'Amplitude': 1e-2,
        # 3rd moment: degree of asymmetry of the range integrated
        # stack power distribution
        'Skewness': 1e-2,
        # 4th moment: peakiness of range integrated stack power distribution
        'Kurtosis': 1e-2,
        # Standard deviation as a function of boresight angle (microradians)
        'SD_boresight_angle': 1e-6,
        # Stack centre angle as a function of boresight angle (microradians)
        'Center_boresight_angle': 1e-6,
        'Spare': 1,
    }
    # CryoSat-2 Time and Orbit Group
    location = {
        # Time: day, second and microsecond parts
        'Day': 1.0,
        'Second': 1.0,
        'Micsec': 1.0,
        # USO correction factor
        'USO_Corr': 1e-15,
        # Mode ID
        'Mode_ID': 1,
        # Source sequence counter
        'SSC': 1,
        # Instrument configuration
        'Inst_config': 1,
        # Record counter
        'Rec_Count': 1,
        # Lat/Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        'Lat': 1e-7,
        'Lon': 1e-7,
        # Altitude of COG above reference ellipsoid: packed units (mm, 1e-3 m)
        'Alt': 1e-3,
        # Instantaneous altitude rate derived from orbit (mm/s, 1e-3 m/s)
        'Alt_rate': 1e-3,
        # Satellite velocity vector in ITRF (mm/s, 1e-3 m/s)
        # ITRF = International Terrestrial Reference Frame
        'Sat_velocity': 1e-3,
        # Real beam direction vector in CRF (micro-m/s, 1e-6 m/s)
        # CRF = CryoSat Reference Frame
        'Real_beam': 1e-6,
        # Interferometric baseline vector in CRF (micro-m/s, 1e-6 m/s)
        'Baseline': 1e-6,
        # Star Tracker ID
        'ST_ID': 1,
        # Antenna bench roll/pitch/yaw angles derived from star trackers
        # packed units (0.1 micro-degree, 1e-7 degrees)
        'Roll': 1e-7,
        'Pitch': 1e-7,
        'Yaw': 1e-7,
        # Measurement Confidence Data flags: MCD == 0 means no problems or
        # non-nominal conditions; serious errors set bit 31
        'MCD': 1,
        'Spares': 1,
    }
    # CryoSat-2 Measurement Group: derived from instrument measurement parameters
    data = {
        # Window delay reference (two-way) corrected for instrument delays
        'TD': 1e-12,
        # H0 initial height word from telemetry
        'H_0': 4.88e-11,
        # COR2 height rate: on-board tracker height rate over the radar cycle
        'COR2': 3.05e-12,
        # Coarse range word (LAI) derived from telemetry
        'LAI': 1.25e-8,
        # Fine range word (FAI) derived from telemetry
        'FAI': 12.5e-9/256.0,
        # AGC gain applied on Rx channels 1 and 2 with gain calibration
        # corrections applied (dB/100)
        'AGC_CH1': 1e-2,
        'AGC_CH2': 1e-2,
        # Total fixed gain applied by the RF unit on channels 1 and 2 (dB/100)
        'TR_gain_CH1': 1e-2,
        'TR_gain_CH2': 1e-2,
        # Transmit power in microWatts
        'TX_Power': 1e-6,
        # Doppler range correction: radial component (mm)
        'Doppler_range': 1e-3,
        # CAL1 range corrections: transmit-receive and receive-only antennas (mm)
        'TR_inst_range': 1e-3,
        'R_inst_range': 1e-3,
        # CAL1 gain corrections: transmit-receive and receive-only antennas (dB/100)
        'TR_inst_gain': 1e-2,
        'R_inst_gain': 1e-2,
        # Internal and external phase corrections (microradians)
        'Internal_phase': 1e-6,
        'External_phase': 1e-6,
        # Noise power measurement (dB/100)
        'Noise_power': 1e-2,
        # Phase slope correction (microradians), computed from the CAL-4
        # packets during the azimuth impulse response amplitude (SARIN only)
        'Phase_slope': 1e-6,
        'Spares1': 1,
    }
    # CryoSat-2 External Corrections Group
    # geophysical corrections all have packed units (mm, 1e-3 m)
    geometry = {
        'dryTrop': 1e-3,        # dry tropospheric correction
        'wetTrop': 1e-3,        # wet tropospheric correction
        'InvBar': 1e-3,         # inverse barometric correction
        'DAC': 1e-3,            # delta inverse barometric correction
        'Iono_GIM': 1e-3,       # GIM ionospheric correction
        'Iono_model': 1e-3,     # model ionospheric correction
        'ocTideElv': 1e-3,      # ocean tide correction
        'lpeTideElv': 1e-3,     # long period equilibrium ocean tide correction
        'olTideElv': 1e-3,      # ocean loading tide correction
        'seTideElv': 1e-3,      # solid Earth tide correction
        'gpTideElv': 1e-3,      # geocentric polar tide correction
        # Surface type at nadir: 0=open ocean, 1=closed sea,
        # 2=continental ice, 3=land
        'Surf_type': 1,
        'Spare1': 1,
        # corrections status and error flags
        'Corr_status': 1,
        'Corr_error': 1,
        'Spare2': 1,
    }
    # CryoSat-2 Average Waveforms Group
    waveform_1hz = {
        # Data record time (MDSR time stamp)
        'Day': 1.0,
        'Second': 1.0,
        'Micsec': 1.0,
        # Lat/Lon: packed units (0.1 micro-degree, 1e-7 degrees)
        'Lat': 1e-7,
        'Lon': 1e-7,
        # Altitude of COG above reference ellipsoid (mm, 1e-3 m)
        'Alt': 1e-3,
        # Window delay (two-way) corrected for instrument delays
        'TD': 1e-12,
        # 1 Hz averaged power echo waveform
        'Waveform': 1.0,
        # Echo scale factor and power (to scale echo to Watts)
        'Linear_Wfm_Multiplier': 1.0,
        'Power2_Wfm_Multiplier': 1.0,
        # Number of echoes averaged
        'N_avg_echoes': 1,
        'Flags': 1,
    }
    # CryoSat-2 20 Hz Waveforms Group
    waveform_20hz = {
        # Averaged power echo waveform
        'Waveform': 1.0,
        # Echo scale factor and power (to scale echo to Watts)
        'Linear_Wfm_Multiplier': 1.0,
        'Power2_Wfm_Multiplier': 1.0,
        # Number of echoes averaged
        'N_avg_echoes': 1,
        'Flags': 1,
        # Beam behaviour parameters (SAR/SARIN)
        'Beam': beam_behavior,
        # Coherence [SARIN]: packed units (1/1000)
        'Coherence': 1e-3,
        # Phase difference [SARIN]: packed units (microradians)
        'Phase_diff': 1e-6,
    }
    # return the assembled scaling factor dictionary
    return {
        'Location': location,
        'Data': data,
        'Geometry': geometry,
        'Waveform_1Hz': waveform_1hz,
        'Waveform_20Hz': waveform_20hz,
    }
| 65.721769
| 134
| 0.627559
| 22,464
| 153,066
| 4.018118
| 0.037838
| 0.055671
| 0.080919
| 0.062218
| 0.890974
| 0.858635
| 0.827881
| 0.795786
| 0.771789
| 0.749842
| 0
| 0.041883
| 0.220539
| 153,066
| 2,328
| 135
| 65.75
| 0.714665
| 0.221663
| 0
| 0.479181
| 0
| 0.001411
| 0.194162
| 0.031171
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008469
| false
| 0
| 0.00494
| 0
| 0.022583
| 0.007057
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
594bc5f77ca906ae0433fe140d3a32eb4d88a297
| 40
|
py
|
Python
|
Hello_World.py
|
Fang465/MB215Lab1
|
9a45c7e6fc804d541c213d1741084ba32d95b0f5
|
[
"MIT"
] | null | null | null |
Hello_World.py
|
Fang465/MB215Lab1
|
9a45c7e6fc804d541c213d1741084ba32d95b0f5
|
[
"MIT"
] | null | null | null |
Hello_World.py
|
Fang465/MB215Lab1
|
9a45c7e6fc804d541c213d1741084ba32d95b0f5
|
[
"MIT"
] | null | null | null |
# Print the author's introduction on stdout.
greeting = "Hello, my name is Ryan Kitamura"
print(greeting)
| 40
| 40
| 0.75
| 7
| 40
| 4.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 40
| 1
| 40
| 40
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
3ca01de01e0660058ccb8475194422ab6c649039
| 104
|
py
|
Python
|
torchOnVideo/datasets/Vimeo90KTriplet/frame_interpolation/__init__.py
|
torchOnVideo/torchOnVideo
|
aa07d5661f772eca027ecc6b79e14bd68a515aa1
|
[
"MIT"
] | 2
|
2021-03-19T08:05:06.000Z
|
2021-05-22T21:54:10.000Z
|
torchOnVideo/datasets/Vimeo90KTriplet/frame_interpolation/__init__.py
|
torchOnVideo/torchOnVideo
|
aa07d5661f772eca027ecc6b79e14bd68a515aa1
|
[
"MIT"
] | null | null | null |
torchOnVideo/datasets/Vimeo90KTriplet/frame_interpolation/__init__.py
|
torchOnVideo/torchOnVideo
|
aa07d5661f772eca027ecc6b79e14bd68a515aa1
|
[
"MIT"
] | null | null | null |
from .train_adacof import TrainAdaCoF
from .train_cain import TrainCAIN
from .test_CAIN import TestCAIN
| 26
| 37
| 0.855769
| 15
| 104
| 5.733333
| 0.6
| 0.209302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 3
| 38
| 34.666667
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ca43b7cd44810b0c1007f18b976c480556f7152
| 84
|
py
|
Python
|
pycardano/backend/__init__.py
|
Blockery-io/pycardano
|
81749b46324346a0e3cb4808290fd565f4ed7450
|
[
"MIT"
] | 72
|
2022-01-09T03:54:06.000Z
|
2022-03-30T22:05:44.000Z
|
pycardano/backend/__init__.py
|
Blockery-io/pycardano
|
81749b46324346a0e3cb4808290fd565f4ed7450
|
[
"MIT"
] | 13
|
2022-02-19T13:08:11.000Z
|
2022-03-30T16:57:33.000Z
|
pycardano/backend/__init__.py
|
henryyuanheng-wang/pycardano
|
d58c53791ffef542762e6d0220d4ccd1c0950e5e
|
[
"MIT"
] | 15
|
2022-02-07T23:54:51.000Z
|
2022-03-30T17:06:12.000Z
|
# flake8: noqa
from .base import *
from .blockfrost import *
from .ogmios import *
| 14
| 25
| 0.714286
| 11
| 84
| 5.454545
| 0.636364
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.190476
| 84
| 5
| 26
| 16.8
| 0.867647
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ca803824d090f0628d75891cf8f7c180165f2a8
| 2,030
|
py
|
Python
|
Operations.py
|
alfredots/image-processing
|
6a66e75c3248bfb997ed951dace24e3d5a431f5f
|
[
"Apache-2.0"
] | null | null | null |
Operations.py
|
alfredots/image-processing
|
6a66e75c3248bfb997ed951dace24e3d5a431f5f
|
[
"Apache-2.0"
] | null | null | null |
Operations.py
|
alfredots/image-processing
|
6a66e75c3248bfb997ed951dace24e3d5a431f5f
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import cv2
class Operations:
    """Pixel-wise logical operations on 8-bit grayscale images.

    All methods take 2-D numpy arrays.  ``binarizar`` thresholds an image
    *in place* (values < 128 become 0, all others 255) and returns it.
    The logical operators (`_or`, `_and`, `_xor`, `_not`) binarize their
    operands first — note that, as in the original implementation, this
    mutates the caller's input arrays — and return a new array with the
    same shape and dtype as the first operand.

    The original per-pixel Python loops were replaced by vectorized
    NumPy boolean-mask operations: identical results, one C-level pass
    instead of O(rows*cols) interpreted iterations.
    """

    @staticmethod
    def binarizar(img):
        """Threshold *img* in place: pixels < 128 -> 0, others -> 255."""
        low = img < 128
        # assign the low mask first so the two writes cannot interact
        img[low] = 0
        img[~low] = 255
        return img

    @staticmethod
    def _or(imgOne, imgTwo):
        """Return the pixel-wise OR of the two binarized images."""
        result = imgOne.copy()
        imgOne = Operations.binarizar(imgOne)
        imgTwo = Operations.binarizar(imgTwo)
        # a pixel is set if it is set in either binarized operand
        mask = (imgOne == 255) | (imgTwo == 255)
        result[mask] = 255
        result[~mask] = 0
        return result

    @staticmethod
    def _and(imgOne, imgTwo):
        """Return the pixel-wise AND of the two binarized images."""
        result = imgOne.copy()
        imgOne = Operations.binarizar(imgOne)
        imgTwo = Operations.binarizar(imgTwo)
        # original condition was (imgOne == 255 and imgTwo == imgOne),
        # i.e. both pixels are 255 after binarization
        mask = (imgOne == 255) & (imgTwo == 255)
        result[mask] = 255
        result[~mask] = 0
        return result

    @staticmethod
    def _xor(imgOne, imgTwo):
        """Return the pixel-wise XOR of the two binarized images."""
        result = imgOne.copy()
        imgOne = Operations.binarizar(imgOne)
        imgTwo = Operations.binarizar(imgTwo)
        # a pixel is set exactly when the binarized operands differ
        mask = imgTwo != imgOne
        result[mask] = 255
        result[~mask] = 0
        return result

    @staticmethod
    def _not(imgOne):
        """Return the pixel-wise complement of the binarized image."""
        result = imgOne.copy()
        imgOne = Operations.binarizar(imgOne)
        # 0 -> 255 and 255 -> 0 after binarization
        result[:] = 255 - imgOne
        return result
| 25.696203
| 66
| 0.4867
| 228
| 2,030
| 4.315789
| 0.144737
| 0.036585
| 0.04065
| 0.055894
| 0.796748
| 0.796748
| 0.796748
| 0.796748
| 0.796748
| 0.796748
| 0
| 0.026846
| 0.412808
| 2,030
| 79
| 67
| 25.696203
| 0.798658
| 0
| 0
| 0.721311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081967
| false
| 0
| 0.032787
| 0
| 0.213115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3cc3162aad3d3f093242c4bf948625131137442e
| 43
|
py
|
Python
|
packagename/core.py
|
ptrstn/python-starter
|
372e1563a84dfa8a0d5af0be52cceaf607666237
|
[
"Unlicense"
] | null | null | null |
packagename/core.py
|
ptrstn/python-starter
|
372e1563a84dfa8a0d5af0be52cceaf607666237
|
[
"Unlicense"
] | 4
|
2021-08-23T23:32:53.000Z
|
2022-01-24T10:41:02.000Z
|
packagename/core.py
|
ptrstn/python-starter
|
372e1563a84dfa8a0d5af0be52cceaf607666237
|
[
"Unlicense"
] | null | null | null |
def do_something():
    """Return the fixed placeholder string ``"something"``."""
    placeholder = "something"
    return placeholder
| 14.333333
| 22
| 0.697674
| 5
| 43
| 5.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 23
| 21.5
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.209302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
3ce8085886e9bf5721e34cde26a31e4f71d5e1dd
| 3,632
|
py
|
Python
|
experiments/verification_experiments/leaf_experiment_metadata_providers.py
|
M4rukku/impact_of_non_iid_data_in_federated_learning
|
c818db03699c82e42217d56f8ddd4cc2081c8bb1
|
[
"MIT"
] | null | null | null |
experiments/verification_experiments/leaf_experiment_metadata_providers.py
|
M4rukku/impact_of_non_iid_data_in_federated_learning
|
c818db03699c82e42217d56f8ddd4cc2081c8bb1
|
[
"MIT"
] | null | null | null |
experiments/verification_experiments/leaf_experiment_metadata_providers.py
|
M4rukku/impact_of_non_iid_data_in_federated_learning
|
c818db03699c82e42217d56f8ddd4cc2081c8bb1
|
[
"MIT"
] | null | null | null |
from enum import IntEnum, auto
from typing import Dict
from sources.experiments.experiment_metadata_provider_utils import FixedExperimentMetadata, ExperimentMetadataProvider
class ExperimentScale(IntEnum):
    """Relative size of a verification experiment run.

    Explicit values match what ``auto()`` assigned (1, 2, 3), so ordering
    comparisons between scales keep working.
    """
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
# Fixed federated-learning run configurations for the LEAF verification
# experiments, one entry per ExperimentScale, plus a ready-made provider
# for each (dataset, scale) pair.

CELEBA_SCALE_EXPERIMENT_METADATA_MAP: Dict[ExperimentScale, FixedExperimentMetadata] = {
    ExperimentScale.SMALL: dict(num_clients=None, num_rounds=30,
                                clients_per_round=2, batch_size=5,
                                local_epochs=10, val_steps=2),
    ExperimentScale.MEDIUM: dict(num_clients=None, num_rounds=100,
                                 clients_per_round=2, batch_size=5,
                                 local_epochs=10, val_steps=2),
    ExperimentScale.LARGE: dict(num_clients=None, num_rounds=400,
                                clients_per_round=2, batch_size=5,
                                local_epochs=20, val_steps=2),
}

celeba_small_experiment_metadata_provider = ExperimentMetadataProvider(
    CELEBA_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.SMALL])
celeba_medium_experiment_metadata_provider = ExperimentMetadataProvider(
    CELEBA_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.MEDIUM])
celeba_large_experiment_metadata_provider = ExperimentMetadataProvider(
    CELEBA_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.LARGE])

FEMNIST_SCALE_EXPERIMENT_METADATA_MAP: Dict[ExperimentScale, FixedExperimentMetadata] = {
    ExperimentScale.SMALL: dict(num_clients=None, num_rounds=30,
                                clients_per_round=2, batch_size=5,
                                local_epochs=10, val_steps=2),
    ExperimentScale.MEDIUM: dict(num_clients=None, num_rounds=100,
                                 clients_per_round=2, batch_size=5,
                                 local_epochs=10, val_steps=2),
    ExperimentScale.LARGE: dict(num_clients=None, num_rounds=400,
                                clients_per_round=3, batch_size=5,
                                local_epochs=20, val_steps=2),
}

femnist_small_experiment_metadata_provider = ExperimentMetadataProvider(
    FEMNIST_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.SMALL])
femnist_medium_experiment_metadata_provider = ExperimentMetadataProvider(
    FEMNIST_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.MEDIUM])
femnist_large_experiment_metadata_provider = ExperimentMetadataProvider(
    FEMNIST_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.LARGE])

# Shakespeare is a character-level LM task: far fewer rounds/epochs are used.
SHAKESPEARE_SCALE_EXPERIMENT_METADATA_MAP: Dict[ExperimentScale, FixedExperimentMetadata] = {
    ExperimentScale.SMALL: dict(num_clients=None, num_rounds=6,
                                clients_per_round=2, batch_size=5,
                                local_epochs=2, val_steps=1),
    ExperimentScale.MEDIUM: dict(num_clients=None, num_rounds=8,
                                 clients_per_round=2, batch_size=5,
                                 local_epochs=2, val_steps=1),
    ExperimentScale.LARGE: dict(num_clients=None, num_rounds=20,
                                clients_per_round=3, batch_size=5,
                                local_epochs=1, val_steps=1),
}

shakespeare_small_experiment_metadata_provider = ExperimentMetadataProvider(
    SHAKESPEARE_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.SMALL])
shakespeare_medium_experiment_metadata_provider = ExperimentMetadataProvider(
    SHAKESPEARE_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.MEDIUM])
shakespeare_large_experiment_metadata_provider = ExperimentMetadataProvider(
    SHAKESPEARE_SCALE_EXPERIMENT_METADATA_MAP[ExperimentScale.LARGE])
| 28.375
| 118
| 0.688877
| 358
| 3,632
| 6.569832
| 0.134078
| 0.168367
| 0.117347
| 0.132653
| 0.875
| 0.844388
| 0.82398
| 0.799745
| 0.799745
| 0.755952
| 0
| 0.021816
| 0.217511
| 3,632
| 128
| 119
| 28.375
| 0.805771
| 0
| 0
| 0.526786
| 0
| 0
| 0.170933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026786
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3cec6e4163724eb27c47f2cce4fabae2b578c3ca
| 30
|
py
|
Python
|
dags/odoodw/utils/__init__.py
|
kakkurij/odoodw-test
|
9f50b73b2d4107c2bfde9f4aebf51e7e70f251ab
|
[
"MIT"
] | null | null | null |
dags/odoodw/utils/__init__.py
|
kakkurij/odoodw-test
|
9f50b73b2d4107c2bfde9f4aebf51e7e70f251ab
|
[
"MIT"
] | null | null | null |
dags/odoodw/utils/__init__.py
|
kakkurij/odoodw-test
|
9f50b73b2d4107c2bfde9f4aebf51e7e70f251ab
|
[
"MIT"
] | null | null | null |
from .dw_setup import DWSetup
| 15
| 29
| 0.833333
| 5
| 30
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a72447b1785d7014243f0659a7bd0476f7a368c8
| 38
|
py
|
Python
|
jsdb/__init__.py
|
talwrii/jsdb
|
a1134856326bee8625c4a893d595113506597b44
|
[
"BSD-2-Clause"
] | 2
|
2020-08-28T19:15:11.000Z
|
2020-09-05T01:49:25.000Z
|
jsdb/__init__.py
|
talwrii/jsdb
|
a1134856326bee8625c4a893d595113506597b44
|
[
"BSD-2-Clause"
] | null | null | null |
jsdb/__init__.py
|
talwrii/jsdb
|
a1134856326bee8625c4a893d595113506597b44
|
[
"BSD-2-Clause"
] | null | null | null |
from .jsdb import Jsdb, DbClosedError
| 19
| 37
| 0.815789
| 5
| 38
| 6.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
59c135a3a043b7cd21b68b6f96aa58903615e78d
| 37
|
py
|
Python
|
packtets/utils/__init__.py
|
maxhutch/packtets
|
eba7d3d354da9bef50bfdbc48e6934c4e17f165c
|
[
"MIT"
] | 1
|
2017-12-13T18:24:14.000Z
|
2017-12-13T18:24:14.000Z
|
packtets/utils/__init__.py
|
maxhutch/packtets
|
eba7d3d354da9bef50bfdbc48e6934c4e17f165c
|
[
"MIT"
] | 4
|
2016-05-19T14:48:57.000Z
|
2016-05-19T19:30:54.000Z
|
packtets/utils/__init__.py
|
maxhutch/packtets
|
eba7d3d354da9bef50bfdbc48e6934c4e17f165c
|
[
"MIT"
] | null | null | null |
from .io import *
from .vis import *
| 12.333333
| 18
| 0.675676
| 6
| 37
| 4.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 37
| 2
| 19
| 18.5
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
59c76084bdc5f9ff51731e4d8519ac275b4767f0
| 2,263
|
py
|
Python
|
src_main/devtools/bin/separate_vcprojs.py
|
ArcadiusGFN/SourceEngine2007
|
51cd6d4f0f9ed901cb9b61456eb621a50ce44f55
|
[
"bzip2-1.0.6"
] | 25
|
2018-02-28T15:04:42.000Z
|
2021-08-16T03:49:00.000Z
|
tf2_src/devtools/bin/separate_vcprojs.py
|
Counter2828/TeamFortress2
|
1b81dded673d49adebf4d0958e52236ecc28a956
|
[
"MIT"
] | 1
|
2019-09-20T11:06:03.000Z
|
2019-09-20T11:06:03.000Z
|
tf2_src/devtools/bin/separate_vcprojs.py
|
Counter2828/TeamFortress2
|
1b81dded673d49adebf4d0958e52236ecc28a956
|
[
"MIT"
] | 9
|
2019-07-31T11:58:20.000Z
|
2021-08-31T11:18:15.000Z
|
from vsdotnetxmlparser import *
#print f.GetAttribute( 'VisualStudioProject\\Configurations\\Configuration\\<2>Tool\\CommandLine' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug DoD|Win32", "Release DoD|Win32"], 'cl_dll\\client_dod.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug CounterStrike|Win32", "Release CounterStrike|Win32"], 'cl_dll\\client_cs.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug HL1|Win32", "Release HL1|Win32"], 'cl_dll\\client_hl1.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug HL2|Win32", "Release HL2|Win32"], 'cl_dll\\client_hl2.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug TF2|Win32", "Release TF2|Win32"], 'cl_dll\\client_tf2.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug SDK|Win32", "Release SDK|Win32"], 'cl_dll\\client_temp_sdk.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug HL2MP|Win32", "Release HL2MP|Win32"], 'cl_dll\\client_hl2mp.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'cl_dll\\client.vcproj' ), ["Debug Episodic HL2|Win32", "Release Episodic HL2|Win32"], 'cl_dll\\client_episodic.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug DoD|Win32", "Release DoD|Win32"], 'dlls\\hl_dod.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug CounterStrike|Win32", "Release CounterStrike|Win32"], 'dlls\\hl_cs.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug HL1|Win32", "Release HL1|Win32"], 'dlls\\hl_hl1.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug HL2|Win32", "Release HL2|Win32"], 'dlls\\hl_hl2.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug TF2|Win32", "Release TF2|Win32"], 'dlls\\hl_tf2.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug SDK|Win32", "Release SDK|Win32"], 'dlls\\hl_temp_sdk.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug HL2MP|Win32", "Release HL2MP|Win32"], 'dlls\\hl_hl2mp.vcproj' )
WriteSeparateVCProj( LoadVCProj( 'dlls\\hl.vcproj' ), ["Debug Episodic HL2|Win32", "Release Episodic HL2|Win32"], 'dlls\\hl_episodic.vcproj' )
| 80.821429
| 154
| 0.735749
| 274
| 2,263
| 5.952555
| 0.120438
| 0.284488
| 0.107909
| 0.166769
| 0.827713
| 0.805641
| 0.805641
| 0.299203
| 0.061312
| 0
| 0
| 0.044669
| 0.079982
| 2,263
| 27
| 155
| 83.814815
| 0.738713
| 0.043305
| 0
| 0
| 0
| 0
| 0.582138
| 0.207311
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
59cc84ff386eccd17323c87cdb81cdc0fdefe19a
| 22,479
|
py
|
Python
|
scripts/ida_is_subvtable.py
|
RUB-SysSec/Marx
|
4e9dbf1e5cfaab12a6544521032ef44bd8c9dbb1
|
[
"MIT"
] | 118
|
2016-12-24T00:00:07.000Z
|
2022-03-19T06:19:49.000Z
|
scripts/ida_is_subvtable.py
|
RUB-SysSec/Marx
|
4e9dbf1e5cfaab12a6544521032ef44bd8c9dbb1
|
[
"MIT"
] | 1
|
2021-09-17T22:01:19.000Z
|
2021-09-18T16:56:38.000Z
|
scripts/ida_is_subvtable.py
|
RUB-SysSec/Marx
|
4e9dbf1e5cfaab12a6544521032ef44bd8c9dbb1
|
[
"MIT"
] | 14
|
2016-12-24T04:34:43.000Z
|
2021-12-19T16:04:45.000Z
|
#!/usr/bin/python2
from idc import *
from idaapi import *
from idautils import *
'''
Checks if the vtable is a possible sub-vtable.
'''
vtables = [ 0x1c74170, 0x1c74150, 0x1c74d90, 0x1c74d50, 0x15bae30, 0x15bae50, 0x15bae70, 0x15bae90, 0x15baeb0, 0x15e3650, 0x15e3690, 0x15e35d0, 0x15e3610, 0x15e48d0, 0x15e3770, 0x15e37b0, 0x15e3870, 0x15e38b0, 0x15e3a50, 0x15e3a10, 0x15e4ad0, 0x15e4b30, 0x15e4a70, 0x15e4c50, 0x15e4b90, 0x15e4cb0, 0x15e4690, 0x15e4bf0, 0x15e4d10, 0x15e4a10, 0x15e46f0, 0x15e50b0, 0x15e4d70, 0x15e5030, 0x15e4eb0, 0x15e4770, 0x15e4df0, 0x15e4f10, 0x15e4fd0, 0x15e4f70, 0x15e4e50, 0x15e4910, 0x15e4950, 0x15e4990, 0x15e49d0, 0x1d1c3d0, 0x1d1c430, 0x1d17bf0, 0x1d1c490, 0x1d1c4f0, 0x1d1c590, 0x1d1c5d0, 0x1d1c8b0, 0x1c741f0, 0x1d14910, 0x1d1c8f0, 0x1d1ca50, 0x1d1ca90, 0x1d1cad0, 0x1d1c9d0, 0x1d1ca10, 0x1d1ceb0, 0x1d1cef0, 0x1d1cf30, 0x1d1d0b0, 0x1d1d0f0, 0x1d1d130, 0x1d1d070, 0x1d1e790, 0x1d1e7b0, 0x1d1e7d0, 0x1d345b0, 0x1d34570, 0x1d14410, 0x1d14450, 0x1d15270, 0x1d152b0, 0x1d1adf0, 0x1d155b0, 0x1d155f0, 0x1d190b0, 0x1d15b50, 0x1d18710, 0x1d15b70, 0x1d29c30, 0x1d29b30, 0x1d29c70, 0x1d29b70, 0x1d29bb0, 0x1d29ab0, 0x1d29bf0, 0x1d29af0, 0x1d306b0, 0x1d306f0, 0x1d30770, 0x1d30730, 0x1d30630, 0x1d30670, 0x1d30270, 0x1d30290, 0x1d346f0, 0x1d34990, 0x1d35050, 0x1d35170, 0x1d34ad0, 0x1d350b0, 0x1d34ff0, 0x1d35110, 0x1d1e750, 0x1d355f0, 0x1d35630, 0x1d3ccf0, 0x1d3cd10, 0x1c749d0, 0x1c74830, 0x1c74b70, 0x1c76690, 0x1c7a150, 0x1c77590, 0x1c7c310, 0x1c7ae10, 0x1c76090, 0x1c76390, 0x1c784b0, 0x1c7bd10, 0x1c7a490, 0x1c7b110, 0x1c7a7d0, 0x1c77290, 0x1c7ab10, 0x1c787f0, 0x1c78190, 0x1c76c90, 0x1c7ba10, 0x1c78b30, 0x1c79b30, 0x1c78e70, 0x1c7c910, 0x1c76f90, 0x1c791b0, 0x1c7b710, 0x1c75d90, 0x1c7c610, 0x1c794f0, 0x1c77b90, 0x1c77e90, 0x1c7b410, 0x1c75a90, 0x1c79810, 0x1c77890, 0x1c76990, 0x1c79e50, 0x1c7c010, 0x1d14330, 0x1c74250, 0x1d14370, 0x1d14490, 0x1d1c390, 0x1d144d0, 0x1d3b470, 0x1d36c90, 0x1d36cd0, 0x1d3ae10, 0x1d3ae50, 0x1d3add0, 0x1d3afd0, 0x1d3afb0, 0x15b54d0, 0x15b5590, 0x15b5550, 0x15b5510, 0x15caad0, 0x15cab10, 0x15cab50, 0x15caa90, 0x15d4730, 0x15d46f0, 0x15a8d70, 0x15d4e90, 0x15f80f0, 
0x15f9030, 0x15ed5b0, 0x15f4590, 0x15e3130, 0x15ec770, 0x15f5f30, 0x15eb070, 0x15e66f0, 0x15f4050, 0x15f4b50, 0x15fa370, 0x15eb190, 0x15ef070, 0x15f04d0, 0x15eab30, 0x15f8270, 0x15f2b10, 0x15efa10, 0x15f4f50, 0x15e5f90, 0x15ed550, 0x15f9e30, 0x15f2c30, 0x15ebe70, 0x15eb130, 0x15e7b50, 0x15e8f90, 0x15ec7d0, 0x15fa5b0, 0x15ef6f0, 0x15f4850, 0x15ef3d0, 0x15ec710, 0x15f46f0, 0x15ed4f0, 0x15f3e70, 0x15ec830, 0x15f4cb0, 0x15f43d0, 0x15ed610, 0x15ea730, 0x15ea2f0, 0x15e9eb0, 0x15f3710, 0x15f3950, 0x15eb0d0, 0x15e9a70, 0x15f32d0, 0x15e8410, 0x15f2e90, 0x15f8190, 0x15efd30, 0x15ee810, 0x15ebdb0, 0x15fa0b0, 0x15ee530, 0x15f7d90, 0x15f3a70, 0x15e6cd0, 0x15ed870, 0x15f1970, 0x15eecb0, 0x15f9f70, 0x15f3db0, 0x15f6850, 0x15ee210, 0x15f9a70, 0x15ee990, 0x15f91f0, 0x15e89b0, 0x15f9710, 0x15edef0, 0x15f3430, 0x15e57f0, 0x15f4210, 0x15edbd0, 0x15f9ed0, 0x15f9430, 0x15f8950, 0x15e5a90, 0x15e7370, 0x15fa430, 0x15f8bb0, 0x15f49f0, 0x15e5270, 0x15ebe10, 0x15e9550, 0x15f54b0, 0x15f0ad0, 0x15ebd50, 0x15e5d30, 0x15f30b0, 0x15f7eb0, 0x15f0d50, 0x15fa010, 0x15f1370, 0x15f3e10, 0x15e5550, 0x15e32d0, 0x15e31d0, 0x15e3250, 0x15e3210, 0x15e30b0, 0x15e3310, 0x15e3350, 0x15e3290, 0x15e3190, 0x1d29930, 0x1d19ef0, 0x15e3410, 0x15e3450, 0x15e3df0, 0x15e3d30, 0x15e3d90, 0x15e3cd0, 0x15fa210, 0x15fa250, 0x15fa290, 0x1d3c990, 0x1d3c9f0, 0x1d3c930, 0x1d3c650, 0x1d3ca50, 0x1d3be10, 0x1d3c190, 0x1d3be50, 0x1d3bf50, 0x1d3bf10, 0x1d3c250, 0x1d3be90, 0x1d3bed0, 0x1d3c210, 0x1d3c350, 0x1d3c390, 0x1d3bb10, 0x1d3c290, 0x1d3c3d0, 0x1d3c2d0, 0x1d3c150, 0x1d3c1d0, 0x1d3c310, 0x1d3bff0, 0x1d3c7b0, 0x1d3c8d0, 0x1d3c6f0, 0x1d3c810, 0x1d3c5f0, 0x1d3c750, 0x1d3c870, 0x1d3c030, 0x1d3c090, 0x1d3bf90, 0x1d3c0f0, 0x1d3bab0, 0x1d3caf0, 0x1d3c6b0, 0x1d3cab0, 0x1d3cdf0, 0x1d3ce30, 0x1d3cd30, 0x1d3cd70, 0x1d3ce70, 0x1d3cdb0, 0x1d3ccb0, 0x1d3b9b0, 0x1d3d790, 0x1d3d130, 0x1d3d8b0, 0x1d3d690, 0x1d3d590, 0x1d3d7f0, 0x1d3d910, 0x1d3d610, 0x1d3d510, 0x1d3d0d0, 0x1d3d850, 0x1d3d010, 0x1d3d9d0, 0x1d3da50, 0x1d3d3f0, 0x1d3d2f0, 
0x1d3db30, 0x1d3d1f0, 0x1d1cdd0, 0x1c74eb0, 0x1d1ce30, 0x1c74ef0, 0x1c8edb0, 0x1c8ecf0, 0x1c8ec90, 0x1c8edf0, 0x1c8ee30, 0x1c8ee70, 0x1c74290, 0x1c8ec30, 0x1c8ed50, 0x1c96030, 0x1c95fb0, 0x1c96330, 0x1c96230, 0x1c96130, 0x1c962b0, 0x1c961b0, 0x1c960b0, 0x1c8eaf0, 0x1c96470, 0x1c96410, 0x1c96530, 0x1c8ebd0, 0x1c96590, 0x1c963b0, 0x1c964d0, 0x1c8eb70, 0x1ca2ab0, 0x1ca43b0, 0x1ca57f0, 0x1ca26f0, 0x1ca3b30, 0x1ca2af0, 0x1ca22b0, 0x1ca36f0, 0x1ca41f0, 0x1ca32b0, 0x1ca1a30, 0x1ca2e70, 0x1ca38b0, 0x1ca42b0, 0x1ca2a30, 0x1ca2b30, 0x1ca2330, 0x1ca52b0, 0x1ca4370, 0x1ca3a30, 0x1ca4e70, 0x1ca35f0, 0x1ca21b0, 0x1ca4a30, 0x1ca1d70, 0x1ca31b0, 0x1ca45f0, 0x1ca1930, 0x1ca5a30, 0x1ca2d70, 0x1ca3b70, 0x1ca41b0, 0x1ca14f0, 0x1ca55f0, 0x1ca2930, 0x1ca3d70, 0x1ca51b0, 0x1ca24f0, 0x1ca3930, 0x1ca4d70, 0x1ca20b0, 0x1ca34f0, 0x1ca4930, 0x1ca25f0, 0x1ca1c70, 0x1ca4bb0, 0x1ca30b0, 0x1ca2630, 0x1ca44f0, 0x1ca1830, 0x1ca5930, 0x1ca2c70, 0x1ca40b0, 0x1ca26b0, 0x1ca13f0, 0x1ca54f0, 0x1ca2830, 0x1ca3770, 0x1ca3c70, 0x1ca50b0, 0x1ca23f0, 0x1ca3830, 0x1ca4770, 0x1ca4c70, 0x1ca1fb0, 0x1ca47b0, 0x1ca33f0, 0x1ca47f0, 0x1ca36b0, 0x1ca4830, 0x1ca2170, 0x1ca4fb0, 0x1ca4f70, 0x1ca3730, 0x1ca4b70, 0x1ca1eb0, 0x1ca56f0, 0x1ca32f0, 0x1ca4730, 0x1ca29f0, 0x1ca1a70, 0x1ca2eb0, 0x1ca42f0, 0x1ca1630, 0x1ca5730, 0x1ca2a70, 0x1ca3eb0, 0x1ca11f0, 0x1ca52f0, 0x1ca2730, 0x1ca21f0, 0x1ca3630, 0x1ca5030, 0x1ca2b70, 0x1ca1db0, 0x1ca2bb0, 0x1ca31f0, 0x1ca4630, 0x1ca4bf0, 0x1ca1970, 0x1ca5770, 0x1ca2db0, 0x1ca4c30, 0x1ca1530, 0x1ca5630, 0x1ca2970, 0x1ca3db0, 0x1ca2770, 0x1ca51f0, 0x1ca2530, 0x1ca3970, 0x1ca4db0, 0x1ca1230, 0x1ca20f0, 0x1ca3530, 0x1ca4970, 0x1ca1cb0, 0x1ca30f0, 0x1ca4530, 0x1ca1870, 0x1ca2cb0, 0x1ca40f0, 0x1ca1430, 0x1ca5530, 0x1ca2870, 0x1ca3cb0, 0x1ca50f0, 0x1ca2430, 0x1ca3870, 0x1ca2ef0, 0x1ca1ff0, 0x1ca2f30, 0x1ca3430, 0x1ca4870, 0x1ca1bb0, 0x1ca2ff0, 0x1ca2fb0, 0x1ca4430, 0x1ca5870, 0x1ca4ff0, 0x1ca5830, 0x1ca3330, 0x1ca1170, 0x1ca22f0, 0x1ca11b0, 0x1ca4330, 0x1ca1670, 0x1ca3ef0, 0x1ca5330, 
0x1ca2670, 0x1ca3ab0, 0x1ca4ef0, 0x1ca12b0, 0x1ca2230, 0x1ca3670, 0x1ca12f0, 0x1ca4ab0, 0x1ca1330, 0x1ca3230, 0x1ca4670, 0x1ca3370, 0x1ca19b0, 0x1ca2df0, 0x1ca1df0, 0x1ca33b0, 0x1ca4230, 0x1ca1570, 0x1ca5670, 0x1ca53f0, 0x1ca29b0, 0x1ca1130, 0x1ca5430, 0x1ca5230, 0x1ca2570, 0x1ca39b0, 0x1ca1370, 0x1ca2130, 0x1ca49b0, 0x1ca1cf0, 0x1ca3130, 0x1ca4570, 0x1ca18b0, 0x1ca3570, 0x1ca59b0, 0x1ca2cf0, 0x1ca4130, 0x1ca35b0, 0x1ca2f70, 0x1ca1470, 0x1ca5570, 0x1ca28b0, 0x1ca15f0, 0x1ca3cf0, 0x1ca4eb0, 0x1ca53b0, 0x1ca5130, 0x1ca2470, 0x1ca4cf0, 0x1ca1e70, 0x1ca2030, 0x1ca16b0, 0x1ca3470, 0x1ca48b0, 0x1ca16f0, 0x1ca3df0, 0x1ca1bf0, 0x1ca3030, 0x1ca1730, 0x1ca4470, 0x1ca17b0, 0x1ca58b0, 0x1ca1770, 0x1ca2bf0, 0x1ca43f0, 0x1ca4030, 0x1ca37b0, 0x1ca5470, 0x1ca27b0, 0x1ca2370, 0x1ca4cb0, 0x1ca57b0, 0x1ca5970, 0x1ca4df0, 0x1ca3f30, 0x1ca1270, 0x1ca5370, 0x1ca19f0, 0x1ca3af0, 0x1ca4f30, 0x1ca2270, 0x1ca4af0, 0x1ca1e30, 0x1ca1ab0, 0x1ca3270, 0x1ca46b0, 0x1ca1af0, 0x1ca2e30, 0x1ca1b30, 0x1ca4270, 0x1ca15b0, 0x1ca56b0, 0x1ca1c30, 0x1ca1b70, 0x1ca3e30, 0x1ca3bb0, 0x1ca25b0, 0x1ca3bf0, 0x1ca39f0, 0x1ca4e30, 0x1ca49f0, 0x1ca4a70, 0x1ca4b30, 0x1ca1d30, 0x1ca1f70, 0x1ca3170, 0x1ca45b0, 0x1ca18f0, 0x1ca59f0, 0x1ca2d30, 0x1ca3d30, 0x1ca4170, 0x1ca5270, 0x1ca14b0, 0x1ca55b0, 0x1ca28f0, 0x1ca5170, 0x1ca24b0, 0x1ca38f0, 0x1ca4d30, 0x1ca2070, 0x1ca34b0, 0x1ca46f0, 0x1ca48f0, 0x1ca3e70, 0x1ca3a70, 0x1ca3070, 0x1ca44b0, 0x1ca17f0, 0x1ca58f0, 0x1ca1ef0, 0x1ca0190, 0x1ca2c30, 0x1ca4070, 0x1ca1f30, 0x1ca13b0, 0x1ca54b0, 0x1ca27f0, 0x1ca3f70, 0x1ca3c30, 0x1ca5070, 0x1ca3fb0, 0x1ca23b0, 0x1ca37f0, 0x1ca3ff0, 0x1d15770, 0x1ca6630, 0x1d156b0, 0x1d157d0, 0x1d15710, 0x1ca65f0, 0x1cbc610, 0x1d13ed0, 0x1d13ff0, 0x1cd0890, 0x1d13f30, 0x1cc32b0, 0x1d14050, 0x1cd08f0, 0x1c75970, 0x1cd0950, 0x1cd09b0, 0x1c759d0, 0x1c75a30, 0x1cc3310, 0x1d13f90, 0x1cce790, 0x1cc37b0, 0x1cc38b0, 0x1cc3770, 0x1cc3830, 0x1cc3730, 0x1ccffb0, 0x1cc3870, 0x1cd0510, 0x1d17310, 0x1cc76b0, 0x1d17350, 0x1cc3270, 0x1ccf330, 0x1ccaa30, 
0x1ccde70, 0x1ccddb0, 0x1ccddf0, 0x1ccde30, 0x1cc33d0, 0x1cc3450, 0x1cc3410, 0x1cce3f8, 0x1cce438, 0x1cce4a8, 0x1cce4f0, 0x1cce518, 0x1cce558, 0x1cce5c8, 0x1cc3370, 0x1cce470, 0x1cce590, 0x1cc3390, 0x1cc38f0, 0x1cce3d0, 0x1d141b0, 0x1d14190, 0x1d14870, 0x1d30c50, 0x1d148b0, 0x1d30c90, 0x1d14830, 0x1d1b1b0, 0x1d19710, 0x1d1b630, 0x1d1b930, 0x1d1b0b0, 0x1d1c0b0, 0x1d1b830, 0x1d1afb0, 0x1d1bfb0, 0x1d15630, 0x1d1b730, 0x1d1aeb0, 0x1d1beb0, 0x1d1bdb0, 0x1d1b530, 0x1d1bcb0, 0x1d1b430, 0x1d1bbb0, 0x1d1b330, 0x1d1bab0, 0x1d1b230, 0x1d1b9b0, 0x1d1b130, 0x1d1c130, 0x1d1b8b0, 0x1d1b030, 0x1d1c030, 0x1d1b7b0, 0x1d1af30, 0x1d1bf30, 0x1d1b6b0, 0x1d1ae30, 0x1d1be30, 0x1d1b5b0, 0x1d1bd30, 0x1d1b4b0, 0x1d1bc30, 0x1d1b3b0, 0x1d1bb30, 0x1d1b2b0, 0x1d1ba30, 0x1d1c2d0, 0x1d1c830, 0x1d1c310, 0x1d1d030, 0x1d15950, 0x1d15990, 0x1d15910, 0x1d14270, 0x15e30f0, 0x1d361f0, 0x1d159d0, 0x1d142b0, 0x1c7e350, 0x1c7e3d0, 0x1c7e2d0, 0x1cee050, 0x1cee230, 0x1cee190, 0x1cebd70, 0x1cee0f0, 0x1d19bb0, 0x1d19bf0, 0x1d19c30, 0x1cec030, 0x1cebff0, 0x1c747f0, 0x1d17e10, 0x1d17e50, 0x1d17e90, 0x1c74e30, 0x1d17ef0, 0x1c74df0, 0x1d18690, 0x1d186d0, 0x1d152f0, 0x1d151f0, 0x1d15350, 0x1d154c8, 0x1d15410, 0x1d153b0, 0x1d15470, 0x1d3cf30, 0x1d3cb50, 0x1d3ba30, 0x1d3cfb0, 0x1d3cf70, 0x1d20910, 0x1d0c1f0, 0x1cb6470, 0x1d30d10, 0x1cf1010, 0x1c86070, 0x1d204d0, 0x1d02090, 0x1d286d0, 0x1cbc0b0, 0x1c800d0, 0x1d22b30, 0x1cfddf0, 0x1d09950, 0x1cda130, 0x1d04990, 0x1d21090, 0x1cea170, 0x1c85bb0, 0x1cde3d0, 0x1d31490, 0x1d1f810, 0x1c90850, 0x1d20c50, 0x1d28e50, 0x1cb4050, 0x1cdb5b0, 0x1d20810, 0x1d20550, 0x1cef5b0, 0x1cd5730, 0x1d203d0, 0x1cf8bf0, 0x1cfa070, 0x1c88ff0, 0x1cd42b0, 0x1d06330, 0x1d22ff0, 0x1d34310, 0x1ce35d0, 0x1c81df0, 0x1cc9ff0, 0x1cec070, 0x1ce6910, 0x1c921f0, 0x1cbdad0, 0x1cf56b0, 0x1d20f90, 0x1cb97b0, 0x1c9e2d0, 0x1d25130, 0x1d20b50, 0x1cd70d0, 0x1c843f0, 0x1c95a70, 0x1cc1350, 0x1d34490, 0x1ccc410, 0x1cfa590, 0x1d20710, 0x1caca10, 0x1c85230, 0x1d07cd0, 0x1d11530, 0x1c7e450, 0x1cd7610, 0x1d202d0, 
0x1cdab70, 0x1d10490, 0x1ce1710, 0x1c95550, 0x1cc6230, 0x1ce82b0, 0x1d0a330, 0x1c93b90, 0x1cbf470, 0x1c7de10, 0x1ca70d0, 0x1c97a70, 0x1cb0290, 0x1d212d0, 0x1d34390, 0x1c9fc70, 0x1ce4530, 0x1c8c550, 0x1c88670, 0x1d20e90, 0x1d0db90, 0x1d20290, 0x1cec570, 0x1c80588, 0x1caa4d0, 0x1cb4590, 0x1cfbf30, 0x1d20e50, 0x1d31950, 0x1d02ad0, 0x1c945d0, 0x1ca75f0, 0x1cdc510, 0x1d20610, 0x1ccbef0, 0x1ccfff0, 0x1cbf9d0, 0x1ce9c50, 0x1d0bcd0, 0x1d1ed90, 0x1cb5f50, 0x1ce2670, 0x1ca01d0, 0x1c99410, 0x1cac4d0, 0x1ca6690, 0x1cf05d0, 0x1cf86d0, 0x1cd3870, 0x1c848b0, 0x1ca06f0, 0x1d211d0, 0x1caaa30, 0x1cf4290, 0x1c93150, 0x1d293d0, 0x1d08710, 0x1d341d0, 0x1cfd8d0, 0x1d04470, 0x1c754b0, 0x1ca9f70, 0x1cc6750, 0x1d20d90, 0x1cddeb0, 0x1ccd890, 0x1cc8130, 0x1d28d50, 0x1c8a7b8, 0x1c90330, 0x1d242f0, 0x1ceb5f0, 0x1d0d670, 0x1ce2150, 0x1d0ff70, 0x1cb78f0, 0x1d20510, 0x1d335d0, 0x1d34350, 0x1cc7c10, 0x1d20cd0, 0x1cc1870, 0x1cc08d0, 0x1d210d0, 0x1cae8f0, 0x1d34190, 0x1d0cc30, 0x1ccc930, 0x1cc9ad0, 0x1cc4370, 0x1ce63f0, 0x1d20c90, 0x1cbd5b0, 0x1d28e90, 0x1cf5190, 0x1ced150, 0x1cfc970, 0x1cb9290, 0x1cae3d0, 0x1c8acf0, 0x1d344d0, 0x1c83f30, 0x1d20850, 0x1d129b0, 0x1d21c90, 0x1cd5c50, 0x1d34b30, 0x1cd6bb0, 0x1d20410, 0x1d26950, 0x1cc0e30, 0x1cbea30, 0x1cedb50, 0x1d00c10, 0x1ce4a50, 0x1cba710, 0x1d077b0, 0x1d11010, 0x1cda650, 0x1d32c50, 0x1c87370, 0x1ca8ad0, 0x1ce7d90, 0x1cbef50, 0x1d181d0, 0x1d20fd0, 0x1cf0af0, 0x1cbac30, 0x1cafd70, 0x1caba50, 0x1d20b90, 0x1ce2b90, 0x1cbb150, 0x1d20750, 0x1ce11f0, 0x1d025b0, 0x1c80f50, 0x1c7f750, 0x1d20310, 0x1d234b0, 0x1ca0c10, 0x1d26eb0, 0x1cdbff0, 0x1ccb9d0, 0x1d21150, 0x1c835b0, 0x1cabf70, 0x1ce9730, 0x1ce9210, 0x1d0b7b0, 0x1cc6c70, 0x1c7f290, 0x1d21310, 0x1cb1710, 0x1d34210, 0x1cd8cb0, 0x1d255f0, 0x1cb0cd0, 0x1ce87d0, 0x1d20ed0, 0x1d11f70, 0x1cd4cf0, 0x1ccecf0, 0x1cf6d30, 0x1d20a90, 0x1d28d90, 0x1cfd3b0, 0x1d28c90, 0x1d03f50, 0x1cff790, 0x1d0ad70, 0x1c869f0, 0x1d322d0, 0x1d20650, 0x1ce8cf0, 0x1c9adb0, 0x1cdd990, 0x1ccd370, 0x1d1edd0, 0x1c8fe10, 0x1cb73d0, 
0x1cdee10, 0x1ce07b0, 0x1c9a890, 0x1cb30b0, 0x1cf1a50, 0x1ce4f70, 0x1c8b6b0, 0x1d26e60, 0x1d21210, 0x1d342d0, 0x1cfba10, 0x1ceeb70, 0x1c805d0, 0x1c89e30, 0x1cff270, 0x1c8eeb0, 0x1cf81b0, 0x1d22150, 0x1cc03b0, 0x1d12ed0, 0x1d20dd0, 0x1cce7d0, 0x1d058f0, 0x1d31e10, 0x1c82c30, 0x1c8cf10, 0x1cd8790, 0x1c7e910, 0x1d20990, 0x1cdf330, 0x1d28b90, 0x1cacf50, 0x1d30d90, 0x1cc95b0, 0x1cc3e50, 0x1ce5ed0, 0x1c917b0, 0x1cbd090, 0x1cd0f70, 0x1d362d0, 0x1d0eaf0, 0x1cf4c70, 0x1caaf90, 0x1cb8d70, 0x1cadeb0, 0x1cf33f0, 0x1d0f010, 0x1cd6690, 0x1c822b0, 0x1c95030, 0x1d22670, 0x1d006f0, 0x1d21110, 0x1d10af0, 0x1cef090, 0x1d26430, 0x1ce0cd0, 0x1ceb0d0, 0x1d28ed0, 0x1c9b2d0, 0x1cc57f0, 0x1cf9110, 0x1d20a50, 0x1c97030, 0x1d20890, 0x1d01130, 0x1d19150, 0x1cd3d90, 0x1c98ef0, 0x1d20450, 0x1d28c50, 0x1cc22b0, 0x1cfb4f0, 0x1cd91d0, 0x1cdf850, 0x1cb11f0, 0x1d28210, 0x1d30e50, 0x1ca6bb0, 0x1d08c30, 0x1d12490, 0x1cd5210, 0x1cc4db0, 0x1d34250, 0x1c9f230, 0x1cdbad0, 0x1cf7250, 0x1cfe310, 0x1ce30b0, 0x1d21010, 0x1cc7190, 0x1d0b290, 0x1cb5510, 0x1d07290, 0x1c989d0, 0x1d20bd0, 0x1cc52d0, 0x1d28dd0, 0x1c856f0, 0x1d30fd0, 0x1d20790, 0x1c8c070, 0x1cd3350, 0x1cfce90, 0x1cc4890, 0x1d343d0, 0x1d03a30, 0x1d25ab0, 0x1d20350, 0x1cdd470, 0x1c8d3d0, 0x1ccce50, 0x1cc76f0, 0x1ce4010, 0x1c82770, 0x1c88b30, 0x1ceabb0, 0x1d34410, 0x1ccf410, 0x1cb6eb0, 0x1c9a370, 0x1cb2b90, 0x1cf1530, 0x1d20f10, 0x1cb7e10, 0x1cd1490, 0x1d19f30, 0x1cd47d0, 0x1d27890, 0x1c894b0, 0x1d20ad0, 0x1cf7c90, 0x1d28cd0, 0x1c9de10, 0x1cbfe90, 0x1cfe830, 0x1cb2150, 0x1cecbb0, 0x1d053d0, 0x1d03510, 0x1d20690, 0x1cd2e30, 0x1ca9530, 0x1cd8270, 0x1d23970, 0x1c97550, 0x1cc9090, 0x1cc3930, 0x1d20250, 0x1c91290, 0x1ce3af0, 0x1cbcb70, 0x1d0e5d0, 0x1cf4750, 0x1c8f8f0, 0x1cb8850, 0x1cad990, 0x1cb35d0, 0x1d21250, 0x1d09e70, 0x1cefff0, 0x1cd6170, 0x1c94b10, 0x1ced650, 0x1d20e10, 0x1cf9630, 0x1cd2910, 0x1ca9010, 0x1c93670, 0x1d001d0, 0x1d06d70, 0x1c881b0, 0x1d209d0, 0x1d33a90, 0x1d28bd0, 0x1cd9c10, 0x1d30dd0, 0x1c8f3d0, 0x1ce7350, 0x1c92c30, 0x1cbe510, 
0x1d20590, 0x1ccf930, 0x1c8a810, 0x1cf60f0, 0x1cba1f0, 0x1c96b10, 0x1caf330, 0x1c9ed10, 0x1c9f750, 0x1d1e8d0, 0x1d34290, 0x1cc1d90, 0x1cfafd0, 0x1c8ca30, 0x1d01b70, 0x1d20d10, 0x1caf850, 0x1d28f10, 0x1ce7870, 0x1cb4ff0, 0x1d208d0, 0x1cbbb90, 0x1c984b0, 0x1d30cd0, 0x1c818d0, 0x1cf2ed0, 0x1d247b0, 0x1d20490, 0x1cf3910, 0x1d09430, 0x1c99930, 0x1c87830, 0x1d33110, 0x1cbf980, 0x1ca8030, 0x1d13910, 0x1cd19b0, 0x1cdcf50, 0x1d0d150, 0x1d21050, 0x1cea690, 0x1d0c710, 0x1cb5a30, 0x1cb6990, 0x1d24c70, 0x1cf7770, 0x1d20c10, 0x1d28e10, 0x1c99e50, 0x1cb2670, 0x1d27370, 0x1c83a70, 0x1c81410, 0x1d207d0, 0x1cefad0, 0x1d1e250, 0x1d20950, 0x1d20390, 0x1cb3b10, 0x1ca8590, 0x1cc8650, 0x1d04eb0, 0x1d217d0, 0x1d1f2f0, 0x1cf9b50, 0x1cf2490, 0x1cde8f0, 0x1cc8b70, 0x1d34450, 0x1ce5490, 0x1d1fd30, 0x1c8bb90, 0x1c90d70, 0x1cbc650, 0x1c8e0b0, 0x1cb8330, 0x1cad470, 0x1cf5bd0, 0x1d30d50, 0x1d20f50, 0x1cf29b0, 0x1c8b1d0, 0x1c7fc10, 0x1c86eb0, 0x1ccb4b0, 0x1d32790, 0x1cb1c30, 0x1d20b10, 0x1d28d10, 0x1cf1f70, 0x1d0e0b0, 0x1d206d0, 0x1d06850, 0x1cffcb0, 0x1c91cd0, 0x1cd96f0, 0x1c87cf0, 0x1ce0290, 0x1cca510, 0x1cc5d10, 0x1ce6e30, 0x1c92710, 0x1cbdff0, 0x1d0fa50, 0x1c80a90, 0x1c8a2f0, 0x1cb9cd0, 0x1c7ddd0, 0x1c965f0, 0x1caee10, 0x1ccaf90, 0x1c9e7f0, 0x1cdfd70, 0x1d21290, 0x1d27d50, 0x1c81d98, 0x1cab4f0, 0x1c830f0, 0x1cd0a50, 0x1cf3dd0, 0x1c7edd0, 0x1ca9a50, 0x1c84d70, 0x1cfaab0, 0x1cfed50, 0x1d05e10, 0x1d01650, 0x1c8e5d0, 0x1d36790, 0x1d081f0, 0x1d20a10, 0x1d11a50, 0x1d28c10, 0x1d30e10, 0x1cdb090, 0x1ccaa70, 0x1d23e30, 0x1ce1c30, 0x1d205d0, 0x1d0a850, 0x1c940b0, 0x1cb4ad0, 0x1c7ccd0, 0x1c86530, 0x1cd1ed0, 0x1ce59b0, 0x1cbb670, 0x1c97f90, 0x1cb07b0, 0x1d0f530, 0x1d25f70, 0x1cd23f0, 0x1d21190, 0x1cfc450, 0x1ccdeb0, 0x1d02ff0, 0x1ca7b10, 0x1d133f0, 0x1c75470, 0x1d20d50, 0x1cee6b0, 0x1c89970, 0x1cdca30, 0x1ccfeb0, 0x1ccfe50, 0x1ccff10, 0x1cc3570, 0x1d17630, 0x1ccf210, 0x1cc3650, 0x1cc3490, 0x1d17550, 0x1d17230, 0x1d179b0, 0x1cc2fd0, 0x1cce6b0, 0x1cc3190, 0x1d17390, 0x1d178d0, 0x1d17470, 0x1d177f0, 
0x1cc30b0, 0x1d17710, 0x1cee650, 0x1cf0510, 0x1cf0570, 0x1d19630, 0x1d19610, 0x1c74190, 0x1c742d0, 0x1c743b0, 0x15e4810, 0x1d1a430, 0x15e4850, 0x1d19670, 0x1d3ae90, 0x1d1a470, 0x15e4890, 0x1c74310, 0x1d36250, 0x1d1a4b0, 0x1d3aed0, 0x15e47d0, 0x1c74350, 0x1d14b90, 0x1d14e90, 0x1d14d10, 0x1d19950, 0x1d19990, 0x1d160c8, 0x1d161d0, 0x1d16408, 0x1d16430, 0x1d16690, 0x1d16520, 0x1d16548, 0x1d16db0, 0x1d16638, 0x1d16660, 0x1d16b90, 0x1d16bd0, 0x1d160f0, 0x1d16768, 0x1d18bf0, 0x1d187d8, 0x1d16908, 0x1d15ff0, 0x1d15bb0, 0x1d169d8, 0x1d16a10, 0x1d16a88, 0x1d16c70, 0x1d16ab0, 0x1d18b30, 0x1d16740, 0x1d16850, 0x1d16890, 0x1d16c38, 0x1d16cd8, 0x1d18770, 0x1d16d78, 0x1d16e18, 0x1d16e50, 0x1d16d10, 0x1d16ef0, 0x1d16350, 0x1d16590, 0x1d16eb8, 0x1d16fa0, 0x1d16fc8, 0x1d16470, 0x1d16030, 0x1d167b0, 0x1d199d0, 0x1d16af0, 0x1d16270, 0x1d18150, 0x1d16390, 0x1d16970, 0x1d16190, 0x1d15f50, 0x1d18c50, 0x1d15f90, 0x1d16930, 0x1d162b0, 0x1d15f10, 0x1d29d90, 0x1d29dd0, 0x1d29d10, 0x1d345f0, 0x1d298f0, 0x1d1a4f0, 0x1cc0df0, 0x1d142f0, 0x1c74490, 0x1d1a3f0, 0x1d17c90, 0x1d15a90, 0x1d18730, 0x1d1a530, 0x1d29d50, 0x1c744d0, 0x1c7cc30, 0x1cbc5d0, 0x1d17cd0, 0x1c7d190, 0x1d29970, 0x1d1c350, 0x1c74510, 0x1d17dd0, 0x1d17d10, 0x1d17d50, 0x1d1c7b0, 0x1d30230, 0x1c74450, 0x1d15a10, 0x1d15a50, 0x1d18f30, 0x1d17c50, 0x1d17d90, 0x1d1cb30, 0x1d2a190, 0x1d2a2d0, 0x1d2a0f0, 0x1d2a230, 0x1d2a370, 0x1d2a050, 0x1d2a010, 0x1d29f30, 0x1c746b0, 0x15f80b0, 0x1c746f0, 0x1d2a410, 0x1d3b0b0, 0x1d15070, 0x15e3c90, 0x1d1e870, 0x1d2fef0, 0x1d2a470, 0x1d2e1c0, 0x1d2c3e8, 0x1d2b180, 0x1d2eac8, 0x1d2ad40, 0x1d2af40, 0x1d2bd80, 0x1d2b520, 0x1d2b720, 0x1d2fa60, 0x1d2bb78, 0x1d2fcc0, 0x1d2bf80, 0x1d2c078, 0x1d2e0a8, 0x1d2f050, 0x1d2c140, 0x1d2fc10, 0x1d2d5a8, 0x1d2e170, 0x1d2c238, 0x1d2e2a8, 0x1d2f608, 0x1d2e488, 0x1d2c4e0, 0x1d1e830, 0x1d2c5a8, 0x1d2e688, 0x1d2c698, 0x1d2de68, 0x1d2e858, 0x1d2a998, 0x1d2c9f8, 0x1d2ec78, 0x1d2bc70, 0x1d2ac48, 0x1d2ed88, 0x1d2cdb8, 0x1d2ae28, 0x1d2cf48, 0x1d2bd38, 0x1d2ef88, 0x1d2b028, 
0x1d2d148, 0x1d2f158, 0x1d2b0f0, 0x1d2e750, 0x1d2b278, 0x1d2b340, 0x1d2c870, 0x1d2d3c8, 0x1d2f438, 0x1d2dd50, 0x1d2b458, 0x1d2d210, 0x1d2f500, 0x1d2aef0, 0x1d2ebb0, 0x1d2b608, 0x1d2be68, 0x1d2dab0, 0x1d2d7a8, 0x1d2f7f8, 0x1d2b818, 0x1d2d870, 0x1d2b8e0, 0x1d2f988, 0x1d2b9d8, 0x1d2a8b0, 0x1d2fb48, 0x1d2d9e8, 0x1d2dc88, 0x1d2bf30, 0x1d2cbb0, 0x1d2fe00, 0x1d2e370, 0x1d2fec8, 0x1d2b6d0, 0x1d2d490, 0x1d2aa98, 0x1d2c360, 0x1d2c5d8, 0x1d2b918, 0x1d2c778, 0x1d2e798, 0x1d2c178, 0x1d2c938, 0x1d2e9a0, 0x1d2cab8, 0x1d2ccb8, 0x1d2f098, 0x1d2f280, 0x1d2b398, 0x1d2bb00, 0x1d2fd30, 0x1d2e560, 0x1d2df80, 0x1d2ea30, 0x1d2ab78, 0x1d2ecb8, 0x1d2ce78, 0x1d2d020, 0x1d2dd98, 0x1d2d2a0, 0x1d2f310, 0x1d2d4d8, 0x1d2f538, 0x1d2ee60, 0x1d2e3b8, 0x1d2d680, 0x1d2f6f8, 0x1d2f8b8, 0x1d2d8c0, 0x1d2db60, 0x1d30390, 0x1d302b0, 0x1d30550, 0x1d307b0, 0x1d30150, 0x1d30970, 0x1d30890, 0x1d30a50, 0x1d30470, 0x1d35750, 0x1d35690, 0x1cecb10, 0x1d18950, 0x1d188b0, 0x1d18cb0, 0x1d29f70, 0x1d1a650, 0x1ceca70, 0x1d18a90, 0x1ced0b0, 0x1d35830, 0x1d351d0, 0x1d1a7b0, 0x1d17050, 0x1d189f0, 0x1d1a5b0, 0x1cebeb0, 0x1d198b0, 0x1d354b0, 0x1d35330, 0x1d35790, 0x1d18810, 0x1cebe10, 0x1cebf50, 0x1d35af0, 0x1d18d90, 0x1d35a10, 0x1d18df0, 0x1d35cb0, 0x1d35bd0, 0x1d19d70, 0x1d36030, 0x1d347b0, 0x1d36110, 0x1d1cd70, 0x1d35f30, 0x1d1e710, 0x1d358d0, 0x1d1cc70, 0x1d34830, 0x1d355b0, 0x1d36170, 0x1d34770, 0x1d348f0, 0x1d1a850, 0x1d1e7f0, 0x1d35f70, 0x1d19db0, 0x1d1a750, 0x1d18ff0, 0x1d1ccb0, 0x1d18ef0, 0x1d353d0, 0x1d34930, 0x1d35930, 0x1d19cf0, 0x1d35fb0, 0x1d34730, 0x1d1ccf0, 0x1d360d0, 0x1d34870, 0x1d19790, 0x1d197d0, 0x1d35430, 0x1d19d30, 0x1d35ff0, 0x1d1cd30, 0x1d35ef0, 0x1d3aff0, 0x1d1cc30, 0x1d347f0, 0x1d34130, 0x1d18fb0, 0x1d18f70, 0x1d3a018, 0x1d38ab8, 0x1d38058, 0x1d378b0, 0x1d38098, 0x1d3a0b8, 0x1d380c0, 0x1d35e10, 0x1d39f10, 0x1d3a0f8, 0x1d37ad8, 0x1d38138, 0x1d38188, 0x1d39470, 0x1d3a1f8, 0x1d38630, 0x1d38228, 0x1d3a470, 0x1d38278, 0x1d3a5c0, 0x1d387b0, 0x1d38318, 0x1d3a340, 0x1d39b38, 0x1d38398, 0x1d37d10, 
0x1d378d0, 0x1d38480, 0x1d3a4c8, 0x1d38580, 0x1d15b90, 0x1d37b00, 0x1599100, 0x1d3a6c0, 0x1d3a120, 0x1d395b0, 0x1d3a158, 0x1d39c08, 0x1d38b58, 0x1d39398, 0x1d3a8b8, 0x1d388d8, 0x1d387f0, 0x1d377f0, 0x1d39700, 0x1d39c58, 0x1d3a370, 0x1d38a58, 0x1d38a80, 0x1d37b60, 0x1d39c30, 0x1d38bd8, 0x1d38ca0, 0x1d38cd8, 0x1d37c50, 0x1d38d80, 0x1d39240, 0x1d37810, 0x1d38df8, 0x1d39278, 0x1d38ee8, 0x1d3a810, 0x1d37910, 0x1599290, 0x1d37930, 0x1d38f98, 0x1d38fc0, 0x1d38ff8, 0x1d38710, 0x1599130, 0x1c741d0, 0x1d390a0, 0x15990d8, 0x1d37c70, 0x1d39118, 0x1d38e90, 0x1d37830, 0x1d391c0, 0x1d391e8, 0x1d39218, 0x1d38c78, 0x1d3a318, 0x1d39878, 0x15992f8, 0x1d37dd8, 0x1599320, 0x1d39328, 0x1d3ac78, 0x1d39c80, 0x1d394b8, 0x1d37950, 0x1d37850, 0x1d39588, 0x1d39628, 0x1d3a570, 0x1d3a3c0, 0x1d396d8, 0x1d37970, 0x1d38db0, 0x1d39738, 0x1d38970, 0x1599390, 0x1d37b98, 0x1d38530, 0x1d39970, 0x1d39808, 0x1d39ec8, 0x1d3a278, 0x1d37a90, 0x1d37870, 0x1d39948, 0x1d39998, 0x1d39448, 0x1d379b8, 0x1d399c0, 0x1d38430, 0x1d399f8, 0x1599150, 0x1d3aa90, 0x1d37a40, 0x1d3ab38, 0x1d39ac8, 0x1d37b38, 0x1d379e0, 0x1d37bc0, 0x1d3a9f8, 0x1d37bf8, 0x1d37c20, 0x1d3a950, 0x1d378f0, 0x1d37a18, 0x1d37890, 0x1d39cb8, 0x1d37f78, 0x1d389a0, 0x1d35df0, 0x1d39ef0, 0x1d38890, 0x15992b0, 0x1d38670, 0x1d39d88, 0x1d37a70, 0x1d39df8, 0x1d3a670, 0x1d37e88, 0x1d37ed8, 0x1d39f58, 0x1d38350, 0x1d37fb8, 0x1d3abd0, 0x1d3b150, 0x1d3b4d0, 0x1d3b1b0, 0x1d3b3f0, 0x1d3b230, 0x1d3b550, 0x1d3b2b0, 0x1d3b770, 0x1d3b890, 0x1d3af10, 0x1d1c710, 0x1d1c650, 0x1d1c6b0, 0x1d3b690, 0x1d3b6f0, 0x1d3b0f0, 0x159b630, 0x159add0, 0x15ab070, 0x15ab050, 0x1d14990, 0x159ca50, 0x1d149b0, 0x15a9870, 0x159ae50, 0x15fbc70, 0x15df670, 0x159ca70, 0x15a9e90, 0x15a8d90, 0x15ded50, 0x15fc2b0, 0x159adf0, 0x1d149d0, 0x159b478, 0x15fa730, 0x1c7d1d0, 0x15a9890, 0x15fb630, 0x15fbc90, 0x159b5d0, 0x15facd0, 0x159ae70, 0x15a9fe8 ]
DEBUG = True
subvtable_candidates = list()
basevtable_candidates = list()
for vtable in vtables:
ott = Qword(vtable-16)
if ott != 0:
subvtable_candidates.append(vtable)
if DEBUG:
print "0x%x - SUBVTABLE" % vtable
else:
basevtable_candidates.append(vtable)
if DEBUG:
print "0x%x - BASEVTABLE" % vtable
print
print "Possible base-vtables:"
for vtable in basevtable_candidates:
print "0x%x" % vtable
print
print "Possible sub-vtables:"
for vtable in subvtable_candidates:
print "0x%x" % vtable
print
| 548.268293
| 21,759
| 0.813114
| 2,070
| 22,479
| 8.827053
| 0.971981
| 0.001532
| 0.001751
| 0.002627
| 0.007224
| 0.007224
| 0.00405
| 0.00405
| 0
| 0
| 0
| 0.54524
| 0.095823
| 22,479
| 41
| 21,760
| 548.268293
| 0.353752
| 0.000756
| 0
| 0.269231
| 0
| 0
| 0.003749
| 0
| 0
| 0
| 0.794047
| 0
| 0
| 0
| null | null | 0
| 0.115385
| null | null | 0.346154
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ab6710a479b6b16a6e9dc5c8e6504b99d346e290
| 119
|
py
|
Python
|
test-simple/calc.py
|
Tobils/recipe-app-api
|
3b0948dfd7dbefc57ae85ba8b51fcf77c5c04344
|
[
"MIT"
] | 1
|
2021-02-09T04:14:18.000Z
|
2021-02-09T04:14:18.000Z
|
test-simple/calc.py
|
Tobils/recipe-app-api
|
3b0948dfd7dbefc57ae85ba8b51fcf77c5c04344
|
[
"MIT"
] | null | null | null |
test-simple/calc.py
|
Tobils/recipe-app-api
|
3b0948dfd7dbefc57ae85ba8b51fcf77c5c04344
|
[
"MIT"
] | null | null | null |
""" add 2 number """
def add(x, y):
return x + y
""" substract y from x """
def substract(x, y):
return y - x
| 14.875
| 26
| 0.529412
| 21
| 119
| 3
| 0.428571
| 0.095238
| 0.253968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0.285714
| 119
| 8
| 27
| 14.875
| 0.729412
| 0.10084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
abb765fee4958674e53441fedc856ec2d8b1f629
| 82
|
py
|
Python
|
project_name/serializers.py
|
dthorell/django-microservice
|
db66efbe240a465682989c729ea38b890ee614f3
|
[
"MIT"
] | null | null | null |
project_name/serializers.py
|
dthorell/django-microservice
|
db66efbe240a465682989c729ea38b890ee614f3
|
[
"MIT"
] | 2
|
2021-06-09T17:46:04.000Z
|
2021-06-10T18:45:20.000Z
|
project_name/serializers.py
|
dthorell/django-microservice
|
db66efbe240a465682989c729ea38b890ee614f3
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
# TODO: write here your model serializers
| 20.5
| 41
| 0.829268
| 11
| 82
| 6.090909
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 82
| 3
| 42
| 27.333333
| 0.957143
| 0.47561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
abedcabcfe29e864831d0deaddc996f78c3ca286
| 88
|
py
|
Python
|
antipetros_discordbot/auxiliary_classes/aux_server_classes/__init__.py
|
official-antistasi-community/Antipetros_Discord_Bot
|
1b5c8b61c09e61cdff671e259f0478d343a50c8d
|
[
"MIT"
] | null | null | null |
antipetros_discordbot/auxiliary_classes/aux_server_classes/__init__.py
|
official-antistasi-community/Antipetros_Discord_Bot
|
1b5c8b61c09e61cdff671e259f0478d343a50c8d
|
[
"MIT"
] | null | null | null |
antipetros_discordbot/auxiliary_classes/aux_server_classes/__init__.py
|
official-antistasi-community/Antipetros_Discord_Bot
|
1b5c8b61c09e61cdff671e259f0478d343a50c8d
|
[
"MIT"
] | 1
|
2021-02-12T01:10:51.000Z
|
2021-02-12T01:10:51.000Z
|
from .server_item import *
from .helper import *
from .is_online_message_items import *
| 22
| 38
| 0.795455
| 13
| 88
| 5.076923
| 0.692308
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 88
| 3
| 39
| 29.333333
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f9ef1c766a0a46ece41bbb3a7a8db7b96419694c
| 210
|
py
|
Python
|
python/8kyu/generate_range_of_integers.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/generate_range_of_integers.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/generate_range_of_integers.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
"""Kata url: https://www.codewars.com/kata/55eca815d0d20962e1000106."""
from typing import List
def generate_range(_min: int, _max: int, step: int) -> List[int]:
return list(range(_min, _max + 1, step))
| 26.25
| 71
| 0.7
| 30
| 210
| 4.733333
| 0.666667
| 0.112676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104972
| 0.138095
| 210
| 7
| 72
| 30
| 0.679558
| 0.309524
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
e60b5e32067fb2bda2b28e090317819a571c8cb5
| 184
|
py
|
Python
|
codewars/8kyu/doha22/kata8/alam/test.py
|
doha22/Training_one
|
0cd7cf86c7da0f6175834146296b763d1841766b
|
[
"MIT"
] | null | null | null |
codewars/8kyu/doha22/kata8/alam/test.py
|
doha22/Training_one
|
0cd7cf86c7da0f6175834146296b763d1841766b
|
[
"MIT"
] | 2
|
2019-01-22T10:53:42.000Z
|
2019-01-31T08:02:48.000Z
|
codewars/8kyu/doha22/kata8/alam/test.py
|
doha22/Training_one
|
0cd7cf86c7da0f6175834146296b763d1841766b
|
[
"MIT"
] | 13
|
2019-01-22T10:37:42.000Z
|
2019-01-25T13:30:43.000Z
|
import unittest
from alarm import set_alarm
def test_set_alarm(benchmark):
assert benchmark(set_alarm,(True, True)) == False
assert benchmark(set_alarm,(False, True)) == True
| 26.285714
| 53
| 0.75
| 26
| 184
| 5.115385
| 0.423077
| 0.240602
| 0.270677
| 0.345865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146739
| 184
| 7
| 54
| 26.285714
| 0.847134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e6214603206384ef5fda0936c275e027c90d4a2d
| 3,622
|
py
|
Python
|
tests/dltranz_tests/test_distribution_target_loss.py
|
KirillVladimirov/pytorch-lifestream
|
83005b950d41de8afc11711fc955ffafb5ff7a9e
|
[
"Apache-2.0"
] | null | null | null |
tests/dltranz_tests/test_distribution_target_loss.py
|
KirillVladimirov/pytorch-lifestream
|
83005b950d41de8afc11711fc955ffafb5ff7a9e
|
[
"Apache-2.0"
] | null | null | null |
tests/dltranz_tests/test_distribution_target_loss.py
|
KirillVladimirov/pytorch-lifestream
|
83005b950d41de8afc11711fc955ffafb5ff7a9e
|
[
"Apache-2.0"
] | 1
|
2022-02-05T15:06:48.000Z
|
2022-02-05T15:06:48.000Z
|
import torch
import numpy as np
from dltranz.loss import DistributionTargetsLoss
def test_best_loss():
eps = 1e-7
prediction = {'neg_sum': torch.tensor([[np.log(10 + 1)]]),
'neg_distribution': torch.tensor([[100., 0., 0., 0., 0., 0.]]),
'pos_sum': torch.tensor([[0]]),
'pos_distribution': torch.tensor([[0., 100., 0., 0., 0., 0.]])}
label = {'neg_sum': np.array([[10]]),
'neg_distribution': np.array([[1., 0., 0., 0., 0., 0.]]),
'pos_sum': np.array([[0]]),
'pos_distribution': np.array([[0., 1., 0., 0., 0., 0.]])}
loss = DistributionTargetsLoss()
out = loss(prediction, label)
assert abs(out.item() - 0.) < eps
assert type(out) is torch.Tensor
def test_loss_300():
eps = 1e-7
prediction = {'neg_sum': torch.tensor([[10]]),
'neg_distribution': torch.tensor([[100., 0., 0., 0., 0., 0.]]),
'pos_sum': torch.tensor([[0]]),
'pos_distribution': torch.tensor([[0., 100., 0., 0., 0., 0.]])}
label = {'neg_sum': np.array([[0]]),
'neg_distribution': np.array([[1., 0., 0., 0., 0., 0.]]),
'pos_sum': np.array([[0]]),
'pos_distribution': np.array([[0., 1., 0., 0., 0., 0.]])}
loss = DistributionTargetsLoss()
out = loss(prediction, label)
assert abs(out.item() - 300.) < eps
assert type(out) is torch.Tensor
def test_usual_loss_first():
eps = 1e-7
prediction = {'neg_sum': torch.tensor([[-1.]]),
'neg_distribution': torch.tensor([[0.1, 0.2, 0.1, 0.1, 0.3, 0.2]]),
'pos_sum': torch.tensor([[ 1.]]),
'pos_distribution': torch.tensor([[0.1, 0.2, 0.1, 0.1, 0.3, 0.2]])}
label = {'neg_sum': np.array([[-1.]]),
'neg_distribution': np.array([[0.1, 0.2, 0.1, 0.1, 0.3, 0.2]]),
'pos_sum': np.array([[1.]]),
'pos_distribution': np.array([[0.1, 0.2, 0.1, 0.1, 0.3, 0.2]])}
loss = DistributionTargetsLoss()
out = loss(prediction, label)
assert abs(out.item() - 12.138458251953125) < eps
assert type(out) is torch.Tensor
def test_usual_loss_second():
eps = 1e-7
prediction = {'neg_sum': torch.tensor([[-1.]]),
'neg_distribution': torch.tensor([[0.1, 0.2, 0.1, 0.1, 0.3, 0.2]]),
'pos_sum': torch.tensor([[ 1.]]),
'pos_distribution': torch.tensor([[0.3, 0.5, 0., 0.1, 0.1, 0.0]])}
label = {'neg_sum': np.array([[-10.]]),
'neg_distribution': np.array([[0.5, 0.5, 0.0, 0.0, 0.0, 0.0]]),
'pos_sum': np.array([[8.]]),
'pos_distribution': np.array([[0.1, 0.1, 0.1, 0.1, 0.1, 0.5]])}
loss = DistributionTargetsLoss()
out = loss(prediction, label)
assert abs(out.item() - 38.563011169433594) < eps
assert type(out) is torch.Tensor
def test_one_class():
eps = 1e-7
prediction = {'neg_sum': torch.tensor([[-1.]]),
'neg_distribution': torch.tensor([[1., 0., 0., 0., 0., 0.]]),
'pos_sum': torch.tensor([[ 1.]]),
'pos_distribution': torch.tensor([[0., 1., 0., 0., 0., 0.]])}
label = {'neg_sum': np.array([[-1.]]),
'neg_distribution': np.array([[1., 0., 0., 0., 0., 0.]]),
'pos_sum': np.array([[1.]]),
'pos_distribution': np.array([[0., 1., 0., 0., 0., 0.]])}
loss = DistributionTargetsLoss()
out = loss(prediction, label)
assert abs(out.item() - 10.703149795532227) < eps
assert type(out) is torch.Tensor
| 35.165049
| 85
| 0.503589
| 501
| 3,622
| 3.536926
| 0.097804
| 0.057562
| 0.060948
| 0.051919
| 0.895598
| 0.892212
| 0.892212
| 0.861738
| 0.823928
| 0.762415
| 0
| 0.101438
| 0.270569
| 3,622
| 102
| 86
| 35.509804
| 0.569266
| 0
| 0
| 0.630137
| 0
| 0
| 0.127002
| 0
| 0
| 0
| 0
| 0
| 0.136986
| 1
| 0.068493
| false
| 0
| 0.041096
| 0
| 0.109589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e6320f7bc2522f76b9d05fac4dac9bf5482ba3a5
| 330
|
py
|
Python
|
core/classifier/mfe_svm_classifier.py
|
yt7589/mgs
|
2faae1b69e6d4cde63afb9b2432b1bf49ebdd770
|
[
"Apache-2.0"
] | null | null | null |
core/classifier/mfe_svm_classifier.py
|
yt7589/mgs
|
2faae1b69e6d4cde63afb9b2432b1bf49ebdd770
|
[
"Apache-2.0"
] | null | null | null |
core/classifier/mfe_svm_classifier.py
|
yt7589/mgs
|
2faae1b69e6d4cde63afb9b2432b1bf49ebdd770
|
[
"Apache-2.0"
] | null | null | null |
from pyAudioAnalysis import ShortTermFeatures as aSF
from pyAudioAnalysis import MidTermFeatures as aMF
from pyAudioAnalysis import audioBasicIO as aIO
import numpy as np
import plotly.graph_objs as go
import plotly
import sklearn.svm as sks # import SVC
class MfeSvmClassifier:
@staticmethod
def train():
pass
| 27.5
| 52
| 0.793939
| 43
| 330
| 6.069767
| 0.604651
| 0.218391
| 0.287356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 330
| 12
| 53
| 27.5
| 0.966667
| 0.030303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| true
| 0.090909
| 0.636364
| 0
| 0.818182
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
e64742d7aa296f88e8ba11eeaca6dd175108b550
| 92
|
py
|
Python
|
webresume/api/tests.py
|
cmput401-fall2018/web-app-ci-cd-with-travis-ci-ZhimaoLin
|
f09753f7615c5b7c2cb6d51d51424b4f616a2241
|
[
"MIT"
] | null | null | null |
webresume/api/tests.py
|
cmput401-fall2018/web-app-ci-cd-with-travis-ci-ZhimaoLin
|
f09753f7615c5b7c2cb6d51d51424b4f616a2241
|
[
"MIT"
] | 4
|
2018-10-01T23:13:33.000Z
|
2020-06-05T19:10:58.000Z
|
webresume/api/tests.py
|
cmput401-fall2018/web-app-ci-cd-with-travis-ci-ZhimaoLin
|
f09753f7615c5b7c2cb6d51d51424b4f616a2241
|
[
"MIT"
] | 2
|
2018-10-01T19:46:09.000Z
|
2018-10-09T00:40:18.000Z
|
from django.test import TestCase
# Create your tests here.
def test_test():
assert True
| 18.4
| 32
| 0.75
| 14
| 92
| 4.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184783
| 92
| 5
| 33
| 18.4
| 0.906667
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
053165b9d07b0b228d5dfaab6bcfcf22c4983f87
| 47
|
py
|
Python
|
image-stitching/src/practice.py
|
blackmahub/de.hs-fulda.informatik.cv.image-stitching
|
5ec8448322b3591b11a746d7ad99dc8e0adbcac5
|
[
"MIT"
] | null | null | null |
image-stitching/src/practice.py
|
blackmahub/de.hs-fulda.informatik.cv.image-stitching
|
5ec8448322b3591b11a746d7ad99dc8e0adbcac5
|
[
"MIT"
] | null | null | null |
image-stitching/src/practice.py
|
blackmahub/de.hs-fulda.informatik.cv.image-stitching
|
5ec8448322b3591b11a746d7ad99dc8e0adbcac5
|
[
"MIT"
] | null | null | null |
print(["java", {"python": ("ML", "AI", "CV")}])
| 47
| 47
| 0.446809
| 6
| 47
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.488372
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
0560bd42c719432ea281befd7da7e85337efdb08
| 27
|
py
|
Python
|
esinet/minimum_norm/__init__.py
|
LukeTheHecker/ANNDip
|
008fbea39d80b2bd97ca4c5510b0aefb5a766443
|
[
"MIT"
] | 10
|
2021-01-27T14:13:18.000Z
|
2021-08-12T12:48:00.000Z
|
esinet/minimum_norm/__init__.py
|
LukeTheHecker/ANNDip
|
008fbea39d80b2bd97ca4c5510b0aefb5a766443
|
[
"MIT"
] | 1
|
2021-12-06T12:14:17.000Z
|
2021-12-06T12:19:14.000Z
|
esinet/minimum_norm/__init__.py
|
LukeTheHecker/ANNDip
|
008fbea39d80b2bd97ca4c5510b0aefb5a766443
|
[
"MIT"
] | 2
|
2021-05-18T23:54:03.000Z
|
2021-06-04T20:57:16.000Z
|
from .minimum_norm import *
| 27
| 27
| 0.814815
| 4
| 27
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 27
| 1
| 27
| 27
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
558f30966e1b0c96bd097eba4c0316db1c2d2a1e
| 53
|
py
|
Python
|
navtools/__init__.py
|
slott56/navtools
|
9860e4e79e3bd5d6e479755180a4d85a77502102
|
[
"BSD-3-Clause"
] | 5
|
2015-11-26T01:26:43.000Z
|
2022-01-07T19:50:24.000Z
|
navtools/__init__.py
|
slott56/navtools
|
9860e4e79e3bd5d6e479755180a4d85a77502102
|
[
"BSD-3-Clause"
] | 1
|
2022-01-07T05:53:09.000Z
|
2022-01-07T17:05:24.000Z
|
navtools/__init__.py
|
slott56/navtools
|
9860e4e79e3bd5d6e479755180a4d85a77502102
|
[
"BSD-3-Clause"
] | null | null | null |
"""Navtools 2021.08.29"""
__version__ = "2021.08.29"
| 17.666667
| 26
| 0.660377
| 8
| 53
| 3.875
| 0.625
| 0.387097
| 0.516129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.09434
| 53
| 2
| 27
| 26.5
| 0.3125
| 0.358491
| 0
| 0
| 0
| 0
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
55a47daf6dbe85fd7e6f37790b91ddf125836834
| 47
|
py
|
Python
|
libs/models/__init__.py
|
0h-n0/first_deep
|
8b4b1c3e2198774baaddac7b1045fecc95c59f0b
|
[
"MIT"
] | null | null | null |
libs/models/__init__.py
|
0h-n0/first_deep
|
8b4b1c3e2198774baaddac7b1045fecc95c59f0b
|
[
"MIT"
] | null | null | null |
libs/models/__init__.py
|
0h-n0/first_deep
|
8b4b1c3e2198774baaddac7b1045fecc95c59f0b
|
[
"MIT"
] | null | null | null |
from .cnn import CNN
from .rnn import RNNModel
| 15.666667
| 25
| 0.787234
| 8
| 47
| 4.625
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 26
| 23.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e9471d26e68758672e6b24a57743d37bfcb749e0
| 473
|
py
|
Python
|
easycodef/errors/_errors.py
|
fiveio/easy-codef-py
|
97bd6831909e4d31af0ec7ed479b63fc977cd302
|
[
"MIT"
] | 1
|
2019-09-17T00:47:08.000Z
|
2019-09-17T00:47:08.000Z
|
easycodef/errors/_errors.py
|
fiveio/easy-codef-py
|
97bd6831909e4d31af0ec7ed479b63fc977cd302
|
[
"MIT"
] | null | null | null |
easycodef/errors/_errors.py
|
fiveio/easy-codef-py
|
97bd6831909e4d31af0ec7ed479b63fc977cd302
|
[
"MIT"
] | null | null | null |
class Error(Exception):
"""Base class"""
pass
class TokenGenerateError(Error):
"""
토큰 생성 에러
:param: message: 에러 메세지
"""
def __init__(self, message):
self.message = message
class ConnectedIdGenerateError(Error):
"""
커넥티드 아이디 생성 에러
:param: message: 에러 메세지
"""
def __init__(self, message):
self.message = message
class UseApiError(Error):
def __init__(self, message):
self.message = message
| 15.766667
| 38
| 0.604651
| 51
| 473
| 5.372549
| 0.372549
| 0.240876
| 0.120438
| 0.19708
| 0.583942
| 0.583942
| 0.583942
| 0.452555
| 0.452555
| 0.452555
| 0
| 0
| 0.281184
| 473
| 29
| 39
| 16.310345
| 0.805882
| 0.17759
| 0
| 0.545455
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.090909
| 0
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
e955c5f7216626e1203ea5c95f60e47c503e989c
| 4,793
|
py
|
Python
|
tests/integrations/test_location_eligibility.py
|
uk-gov-mirror/alphagov.govuk-shielded-vulnerable-people-service
|
5b191980dec554155e9d431a514a945072032e7c
|
[
"MIT"
] | 3
|
2020-08-16T19:36:26.000Z
|
2020-10-29T14:35:01.000Z
|
tests/integrations/test_location_eligibility.py
|
uk-gov-mirror/alphagov.govuk-shielded-vulnerable-people-service
|
5b191980dec554155e9d431a514a945072032e7c
|
[
"MIT"
] | 101
|
2020-09-03T11:10:00.000Z
|
2021-10-01T03:03:46.000Z
|
tests/integrations/test_location_eligibility.py
|
alphagov-mirror/govuk-shielded-vulnerable-people-service
|
f9cb4ae9046fc402f0878503733a23d42546cc53
|
[
"MIT"
] | 6
|
2020-07-28T09:03:20.000Z
|
2021-04-10T18:04:56.000Z
|
from unittest.mock import patch
import pytest
from flask import Flask
from vulnerable_people_form.integrations.location_eligibility import (is_postcode_in_england,
get_postcode_tier, get_uprn_tier,
get_shielding_advice_by_uprn,
get_shielding_advice_by_postcode)
from vulnerable_people_form.form_pages.shared.constants import PostcodeTier
_current_app = Flask(__name__)
def test_get_uprn_tier_should_raise_err():
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": "INVALID_VALUE"}]]}), \
pytest.raises(ValueError) as exception_info:
get_uprn_tier("asasas")
assert "RDS procedure returned unrecognised value" in str(exception_info.value)
def test_get_postcode_tier_should_raise_err():
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": "INVALID_VALUE"}]]}), \
pytest.raises(ValueError) as exception_info:
get_postcode_tier("asasas")
assert "RDS procedure returned unrecognised value" in str(exception_info.value)
def test_is_postcode_in_england_should_raise_error():
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"stringValue": "INVALID_VALUE"}]]}), \
pytest.raises(ValueError) as exception_info:
is_postcode_in_england("LSas1BA111")
assert "RDS procedure returned unrecognised value" in str(exception_info.value)
def test_get_shielding_advice_by_postcode_should_raise_error():
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": "INVALID_VALUE"}]]}), \
pytest.raises(ValueError) as exception_info:
get_shielding_advice_by_postcode("LSas1BA111")
assert "RDS procedure returned unrecognised value" in str(exception_info.value)
def test_get_shielding_advice_by_uprn_should_raise_error():
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": "INVALID_VALUE"}]]}), \
pytest.raises(ValueError) as exception_info:
get_shielding_advice_by_uprn("asasas")
assert "RDS procedure returned unrecognised value" in str(exception_info.value)
@pytest.mark.parametrize("stored_proc_return_value, expected_output",
[(1, PostcodeTier.MEDIUM), (2, PostcodeTier.HIGH)])
def test_get_postcode_tier_should_return_correct_tier(
stored_proc_return_value, expected_output):
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": stored_proc_return_value}]]}):
postcode_tier = get_postcode_tier("LS1 1BA")
assert postcode_tier == expected_output
@pytest.mark.parametrize("stored_proc_return_value, expected_output",
[("YES", True), ("NO", False)])
def test_is_postcode_in_england_should_return_correct_eligibility_value(
stored_proc_return_value, expected_output):
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"stringValue": stored_proc_return_value}]]}):
postcode_in_england = is_postcode_in_england("LS1 1BA")
assert postcode_in_england == expected_output
@pytest.mark.parametrize("stored_proc_return_value, expected_output",
[(0, 0), (1, 1)])
def test_get_shielding_advice_by_uprn_should_return_correct_eligibility_value(
stored_proc_return_value, expected_output):
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": stored_proc_return_value}]]}):
uprn_shielding = get_shielding_advice_by_uprn("10000000")
assert uprn_shielding == expected_output
@pytest.mark.parametrize("stored_proc_return_value, expected_output",
[(0, 0), (1, 1)])
def test_get_shielding_advice_by_postcode_should_return_correct_eligibility_value(
stored_proc_return_value, expected_output):
with patch("vulnerable_people_form.integrations.location_eligibility.execute_sql",
return_value={"records": [[{"longValue": stored_proc_return_value}]]}):
postcode_shielding = get_shielding_advice_by_postcode("BB1 1TA")
assert postcode_shielding == expected_output
| 50.989362
| 103
| 0.709159
| 536
| 4,793
| 5.886194
| 0.147388
| 0.073217
| 0.060856
| 0.079873
| 0.846276
| 0.794612
| 0.767036
| 0.752773
| 0.729636
| 0.711886
| 0
| 0.008318
| 0.197371
| 4,793
| 93
| 104
| 51.537634
| 0.811801
| 0
| 0
| 0.507042
| 0
| 0
| 0.264135
| 0.14855
| 0
| 0
| 0
| 0
| 0.126761
| 1
| 0.126761
| false
| 0
| 0.070423
| 0
| 0.197183
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e961df5621ca66a4e84e3da9468fa49c1caee7eb
| 28
|
py
|
Python
|
examples/NIPS/UrbanSounds8K/__init__.py
|
dais-ita/DeepProbCEP
|
22790c1672c1cce49a59d18921c710f61cdde2f2
|
[
"MIT"
] | 6
|
2020-09-10T03:40:53.000Z
|
2021-05-26T07:30:20.000Z
|
examples/NIPS/UrbanSounds8K/__init__.py
|
dais-ita/DeepProbCEP
|
22790c1672c1cce49a59d18921c710f61cdde2f2
|
[
"MIT"
] | null | null | null |
examples/NIPS/UrbanSounds8K/__init__.py
|
dais-ita/DeepProbCEP
|
22790c1672c1cce49a59d18921c710f61cdde2f2
|
[
"MIT"
] | 1
|
2020-11-23T15:55:57.000Z
|
2020-11-23T15:55:57.000Z
|
from .sounds_utils import *
| 14
| 27
| 0.785714
| 4
| 28
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e9c6131de9d484c93efccdc4e9a06f8812efa730
| 38,322
|
py
|
Python
|
CouncilTag/ingest/data.py
|
vinvasir/engage-backend
|
0050d0c70c1cc0127e5bdef506ccea5b47e2db39
|
[
"Apache-2.0"
] | null | null | null |
CouncilTag/ingest/data.py
|
vinvasir/engage-backend
|
0050d0c70c1cc0127e5bdef506ccea5b47e2db39
|
[
"Apache-2.0"
] | null | null | null |
CouncilTag/ingest/data.py
|
vinvasir/engage-backend
|
0050d0c70c1cc0127e5bdef506ccea5b47e2db39
|
[
"Apache-2.0"
] | null | null | null |
import requests
from bs4 import BeautifulSoup
import unicodedata
import datetime
import pytz
from calendar import timegm
local_tz = pytz.timezone("America/Los_Angeles")
city_council_agendas_url = "https://www.smgov.net/departments/clerk/agendas.aspx"
list_of_sections = [u'SPECIAL AGENDA ITEMS', u'CONSENT CALENDAR', u'STUDY SESSION',
u'CONTINUED ITEMS', u'ADMINISTRATIVE PROCEEDINGS', u'ORDINANCES',
u'STAFF ADMINISTRATIVE ITEMS', u'PUBLIC HEARINGS']
def agenda_date_to_epoch(date_str, year):
'''Transforms scraped date to epoch time'''
naive_dt = datetime.datetime.strptime(
str(year) + " " + date_str.string.strip(), '%Y %B %d %I:%M %p')
local_dt = local_tz.localize(naive_dt, is_dst=None)
utc_dt = local_dt.astimezone(pytz.utc)
utc_timetuple = utc_dt.timetuple()
return timegm(utc_timetuple)
def parse_query_params(params):
'''
Takes the split key value pairs which are made up of ["key=value", "key=value", "key="]
value may be empty except for MeetingID and ID keys
Returns a dictionary with two keys "MeetingID" and "ID"
'''
query = dict()
for param in params:
split_param = param.split("=")
query[split_param[0]] = split_param[1]
return query
def process_information_section(body):
table_body = body.find('table')
if table_body is not None:
table_row = table_body.find('tr')
if table_row is not None:
td_children = table_row.find_all('td')
department = td_children[0].get_text().replace('&', 'and')
sponsors = td_children[1].get_text()
if sponsors == '':
sponsors = None
return department, sponsors
def process_actions_section(body):
actions = []
paragraphs = body.find_all('p')
list_actions = body.find('ol')
if list_actions is not None:
# preferred method
next = list_actions.find('li')
while next is not None:
if next.name == 'ol':
actions[-1] += unicodedata.normalize("NFKD", next.get_text())
else:
actions.append(unicodedata.normalize("NFKD", next.get_text()))
next = next.next_sibling
else:
paragraphs = paragraphs[1:]
for paragraph in paragraphs:
actions.append(unicodedata.normalize("NFKD", paragraph.get_text()))
if len(actions) > 0 and actions[0] == 'Staff recommends that the City Council:':
actions = actions[1:]
return actions
def process_agenda_item(session, prefix, href):
agenda_item = dict()
agenda_item_url = prefix + href
query = href.split('?')
query_params = query[1].split("&")
if query[0] != 'Detail_LegiFile.aspx':
return
r = session.get(agenda_item_url)
agenda_item_soup = BeautifulSoup(r.text, 'html.parser')
params = parse_query_params(query_params)
ID = params['ID']
MeetingID = params['MeetingID']
Title = agenda_item_soup.find(
'h1', {'id': 'ContentPlaceholder1_lblLegiFileTitle'})
if Title is None:
print("TITLE NONE FOR: ", agenda_item_url)
Title = "SOME TITLE"
return None
else:
Title = Title.get_text().strip()
bodies = agenda_item_soup.find_all(
'div', {'class': 'LegiFileSection'})
info = agenda_item_soup.find('div', {'class': 'LegiFileInfo'})
agenda_item['Title'] = Title
agenda_item['ID'] = ID
agenda_item['MeetingID'] = MeetingID
if info is not None:
info_body = info.find('div', {'class': 'LegiFileSectionContents'})
Department, Sponsors = process_information_section(info_body)
agenda_item['Department'] = Department
agenda_item['Sponsors'] = Sponsors
recommendations_body = agenda_item_soup.find('div', {'id': 'divItemDiscussion'})
summary_body = agenda_item_soup.find('div', {'id': 'divBody'})
if recommendations_body is not None:
agenda_item['Recommendations'] = process_actions_section(recommendations_body)
else:
agenda_item['Recommendations'] = []
agenda_item['Body'] = []
if summary_body is not None:
Body = summary_body.find_all('p')
for body_element in Body:
text = unicodedata.normalize("NFKD", body_element.get_text()).strip()
if text != '':
agenda_item['Body'].append(text)
return agenda_item
def process_siblings(section_begin, section_end):
next = section_begin
as_for_section = []
while next != section_end:
links = next.find_all('a')
for a in links:
a_parent_prev_sibs = a.find_parent().find_previous_siblings()
if len(a_parent_prev_sibs) == 2:
as_for_section.append(a.get('href'))
next = next.find_next_sibling()
return as_for_section
def scrape_agenda(agenda, sess):
soup_agenda = BeautifulSoup(agenda, 'html.parser')
meeting = soup_agenda.find('table', {'id': 'MeetingDetail'})
sections = meeting.find_all('td', {'class': 'Title'})
main_sections = []
processed_sections = {}
agenda_items = []
for section in sections:
strong = section.find('strong')
if strong is not None and strong.get_text() in list_of_sections:
parent_tr = section.find_parent()
main_sections.append(parent_tr)
for i in range(len(main_sections) - 1):
processed_sections[main_sections[i].get_text().split(
". ")[1]] = process_siblings(main_sections[i], main_sections[i + 1])
for key, values in processed_sections.items():
for value in values:
if value is None:
continue
agenda_item = process_agenda_item(
sess, 'http://santamonicacityca.iqm2.com/Citizens/', value)
if agenda_item is not None:
agenda_items.append(agenda_item)
return agenda_items
def get_data(year):
with requests.Session() as sess:
state_encoded = '/wEPDwULLTE0OTMxOTQ4OTYPZBYCZg9kFgJmD2QWAgIDEGRkFggCEQ8PFgQeCEltYWdlVXJsBVR+L0FwcF9UaGVtZXMvc21fQ2xlcmsvSW1hZ2VzL1JvdGF0aW5nSGVhZGVyL0NpdHktQ291bmNpbC1DaGFtYmVycy1XaXRoLUNpdHktU2VhbC5qcGceDUFsdGVybmF0ZVRleHQFJENpdHktQ291bmNpbC1DaGFtYmVycy1XaXRoLUNpdHktU2VhbBYCHgZ1c2VtYXAFCiNoZWFkZXJNYXBkAhMPZBYCAgEPFgIeBFRleHQFxwE8TUFQIE5BTUU9ImhlYWRlck1hcCI+PEFSRUEgSFJFRj0iL2RlcGFydG1lbnRzL2NsZXJrLyIgQUxUPSJTYW50YSBNb25pY2EgT2ZmaWNlIG9mIHRoZSBDaXR5IENsZXJrIEhvbWVwYWdlIiBUSVRMRT0iU2FudGEgTW9uaWNhIE9mZmljZSBvZiB0aGUgQ2l0eSBDbGVyayBIb21lIiAgIFNIQVBFPVJFQ1QgQ09PUkRTPSI1LDUsNDAwLDI0MCI+PC9NQVA+ZAIXD2QWAgIBD2QWAmYPZBYGZg9kFgICAQ9kFgQCAQ9kFgICAQ9kFgICAQ8UKwACFCsAAg8WAh4XRW5hYmxlQWpheFNraW5SZW5kZXJpbmdoZBAWDWYCAQICAgMCBAIFAgYCBwIIAgkCCgILAgwWDRQrAAIPFgwfAwUJSG9tZSBQYWdlHgVWYWx1ZQUBMB4LTmF2aWdhdGVVcmwFEy9kZXBhcnRtZW50cy9jbGVyay8eB1Rvb2xUaXAFCUhvbWUgUGFnZR4GVGFyZ2V0BQVfc2VsZh4IU2VsZWN0ZWRoZGQUKwACDxYMHwMFFEJvYXJkcyAmIENvbW1pc3Npb25zHwUFATEfBmUfBwUUQm9hcmRzICYgQ29tbWlzc2lvbnMfCGUfCWhkEBYJZgIBAgICAwIEAgUCBgIHAggWCRQrAAIPFgwfAwUsQm9hcmRzLCBDb21taXNzaW9ucyAmIE90aGVyIEFwcG9pbnRlZCBCb2RpZXMfBQUBMh8GBRovZGVwYXJ0bWVudHMvY2xlcmsvYm9hcmRzLx8HBSxCb2FyZHMsIENvbW1pc3Npb25zICYgT3RoZXIgQXBwb2ludGVkIEJvZGllcx8IBQVfc2VsZh8JaGRkFCsAAg8WDB8DBR8oT3RoZXIpIEVsZWN0ZWQgUmVwcmVzZW50YXRpdmVzHwUFATMfBgUvL0RlcGFydG1lbnRzL0NsZXJrL0VsZWN0ZWRfUmVwcmVzZW50YXRpdmVzLmFzcHgfBwUfKE90aGVyKSBFbGVjdGVkIFJlcHJlc2VudGF0aXZlcx8IBQVfc2VsZh8JaGRkFCsAAg8WDB8DBRhBcHBsaWNhdGlvbnMgJiBWYWNhbmNpZXMfBQUBNB8GBSwvZGVwYXJ0bWVudHMvY2xlcmsvYm9hcmRzL3ZhY2FuY2llc19vbGQuYXNweB8HBRhBcHBsaWNhdGlvbnMgJiBWYWNhbmNpZXMfCAUFX3NlbGYfCWhkZBQrAAIPFgwfAwUYTW9udGhseSBNZWV0aW5nIFNjaGVkdWxlHwUFATUfBgVcL0RlcGFydG1lbnRzL0NsZXJrL0JvYXJkc19hbmRfQ29tbWlzc2lvbnMvQm9hcmRzX19fQ29tbWlzc2lvbnNfTW9udGhseV9NZWV0aW5nX1NjaGVkdWxlLmFzcHgfBwUYTW9udGhseSBNZWV0aW5nIFNjaGVkdWxlHwgFBV9zZWxmHwloZGQUKwACDxYMHwMFFlZpZGVvIE1lZXRpbmcgQXJjaGl2ZXMfBQUBNh8GBRUvQ29udGVudC5hc3B4P2lkPTQyOTIfBwUWVmlkZW8gTWVldGluZyBBcmNoaXZlcx8IBQZfYmxhbmsfCWhkZBQrAAIPFgwfAwUWQXVkaW8gTWVldGluZyB
BcmNoaXZlcx8FBQE3HwYFFS9jb250ZW50LmFzcHg/aWQ9NDMxMR8HBRZBdWRpbyBNZWV0aW5nIEFyY2hpdmVzHwgFBl9ibGFuax8JaGRkFCsAAg8WDB8DBR8yMDE4IEJvYXJkL0NvbW1pc3Npb24gVmlkZW8tTkVXHwUFATgfBgUcaHR0cHM6Ly95b3V0dS5iZS9yMGpfMzVUX0VBQR8HBR8yMDE4IEJvYXJkL0NvbW1pc3Npb24gVmlkZW8tTkVXHwgFBl9ibGFuax8JaGRkFCsAAg8WDB8DBR1Ib3cgdG8gUnVuIEVmZmVjdGl2ZSBNZWV0aW5ncx8FBQE5HwYFUmh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vQVNYLnBocD9wdWJsaXNoX2lkPTY1MCZzbj1zYW50YW1vbmljYS5ncmFuaWN1cy5jb20fBwUdSG93IHRvIFJ1biBFZmZlY3RpdmUgTWVldGluZ3MfCAUGX2JsYW5rHwloZGQUKwACDxYMHwMFBEZBUXMfBQUCMTAfBgVaL0RlcGFydG1lbnRzL0NsZXJrL0JvYXJkc19hbmRfQ29tbWlzc2lvbnMvRkFRc19BYm91dF9TZXJ2aW5nX29uX2FfQm9hcmRfb3JfQ29tbWlzc2lvbi5hc3B4HwcFBEZBUXMfCAUFX3NlbGYfCWhkZA8WCWZmZmZmZmZmZhYBBXRUZWxlcmlrLldlYi5VSS5SYWRQYW5lbEl0ZW0sIFRlbGVyaWsuV2ViLlVJLCBWZXJzaW9uPTIwMDkuMi43MDEuMzUsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49MTIxZmFlNzgxNjViYTNkNBQrAAIPFgwfAwUPQ2hvb3NlIENpdmlsaXR5HwUFAjExHwYFEC9jaG9vc2VjaXZpbGl0eS8fBwUPQ2hvb3NlIENpdmlsaXR5HwgFBV9zZWxmHwloZGQUKwACDxYMHwMFGENpdHkgQ291bmNpbCBJbmZvcm1hdGlvbh8FBQIxMh8GZR8HBRhDaXR5IENvdW5jaWwgSW5mb3JtYXRpb24fCGUfCWhkEBYFZgIBAgICAwIEFgUUKwACDxYMHwMFFUNpdHkgQ291bmNpbCBIb21lcGFnZR8FBQIxMx8GBSlodHRwOi8vd3d3LnNtZ292Lm5ldC9kZXBhcnRtZW50cy9jb3VuY2lsLx8HBRVDaXR5IENvdW5jaWwgSG9tZXBhZ2UfCAUFX3NlbGYfCWhkZBQrAAIPFgwfAwUwQ291bmNpbCBBZ2VuZGFzLCBTdGFmZiBSZXBvcnRzLCBNaW51dGVzICYgVmlkZW9zHwUFAjE0HwYFM2h0dHA6Ly93d3cuc21nb3YubmV0L2RlcGFydG1lbnRzL2NsZXJrL2FnZW5kYXMuYXNweB8HBTBDb3VuY2lsIEFnZW5kYXMsIFN0YWZmIFJlcG9ydHMsIE1pbnV0ZXMgJiBWaWRlb3MfCAUFX3NlbGYfCWhkZBQrAAIPFgwfAwUlUHVibGljIFBvcnRhbCAoUmVjb3JkcyBwcmlvciB0byAxOTg4KR8FBQIxNR8GBTFodHRwczovL3B1YmxpY2RvY3Muc21nb3YubmV0L3dlYmxpbmsvd2VsY29tZS5hc3B4HwcFJVB1YmxpYyBQb3J0YWwgKFJlY29yZHMgcHJpb3IgdG8gMTk4OCkfCAUFX3NlbGYfCWhkZBQrAAIPFgwfAwUvVGF4cGF5ZXIgUHJvdGVjdGlvbi9PYWtzIEluaXRpYXRpdmUgTG9nIChFeGNlbCkfBQUCMTYfBgVML3VwbG9hZGVkRmlsZXMvRGVwYXJ0bWVudHMvQ2xlcmsvVGF4cGF5ZXIgUHJvdGVjdGlvbiBMb2cgKExhc3QgNiB5ZWFycykueGxzeB8HBS9UYXhwYXllciBQcm90ZWN0aW9uL09ha3MgSW5pdGlhdGl2ZSBMb2cgKEV4Y2VsKR8
IBQVfc2VsZh8JaGRkFCsAAg8WDB8DBR8oT3RoZXIpIEVsZWN0ZWQgUmVwcmVzZW50YXRpdmVzHwUFAjE3HwYFLy9EZXBhcnRtZW50cy9DbGVyay9FbGVjdGVkX1JlcHJlc2VudGF0aXZlcy5hc3B4HwcFHyhPdGhlcikgRWxlY3RlZCBSZXByZXNlbnRhdGl2ZXMfCAUFX3NlbGYfCWhkZA8WBWZmZmZmFgEFdFRlbGVyaWsuV2ViLlVJLlJhZFBhbmVsSXRlbSwgVGVsZXJpay5XZWIuVUksIFZlcnNpb249MjAwOS4yLjcwMS4zNSwgQ3VsdHVyZT1uZXV0cmFsLCBQdWJsaWNLZXlUb2tlbj0xMjFmYWU3ODE2NWJhM2Q0FCsAAg8WDB8DBQxGb3JtcyAmIEZlZXMfBQUCMTgfBgUpL0RlcGFydG1lbnRzL0NsZXJrL0NpdHlfRm9ybXNfX19GZWVzLmFzcHgfBwUMRm9ybXMgJiBGZWVzHwgFBV9zZWxmHwloZGQUKwACDxYMHwMFCUVsZWN0aW9ucx8FBQIxOR8GZR8HBQlFbGVjdGlvbnMfCGUfCWhkEBYDZgIBAgIWAxQrAAIPFgwfAwUJU00gVm90ZXMhHwUFAjIwHwYFFWh0dHA6Ly93d3cuc212b3RlLm9yZx8HBQlTTSBWb3RlcyEfCAUGX2JsYW5rHwloZGQUKwACDxYMHwMFHkNhbXBhaWduIERpc2Nsb3N1cmUgU3RhdGVtZW50cx8FBQIyMR8GBSdodHRwOi8vc212b3RlLm9yZy9Db250ZW50LmFzcHg/aWQ9MjYzNTMfBwUeQ2FtcGFpZ24gRGlzY2xvc3VyZSBTdGF0ZW1lbnRzHwgFBl9ibGFuax8JaGRkFCsAAg8WDB8DBRdFbGVjdGVkIFJlcHJlc2VudGF0aXZlcx8FBQIyMh8GBS8vRGVwYXJ0bWVudHMvQ2xlcmsvRWxlY3RlZF9SZXByZXNlbnRhdGl2ZXMuYXNweB8HBRdFbGVjdGVkIFJlcHJlc2VudGF0aXZlcx8IBQVfc2VsZh8JaGRkDxYDZmZmFgEFdFRlbGVyaWsuV2ViLlVJLlJhZFBhbmVsSXRlbSwgVGVsZXJpay5XZWIuVUksIFZlcnNpb249MjAwOS4yLjcwMS4zNSwgQ3VsdHVyZT1uZXV0cmFsLCBQdWJsaWNLZXlUb2tlbj0xMjFmYWU3ODE2NWJhM2Q0FCsAAg8WDB8DBRVMb2JieWlzdCBSZWdpc3RyYXRpb24fBQUCMjMfBgU2L0RlcGFydG1lbnRzL0NsZXJrL0xvYmJ5aXN0L0xvYmJ5aXN0X1JlZ2lzdHJhdGlvbi5hc3B4HwcFFUxvYmJ5aXN0IFJlZ2lzdHJhdGlvbh8IBQVfc2VsZh8JaGRkFCsAAg8WDB8DBR1NdW5pY2lwYWwgQ29kZSAmIENpdHkgQ2hhcnRlch8FBQIyNB8GBSJodHRwOi8vcWNvZGUudXMvY29kZXMvc2FudGFtb25pY2EvHwcFHU11bmljaXBhbCBDb2RlICYgQ2l0eSBDaGFydGVyHwgFBl9ibGFuax8JaGRkFCsAAg8WDB8DBRFQYXNzcG9ydCBTZXJ2aWNlcx8FBQIyNR8GBTsvRGVwYXJ0bWVudHMvQ2xlcmsvUGFzc3BvcnRfU2VydmljZXMvUGFzc3BvcnRfU2VydmljZXMuYXNweB8HBRFQYXNzcG9ydCBTZXJ2aWNlcx8IBQVfc2VsZh8JaGRkFCsAAg8WDB8DBQ1QdWJsaWMgUG9ydGFsHwUFAjI2HwYFMWh0dHBzOi8vcHVibGljZG9jcy5zbWdvdi5uZXQvd2VibGluay93ZWxjb21lLmFzcHgfBwUNUHVibGljIFBvcnRhbB8IBQZfYmxhbmsfCWhkZBQrAAIPFgwfAwUVUmVjb3JkcyBSZXF1ZXN0IC0gTkVXHwUFAjI3HwYFQGh0dHBzOi8vc2FudGFtb25
pY2FjYS5teWN1c3RoZWxwLmNvbS9XRUJBUFAvX3JzL3N1cHBvcnRob21lLmFzcHgfBwUVUmVjb3JkcyBSZXF1ZXN0IC0gTkVXHwgFBV9zZWxmHwloZGQUKwACDxYMHwMFGVN1cHBvcnQgU2VydmljZXMgRGl2aXNpb24fBQUCMjgfBgUxL0RlcGFydG1lbnRzL0NsZXJrL1N1cHBvcnRfU2VydmljZXNfRGl2aXNpb24uYXNweB8HBRlTdXBwb3J0IFNlcnZpY2VzIERpdmlzaW9uHwgFBV9zZWxmHwloZGQUKwACDxYMHwMFH1N0YXRlbWVudHMgb2YgRWNvbm9taWMgSW50ZXJlc3QfBQUCMjkfBgUvaHR0cDovL3NzbC5uZXRmaWxlLmNvbS9wdWIvRGVmYXVsdC5hc3B4P2FpZD1DU00fBwUfU3RhdGVtZW50cyBvZiBFY29ub21pYyBJbnRlcmVzdB8IBQVfc2VsZh8JaGRkDxYNZmZmZmZmZmZmZmZmZhYBBXRUZWxlcmlrLldlYi5VSS5SYWRQYW5lbEl0ZW0sIFRlbGVyaWsuV2ViLlVJLCBWZXJzaW9uPTIwMDkuMi43MDEuMzUsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49MTIxZmFlNzgxNjViYTNkNGQWGmYPDxYMHwMFCUhvbWUgUGFnZR8FBQEwHwYFEy9kZXBhcnRtZW50cy9jbGVyay8fBwUJSG9tZSBQYWdlHwgFBV9zZWxmHwloZGQCAQ8PFgwfAwUUQm9hcmRzICYgQ29tbWlzc2lvbnMfBQUBMR8GZR8HBRRCb2FyZHMgJiBDb21taXNzaW9ucx8IZR8JaGQWEmYPDxYMHwMFLEJvYXJkcywgQ29tbWlzc2lvbnMgJiBPdGhlciBBcHBvaW50ZWQgQm9kaWVzHwUFATIfBgUaL2RlcGFydG1lbnRzL2NsZXJrL2JvYXJkcy8fBwUsQm9hcmRzLCBDb21taXNzaW9ucyAmIE90aGVyIEFwcG9pbnRlZCBCb2RpZXMfCAUFX3NlbGYfCWhkZAIBDw8WDB8DBR8oT3RoZXIpIEVsZWN0ZWQgUmVwcmVzZW50YXRpdmVzHwUFATMfBgUvL0RlcGFydG1lbnRzL0NsZXJrL0VsZWN0ZWRfUmVwcmVzZW50YXRpdmVzLmFzcHgfBwUfKE90aGVyKSBFbGVjdGVkIFJlcHJlc2VudGF0aXZlcx8IBQVfc2VsZh8JaGRkAgIPDxYMHwMFGEFwcGxpY2F0aW9ucyAmIFZhY2FuY2llcx8FBQE0HwYFLC9kZXBhcnRtZW50cy9jbGVyay9ib2FyZHMvdmFjYW5jaWVzX29sZC5hc3B4HwcFGEFwcGxpY2F0aW9ucyAmIFZhY2FuY2llcx8IBQVfc2VsZh8JaGRkAgMPDxYMHwMFGE1vbnRobHkgTWVldGluZyBTY2hlZHVsZR8FBQE1HwYFXC9EZXBhcnRtZW50cy9DbGVyay9Cb2FyZHNfYW5kX0NvbW1pc3Npb25zL0JvYXJkc19fX0NvbW1pc3Npb25zX01vbnRobHlfTWVldGluZ19TY2hlZHVsZS5hc3B4HwcFGE1vbnRobHkgTWVldGluZyBTY2hlZHVsZR8IBQVfc2VsZh8JaGRkAgQPDxYMHwMFFlZpZGVvIE1lZXRpbmcgQXJjaGl2ZXMfBQUBNh8GBRUvQ29udGVudC5hc3B4P2lkPTQyOTIfBwUWVmlkZW8gTWVldGluZyBBcmNoaXZlcx8IBQZfYmxhbmsfCWhkZAIFDw8WDB8DBRZBdWRpbyBNZWV0aW5nIEFyY2hpdmVzHwUFATcfBgUVL2NvbnRlbnQuYXNweD9pZD00MzExHwcFFkF1ZGlvIE1lZXRpbmcgQXJjaGl2ZXMfCAUGX2JsYW5rHwloZGQCBg8PFgwfAwUfMjAxOCBCb2FyZC9Db21taXNzaW9uIFZpZGV
vLU5FVx8FBQE4HwYFHGh0dHBzOi8veW91dHUuYmUvcjBqXzM1VF9FQUEfBwUfMjAxOCBCb2FyZC9Db21taXNzaW9uIFZpZGVvLU5FVx8IBQZfYmxhbmsfCWhkZAIHDw8WDB8DBR1Ib3cgdG8gUnVuIEVmZmVjdGl2ZSBNZWV0aW5ncx8FBQE5HwYFUmh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vQVNYLnBocD9wdWJsaXNoX2lkPTY1MCZzbj1zYW50YW1vbmljYS5ncmFuaWN1cy5jb20fBwUdSG93IHRvIFJ1biBFZmZlY3RpdmUgTWVldGluZ3MfCAUGX2JsYW5rHwloZGQCCA8PFgwfAwUERkFRcx8FBQIxMB8GBVovRGVwYXJ0bWVudHMvQ2xlcmsvQm9hcmRzX2FuZF9Db21taXNzaW9ucy9GQVFzX0Fib3V0X1NlcnZpbmdfb25fYV9Cb2FyZF9vcl9Db21taXNzaW9uLmFzcHgfBwUERkFRcx8IBQVfc2VsZh8JaGRkAgIPDxYMHwMFD0Nob29zZSBDaXZpbGl0eR8FBQIxMR8GBRAvY2hvb3NlY2l2aWxpdHkvHwcFD0Nob29zZSBDaXZpbGl0eR8IBQVfc2VsZh8JaGRkAgMPDxYMHwMFGENpdHkgQ291bmNpbCBJbmZvcm1hdGlvbh8FBQIxMh8GZR8HBRhDaXR5IENvdW5jaWwgSW5mb3JtYXRpb24fCGUfCWhkFgpmDw8WDB8DBRVDaXR5IENvdW5jaWwgSG9tZXBhZ2UfBQUCMTMfBgUpaHR0cDovL3d3dy5zbWdvdi5uZXQvZGVwYXJ0bWVudHMvY291bmNpbC8fBwUVQ2l0eSBDb3VuY2lsIEhvbWVwYWdlHwgFBV9zZWxmHwloZGQCAQ8PFgwfAwUwQ291bmNpbCBBZ2VuZGFzLCBTdGFmZiBSZXBvcnRzLCBNaW51dGVzICYgVmlkZW9zHwUFAjE0HwYFM2h0dHA6Ly93d3cuc21nb3YubmV0L2RlcGFydG1lbnRzL2NsZXJrL2FnZW5kYXMuYXNweB8HBTBDb3VuY2lsIEFnZW5kYXMsIFN0YWZmIFJlcG9ydHMsIE1pbnV0ZXMgJiBWaWRlb3MfCAUFX3NlbGYfCWhkZAICDw8WDB8DBSVQdWJsaWMgUG9ydGFsIChSZWNvcmRzIHByaW9yIHRvIDE5ODgpHwUFAjE1HwYFMWh0dHBzOi8vcHVibGljZG9jcy5zbWdvdi5uZXQvd2VibGluay93ZWxjb21lLmFzcHgfBwUlUHVibGljIFBvcnRhbCAoUmVjb3JkcyBwcmlvciB0byAxOTg4KR8IBQVfc2VsZh8JaGRkAgMPDxYMHwMFL1RheHBheWVyIFByb3RlY3Rpb24vT2FrcyBJbml0aWF0aXZlIExvZyAoRXhjZWwpHwUFAjE2HwYFTC91cGxvYWRlZEZpbGVzL0RlcGFydG1lbnRzL0NsZXJrL1RheHBheWVyIFByb3RlY3Rpb24gTG9nIChMYXN0IDYgeWVhcnMpLnhsc3gfBwUvVGF4cGF5ZXIgUHJvdGVjdGlvbi9PYWtzIEluaXRpYXRpdmUgTG9nIChFeGNlbCkfCAUFX3NlbGYfCWhkZAIEDw8WDB8DBR8oT3RoZXIpIEVsZWN0ZWQgUmVwcmVzZW50YXRpdmVzHwUFAjE3HwYFLy9EZXBhcnRtZW50cy9DbGVyay9FbGVjdGVkX1JlcHJlc2VudGF0aXZlcy5hc3B4HwcFHyhPdGhlcikgRWxlY3RlZCBSZXByZXNlbnRhdGl2ZXMfCAUFX3NlbGYfCWhkZAIEDw8WDB8DBQxGb3JtcyAmIEZlZXMfBQUCMTgfBgUpL0RlcGFydG1lbnRzL0NsZXJrL0NpdHlfRm9ybXNfX19GZWVzLmFzcHgfBwUMRm9ybXMgJiBGZWVzHwgFBV9zZWxmHwloZGQCBQ8PFgw
fAwUJRWxlY3Rpb25zHwUFAjE5HwZlHwcFCUVsZWN0aW9ucx8IZR8JaGQWBmYPDxYMHwMFCVNNIFZvdGVzIR8FBQIyMB8GBRVodHRwOi8vd3d3LnNtdm90ZS5vcmcfBwUJU00gVm90ZXMhHwgFBl9ibGFuax8JaGRkAgEPDxYMHwMFHkNhbXBhaWduIERpc2Nsb3N1cmUgU3RhdGVtZW50cx8FBQIyMR8GBSdodHRwOi8vc212b3RlLm9yZy9Db250ZW50LmFzcHg/aWQ9MjYzNTMfBwUeQ2FtcGFpZ24gRGlzY2xvc3VyZSBTdGF0ZW1lbnRzHwgFBl9ibGFuax8JaGRkAgIPDxYMHwMFF0VsZWN0ZWQgUmVwcmVzZW50YXRpdmVzHwUFAjIyHwYFLy9EZXBhcnRtZW50cy9DbGVyay9FbGVjdGVkX1JlcHJlc2VudGF0aXZlcy5hc3B4HwcFF0VsZWN0ZWQgUmVwcmVzZW50YXRpdmVzHwgFBV9zZWxmHwloZGQCBg8PFgwfAwUVTG9iYnlpc3QgUmVnaXN0cmF0aW9uHwUFAjIzHwYFNi9EZXBhcnRtZW50cy9DbGVyay9Mb2JieWlzdC9Mb2JieWlzdF9SZWdpc3RyYXRpb24uYXNweB8HBRVMb2JieWlzdCBSZWdpc3RyYXRpb24fCAUFX3NlbGYfCWhkZAIHDw8WDB8DBR1NdW5pY2lwYWwgQ29kZSAmIENpdHkgQ2hhcnRlch8FBQIyNB8GBSJodHRwOi8vcWNvZGUudXMvY29kZXMvc2FudGFtb25pY2EvHwcFHU11bmljaXBhbCBDb2RlICYgQ2l0eSBDaGFydGVyHwgFBl9ibGFuax8JaGRkAggPDxYMHwMFEVBhc3Nwb3J0IFNlcnZpY2VzHwUFAjI1HwYFOy9EZXBhcnRtZW50cy9DbGVyay9QYXNzcG9ydF9TZXJ2aWNlcy9QYXNzcG9ydF9TZXJ2aWNlcy5hc3B4HwcFEVBhc3Nwb3J0IFNlcnZpY2VzHwgFBV9zZWxmHwloZGQCCQ8PFgwfAwUNUHVibGljIFBvcnRhbB8FBQIyNh8GBTFodHRwczovL3B1YmxpY2RvY3Muc21nb3YubmV0L3dlYmxpbmsvd2VsY29tZS5hc3B4HwcFDVB1YmxpYyBQb3J0YWwfCAUGX2JsYW5rHwloZGQCCg8PFgwfAwUVUmVjb3JkcyBSZXF1ZXN0IC0gTkVXHwUFAjI3HwYFQGh0dHBzOi8vc2FudGFtb25pY2FjYS5teWN1c3RoZWxwLmNvbS9XRUJBUFAvX3JzL3N1cHBvcnRob21lLmFzcHgfBwUVUmVjb3JkcyBSZXF1ZXN0IC0gTkVXHwgFBV9zZWxmHwloZGQCCw8PFgwfAwUZU3VwcG9ydCBTZXJ2aWNlcyBEaXZpc2lvbh8FBQIyOB8GBTEvRGVwYXJ0bWVudHMvQ2xlcmsvU3VwcG9ydF9TZXJ2aWNlc19EaXZpc2lvbi5hc3B4HwcFGVN1cHBvcnQgU2VydmljZXMgRGl2aXNpb24fCAUFX3NlbGYfCWhkZAIMDw8WDB8DBR9TdGF0ZW1lbnRzIG9mIEVjb25vbWljIEludGVyZXN0HwUFAjI5HwYFL2h0dHA6Ly9zc2wubmV0ZmlsZS5jb20vcHViL0RlZmF1bHQuYXNweD9haWQ9Q1NNHwcFH1N0YXRlbWVudHMgb2YgRWNvbm9taWMgSW50ZXJlc3QfCAUFX3NlbGYfCWhkZAIFD2QWAmYPDxYCHgdWaXNpYmxlaGRkAgEPZBYCAgEPZBYCAgEPZBYCAgMPZBYGAgEPFgIfAwU0Q291bmNpbCBBZ2VuZGFzLCBTdGFmZiBSZXBvcnRzLCBNaW51dGVzICZhbXA7IFZpZGVvc2QCBQ8QDxYCHgtfIURhdGFCb3VuZGdkEBUfBDIwMTgEMjAxNwQyMDE2BDIwMTUEMjAxNAQyMDEzBDIwMTIEMjA
xMQQyMDEwBDIwMDkEMjAwOAQyMDA3BDIwMDYEMjAwNQQyMDA0BDIwMDMEMjAwMgQyMDAxBDIwMDAEMTk5OQQxOTk4BDE5OTcEMTk5NgQxOTk1BDE5OTQEMTk5MwQxOTkyBDE5OTEEMTk5MAQxOTg5BDE5ODgVHwQyMDE4BDIwMTcEMjAxNgQyMDE1BDIwMTQEMjAxMwQyMDEyBDIwMTEEMjAxMAQyMDA5BDIwMDgEMjAwNwQyMDA2BDIwMDUEMjAwNAQyMDAzBDIwMDIEMjAwMQQyMDAwBDE5OTkEMTk5OAQxOTk3BDE5OTYEMTk5NQQxOTk0BDE5OTMEMTk5MgQxOTkxBDE5OTAEMTk4OQQxOTg4FCsDH2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2cWAQIBZAIJDxYCHgtfIUl0ZW1Db3VudAIlFkpmD2QWAmYPFQUTRGVjZW1iZXIgMjYgNTozMCBQTXY8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMTA4IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBDYW5jZWxsZWQ8L2E+AABqPGEgaHJlZj0iLy9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vVmlld1B1Ymxpc2hlci5waHA/dmlld19pZD0yIiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbyBBcmNoaXZlPC9hPmQCAQ9kFgJmDxUFE0RlY2VtYmVyIDEyIDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEyOSIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTEyNiIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTQwNDQiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCAg9kFgJmDxUFE0RlY2VtYmVyIDA1IDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEwNyIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTEyMiIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTQwNDAiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCAw9kFgJmDxUFE05vdmVtYmVyIDI4IDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEwNiIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5
JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTEyNSIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTQwMzUiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCBA9kFgJmDxUFE05vdmVtYmVyIDE0IDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEwNSIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTExMiIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTQwMzIiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCBQ9kFgJmDxUFEk9jdG9iZXIgMjQgNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMTA0IiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMTExIiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9NDAyMyIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIGD2QWAmYPFQUST2N0b2JlciAxMCA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTExMDMiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTExMTAiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD00MDA3IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAgcPZBYCZg8VBRJPY3RvYmVyIDEwIDQ6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEyMCIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTU
mSUQ9MTEwOSIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTQwMDciIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCCA9kFgJmDxUFFFNlcHRlbWJlciAyNiA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTExMDIiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTExMDgiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD00MDA0IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAgkPZBYCZg8VBRRTZXB0ZW1iZXIgMTIgNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMTAxIiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMTA3IiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9Mzk5NyIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIKD2QWAmYPFQURQXVndXN0IDIyIDU6MzAgUE12PGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEzMSIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgQ2FuY2VsbGVkPC9hPgAAajxhIGhyZWY9Ii8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL1ZpZXdQdWJsaXNoZXIucGhwP3ZpZXdfaWQ9MiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW8gQXJjaGl2ZTwvYT5kAgsPZBYCZg8VBRFBdWd1c3QgMDggNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDk5IiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMTA2IiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9Mzk4MyIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVml
kZW88L2E+ZAIMD2QWAmYPFQUPSnVseSAyNSA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTEwOTgiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTExMDUiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zOTc5IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAg0PZBYCZg8VBQ9KdWx5IDI1IDQ6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEzMCIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTEwNCIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTM5NzkiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCDg9kFgJmDxUFD0p1bHkgMTEgNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDk3IiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMTAzIiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9Mzk3MyIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIPD2QWAmYPFQUPSnVseSAxMCA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTExMjYiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTExMDIiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zOTcyIiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAhAPZBYCZg8VBQ9KdW5lIDI3IDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3N
hbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTA5NiIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTEwMCIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTM5NjgiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCEQ9kFgJmDxUFD0p1bmUgMjcgNDozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMTI1IiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMDk5IiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9Mzk2OCIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAISD2QWAmYPFQUPSnVuZSAxMyA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTEwOTUiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwOTgiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zOTYwIiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAhMPZBYCZg8VBQ9KdW5lIDA1IDY6MDAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEyOCIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTA5NyIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+ajxhIGhyZWY9Ii8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL1ZpZXdQdWJsaXNoZXIucGhwP3ZpZXdfaWQ9MiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW8gQXJjaGl2ZTwvYT5kAhQPZBYCZg8VBQ5NYXkgMjQgNDowMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMTE
3IiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMTAxIiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9Mzk0OSIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIVD2QWAmYPFQUOTWF5IDIzIDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTA5NCIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTA5NiIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTM5NDciIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCFg9kFgJmDxUFDk1heSAyMyA0OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTExMjQiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwOTUiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zOTQ3IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAhcPZBYCZg8VBQ5NYXkgMDkgNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDkzIiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMDk0IiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9MzkzNSIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIYD2QWAmYPFQUOTWF5IDA5IDM6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTEyMyIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbml
jYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTA5MyIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTM5MzUiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCGQ9kFgJmDxUFEUFwcmlsIDI5IDEwOjAwIEFNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTExMjIiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwOTEiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPmo8YSBocmVmPSIvL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9WaWV3UHVibGlzaGVyLnBocD92aWV3X2lkPTIiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvIEFyY2hpdmU8L2E+ZAIaD2QWAmYPFQUQQXByaWwgMjUgNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDkyIiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMDkyIiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9MzkyNyIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIbD2QWAmYPFQUQQXByaWwgMTggNDozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDkxIiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMDkwIiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9MzkyMiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIcD2QWAmYPFQUQTWFyY2ggMjggNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDkwIiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMDg
5IiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9MzkxNiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIdD2QWAmYPFQURTWFyY2ggMjUgMTA6MDAgQU1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTExOCIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTA4NyIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+ajxhIGhyZWY9Ii8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL1ZpZXdQdWJsaXNoZXIucGhwP3ZpZXdfaWQ9MiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW8gQXJjaGl2ZTwvYT5kAh4PZBYCZg8VBRBNYXJjaCAwNyA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTEwODkiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwODMiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zOTAwIiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAh8PZBYCZg8VBRNGZWJydWFyeSAyOCA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTEwODgiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwODYiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zODk2IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAiAPZBYCZg8VBRNGZWJydWFyeSAxNCA1OjMwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTEwODciIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwODUiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2F
udGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zODg3IiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAiEPZBYCZg8VBRJKYW51YXJ5IDI4IDk6MDAgQU1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTExNiIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTA4NCIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+cjxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYS5ncmFuaWN1cy5jb20vTWVkaWFQbGF5ZXIucGhwP3ZpZXdfaWQ9MiZjbGlwX2lkPTM4OTAiIHRhcmdldD0iX2JsYW5rIj5NZWV0aW5nIFZpZGVvPC9hPmQCIg9kFgJmDxUFEkphbnVhcnkgMjQgNTozMCBQTWs8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2FjaXR5Y2EuaXFtMi5jb20vQ2l0aXplbnMvRGV0YWlsX01lZXRpbmcuYXNweD9JRD0xMDg2IiB0YXJnZXQ9Il9ibGFuayI+QWdlbmRhPC9hPgBuPGEgaHJlZj0iaHR0cDovL1NhbnRhTW9uaWNhQ2l0eUNBLklRTTIuY29tL0NpdGl6ZW5zL0ZpbGVPcGVuLmFzcHg/VHlwZT0xNSZJRD0xMDgyIiB0YXJnZXQ9Il9ibGFuayI+TWludXRlczwvYT5yPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhLmdyYW5pY3VzLmNvbS9NZWRpYVBsYXllci5waHA/dmlld19pZD0yJmNsaXBfaWQ9Mzg3MiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW88L2E+ZAIjD2QWAmYPFQUSSmFudWFyeSAxOCA2OjAwIFBNazxhIGhyZWY9Imh0dHA6Ly9zYW50YW1vbmljYWNpdHljYS5pcW0yLmNvbS9DaXRpemVucy9EZXRhaWxfTWVldGluZy5hc3B4P0lEPTExMTUiIHRhcmdldD0iX2JsYW5rIj5BZ2VuZGE8L2E+AG48YSBocmVmPSJodHRwOi8vU2FudGFNb25pY2FDaXR5Q0EuSVFNMi5jb20vQ2l0aXplbnMvRmlsZU9wZW4uYXNweD9UeXBlPTE1JklEPTEwODAiIHRhcmdldD0iX2JsYW5rIj5NaW51dGVzPC9hPnI8YSBocmVmPSJodHRwOi8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL01lZGlhUGxheWVyLnBocD92aWV3X2lkPTImY2xpcF9pZD0zODYwIiB0YXJnZXQ9Il9ibGFuayI+TWVldGluZyBWaWRlbzwvYT5kAiQPZBYCZg8VBRJKYW51YXJ5IDEwIDU6MzAgUE1rPGEgaHJlZj0iaHR0cDovL3NhbnRhbW9uaWNhY2l0eWNhLmlxbTIuY29tL0NpdGl6ZW5zL0RldGFpbF9NZWV0aW5nLmFzcHg/SUQ9MTA4NSIgdGFyZ2V0PSJfYmxhbmsiPkFnZW5kYTwvYT4AbjxhIGhyZWY9Imh0dHA6Ly9TYW50YU1vbmljYUNpdHlDQS5JUU0yLmNvbS9DaXRpemVucy9GaWxlT3Blbi5hc3B4P1R5cGU9MTUmSUQ9MTA3OSIgdGFyZ2V0PSJfYmxhbmsiPk1pbnV0ZXM8L2E+ajxhIGhyZWY9Ii8vc2FudGFtb25pY2EuZ3JhbmljdXMuY29tL1ZpZXdQdWJsaXNoZXIucGhwP3ZpZXd
faWQ9MiIgdGFyZ2V0PSJfYmxhbmsiPk1lZXRpbmcgVmlkZW8gQXJjaGl2ZTwvYT5kAgIPFgIfCmhkAhkPZBYCZg8WAh8DBSpDaXR5IG9mIFNhbnRhIE1vbmljYSZuYnNwOyYjMTY5OyZuYnNwOzIwMThkGAEFHl9fQ29udHJvbHNSZXF1aXJlUG9zdEJhY2tLZXlfXxYBBSpjdGwwMCRjdGwwMCR2ZXJ0aWNhbExlZnRCYXIkbWFpbk1lbnUkY3RsMDD/3eyL8mby2LrlW5q7Dqa6ObqU0mYIhZxR8jpwRPmAyA=='
payload = {
"__EVENTTARGET": 'ctl00$ctl00$bodyContent$mainContent$ddlYears',
"__EVENTARGUMENT": '',
"__LASTFOCUS": '',
"__VIEWSTATE": state_encoded,
"__VIEWSTATEGENERATOR": '0072609D',
"__EVENTVALIDATION": '/wEdACNIEPIK6907Rytq6F/nNnflmZnDb6ISpcvG865ChG769Wk0LM5+UfY5xCA+9tU1+LliBw+AAtqRG2rCRXqUtZgKClIJ7GykSJOig7+2A4wJ4BYv2+eqZuQ161WUyH5guiMzQhMk5XG/xqiKn0dQCEa9LfCqzIhjzRrKCJTz8NGty7MvJPj9B6ISBoCTj1sLAz7xScII9CxqFo1XpwziCfgSgkJgpF+YLSxo/pMulnHWDj4RUESCJFnNqcAiyRQViaRctbdfjV6+gj8+2/YXgPzAx2JtNO6Uqk0Qr3TBkBwcAhYqJD08yxewJDVETiDFrG7h4+3OWoa5qDLBnfdQlHCT7k0T7DYG785uDg62aUK6kwIYtcWRQ8GD47g88jOjHwpSQ1goPdoZz7kzQr/ySzmCXCGKccSVWRPEcLgr7Gqw17UXOgl7MhFM7DNCmRoUhkj7xo1gT5dReFaEiq7d64b7q6qdAOJCBUyxDA9SpPIQTuH6ZgrzGq0UXzO/VWHC6UDFu/miQk80B0TJ8DKeezeJwhIIX6qlqX8VTfquPEBWgIZ9nBH3HdEjJEzaG4tvsRosazoHmT4R2JFAOe4Hjfvxy72Gbhf3k6OHGh/YESp1Zz9ck/KQKCtW3sCKGlae3zUzGKqcZ6CQJJlZGT/bU/20RL/5aDaGW7+L6sbvEAtxWAx9/L8xvPxAHLl4Xf7CO+OTSPTUoNo4JISrPCiV6DL6betJ+dWiDR9pVIILRdinPMTvtzMmFXjTX/dcd8/GF5ML2n4osnpg1BwY9ex7lS9v',
"ctl00_ctl00_verticalLeftBar_mainMenu_ctl00_ClientState": "",
"ctl00$ctl00$bodyContent$mainContent$ddlYears": year,
"ctl00$ctl00$bodyContent$mainContent$CouncilSearch$txtSearch": ""
}
r = sess.post(city_council_agendas_url, data=payload)
soup = BeautifulSoup(r.text, 'html.parser')
agendas = dict()
table = soup.find('table', {'class': 'agendaTable'})
rows = table.findAll('tr')
for row in rows:
cells = row.findChildren('td')
try:
date = agenda_date_to_epoch(cells[0], year)
except:
date = None
if date and cells[1].string == "Agenda":
agenda = sess.get(cells[1].findChildren(
'a', {'href': True})[0]['href']).text
if "CONSENT CALENDAR" in agenda:
agendas[date] = scrape_agenda(agenda, sess)
return agendas
| 207.145946
| 30,314
| 0.925056
| 1,005
| 38,322
| 35.089552
| 0.344279
| 0.007089
| 0.002297
| 0.002552
| 0.009613
| 0.005331
| 0.001531
| 0
| 0
| 0
| 0
| 0.115782
| 0.048458
| 38,322
| 184
| 30,315
| 208.271739
| 0.851309
| 0.00655
| 0
| 0.024242
| 0
| 0.012121
| 0.84504
| 0.823931
| 0
| 1
| 0
| 0
| 0
| 1
| 0.048485
| false
| 0
| 0.036364
| 0
| 0.145455
| 0.006061
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
758a62af93144a89fab6a08f01d91eb274a35585
| 181
|
py
|
Python
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/accounts/models/users.py
|
Casanova-Development/cookiecutter-backend-django
|
ba669853b37826c699aba50f0ea3b478c21164a3
|
[
"BSD-3-Clause"
] | null | null | null |
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/accounts/models/users.py
|
Casanova-Development/cookiecutter-backend-django
|
ba669853b37826c699aba50f0ea3b478c21164a3
|
[
"BSD-3-Clause"
] | null | null | null |
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/accounts/models/users.py
|
Casanova-Development/cookiecutter-backend-django
|
ba669853b37826c699aba50f0ea3b478c21164a3
|
[
"BSD-3-Clause"
] | null | null | null |
"""Users models for application Accounts."""
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
    """Project-specific user model, subclassing Django's ``AbstractUser``.

    No extra fields are declared; the class exists as the project's
    swappable user model.
    """
| 20.111111
| 51
| 0.729282
| 22
| 181
| 6
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160221
| 181
| 8
| 52
| 22.625
| 0.868421
| 0.40884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
759359d122edb92374eac02a353263c7e3152563
| 31
|
py
|
Python
|
halotools/empirical_models/composite_models/sfr_models/__init__.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 83
|
2015-01-15T14:54:16.000Z
|
2021-12-09T11:28:02.000Z
|
halotools/empirical_models/composite_models/sfr_models/__init__.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 579
|
2015-01-14T15:57:37.000Z
|
2022-01-13T18:58:44.000Z
|
halotools/empirical_models/composite_models/sfr_models/__init__.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 70
|
2015-01-14T15:15:58.000Z
|
2021-12-22T18:18:31.000Z
|
from .smhm_binary_sfr import *
| 15.5
| 30
| 0.806452
| 5
| 31
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
75dbef1aba6b4cc33bb33e4df08bc12d7838195c
| 189
|
py
|
Python
|
play-1.2.4/python/Lib/site-packages/readline.py
|
AppSecAI-TEST/restcommander
|
a2523f31356938f5c7fc6d379b7678da0b1e077a
|
[
"Apache-2.0"
] | 550
|
2015-01-05T16:59:00.000Z
|
2022-03-20T16:55:25.000Z
|
framework/python/Lib/site-packages/readline.py
|
lafayette/JBTT
|
94bde9d90abbb274d29ecd82e632d43a4320876e
|
[
"MIT"
] | 15
|
2015-02-05T06:00:47.000Z
|
2018-07-07T14:34:04.000Z
|
framework/python/Lib/site-packages/readline.py
|
lafayette/JBTT
|
94bde9d90abbb274d29ecd82e632d43a4320876e
|
[
"MIT"
] | 119
|
2015-01-08T00:48:24.000Z
|
2022-01-27T14:13:15.000Z
|
# -*- coding: UTF-8 -*-
#this file is needed in site-packages to emulate readline
#necessary for rlcompleter since it relies on the existence
#of a readline module
from pyreadline import *
| 31.5
| 59
| 0.761905
| 29
| 189
| 4.965517
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.164021
| 189
| 5
| 60
| 37.8
| 0.905063
| 0.820106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2f041e9d8dee51da93474cdd949930d98bd58053
| 27,816
|
py
|
Python
|
v1/backend/src/rankings/urls.py
|
avgupta456/statbotics
|
8847cec161104ec54f4c501653cd4ec558d30379
|
[
"MIT"
] | 14
|
2020-05-28T21:54:45.000Z
|
2022-03-17T19:39:23.000Z
|
v1/backend/src/rankings/urls.py
|
avgupta456/statbotics
|
8847cec161104ec54f4c501653cd4ec558d30379
|
[
"MIT"
] | 59
|
2020-05-28T21:39:45.000Z
|
2022-03-25T23:51:39.000Z
|
backend/src/rankings/urls.py
|
statbotics/statbotics
|
37bb0e3730d5b3aff7b6a5ba6e78ef2eada950bc
|
[
"MIT"
] | 1
|
2020-07-04T07:30:40.000Z
|
2020-07-04T07:30:40.000Z
|
# type: ignore
from django.conf.urls import url
from django.urls import include, path
from drf_yasg import openapi
from drf_yasg.views import SwaggerUIRenderer, get_schema_view
from rest_framework import permissions, routers
from src.rankings.views import (
event_pred_views,
event_views,
match_views,
model_views,
team_event_views,
team_match_views,
team_views,
team_year_views,
year_views,
)
# Swap in the project's Swagger template.  # monkey-patching is bad :(
SwaggerUIRenderer.template = "drf-yasg.html"

# Publicly accessible schema view backing the v1 API docs.
_api_info = openapi.Info(
    title="Statbotics.io API",
    default_version="v1",
)
schema_view = get_schema_view(
    _api_info,
    public=True,
    permission_classes=(permissions.AllowAny,),
)
# Raw model endpoints exposed through DRF's default router.
# Each entry: (url prefix, viewset, basename).
router = routers.DefaultRouter()
for _prefix, _viewset, _basename in (
    (r"_years", model_views.YearView, "year"),
    (r"_teams", model_views.TeamView, "team"),
    (r"_team_years", model_views.TeamYearView, "team_year"),
    (r"_events", model_views.EventView, "event"),
    (r"_team_events", model_views.TeamEventView, "team_event"),
    (r"_matches", model_views.MatchView, "match"),
    (r"_team_matches", model_views.TeamMatchView, "team_match"),
):
    router.register(_prefix, _viewset, _basename)
# commented out url patterns still need models
# NOTE(review): django.conf.urls.url() is deprecated and removed in Django 4.0;
# re_path() from django.urls is the drop-in replacement — confirm Django version.
urlpatterns = [
    # Swagger UI rendering the schema_view defined above.
    url(
        r"^swagger/$",
        schema_view.with_ui("swagger", cache_timeout=0),
        name="schema-swagger-ui",
    ),
]
"""TEAMS"""
urlpatterns.extend(
[
path("v1/team/<num>", team_views.Team),
path("v1/teams", team_views.Teams),
path("v1/teams/", team_views.Teams),
path("v1/teams/by/<metric>", team_views.TeamsByMetric),
path("v1/teams/active", team_views.TeamsActive),
path("v1/teams/active/by/<metric>", team_views.TeamsActiveByMetric),
path("v1/teams/country/<country>", team_views._Teams),
path("v1/teams/country/<country>/by/<metric>", team_views._Teams),
path("v1/teams/country/<country>/active", team_views._TeamsActive),
path("v1/teams/country/<country>/active/by/<metric>", team_views._TeamsActive),
path("v1/teams/country/<country>/state/<state>", team_views._Teams),
path("v1/teams/country/<country>/state/<state>/by/<metric>", team_views._Teams),
path(
"v1/teams/country/<country>/state/<state>/active", team_views._TeamsActive
),
path(
"v1/teams/country/<country>/state/<state>/active/by/<metric>",
team_views._TeamsActive,
),
path("v1/teams/district/<district>", team_views._Teams),
path("v1/teams/district/<district>/by/<metric>", team_views._Teams),
path("v1/teams/district/<district>/active", team_views._TeamsActive),
path(
"v1/teams/district/<district>/active/by/<metric>", team_views._TeamsActive
),
]
)
"""TEAM EVENTS"""
urlpatterns.extend(
[
path("v1/team_year/team/<num>/year/<year>", team_year_views.TeamYear),
path("v1/team_years/team/<num>", team_year_views.TeamYearsNum),
path(
"v1/team_years/team/<num>/by/<metric>",
team_year_views.TeamYearsNumByMetric,
),
path("v1/team_years/year/<year>", team_year_views.TeamYearsYear),
path(
"v1/team_years/year/<year>/by/<metric>",
team_year_views.TeamYearsYearByMetric,
),
path("v1/team_years/year/<year>/country/<country>", team_year_views._TeamYears),
path(
"v1/team_years/year/<year>/country/<country>/by/<metric>",
team_year_views._TeamYears,
),
path(
"v1/team_years/year/<year>/country/<country>/state/<state>",
team_year_views._TeamYears,
),
path(
"v1/team_years/year/<year>/country/<country>/state/<state>/by/<metric>",
team_year_views._TeamYears,
),
path(
"v1/team_years/year/<year>/district/<district>", team_year_views._TeamYears
),
path(
"v1/team_years/year/<year>/district/<district>/by/<metric>",
team_year_views._TeamYears,
),
path("v1/team_years/", team_year_views.TeamYears),
path("v1/team_years/page/<page>", team_year_views._TeamYears),
path("v1/team_years/by/<metric>", team_year_views.TeamYearsByMetric),
path("v1/team_years/by/<metric>/page/<page>", team_year_views._TeamYears),
path("v1/team_years/country/<country>", team_year_views._TeamYears),
path("v1/team_years/country/<country>/page/<page>", team_year_views._TeamYears),
path("v1/team_years/country/<country>/by/<metric>", team_year_views._TeamYears),
path(
"v1/team_years/country/<country>/by/<metric>/page/<page>",
team_year_views._TeamYears,
),
path(
"v1/team_years/country/<country>/state/<state>", team_year_views._TeamYears
),
path(
"v1/team_years/country/<country>/state/<state>/by/<metric>",
team_year_views._TeamYears,
),
path("v1/team_years/district/<district>", team_year_views._TeamYears),
path(
"v1/team_years/district/<district>/by/<metric>", team_year_views._TeamYears
),
]
)
"""TEAM EVENTS"""
urlpatterns.extend(
[
path("v1/team_event/team/<num>/event/<event>", team_event_views.TeamEvent),
path("v1/team_events/team/<num>", team_event_views.TeamEventsNum),
path(
"v1/team_events/team/<num>/by/<metric>",
team_event_views.TeamEventsNumByMetric,
),
path("v1/team_events/team/<num>/type/<type>", team_event_views._TeamEvents),
path(
"v1/team_events/team/<num>/type/<type>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/team/<num>/week/<week>", team_event_views._TeamEvents),
path(
"v1/team_events/team/<num>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/type/<type>/week/<week>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/type/<type>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/year/<year>", team_event_views.TeamEventsYear),
path("v1/team_events/year/<year>/page/<page>", team_event_views._TeamEvents),
path(
"v1/team_events/year/<year>/by/<metric>",
team_event_views.TeamEventsYearByMetric,
),
path(
"v1/team_events/year/<year>/by/<metric>/page/<page>",
team_event_views._TeamEvents,
),
path("v1/team_events/year/<year>/type/<type>", team_event_views._TeamEvents),
path(
"v1/team_events/year/<year>/type/<type>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/year/<year>/week/<week>", team_event_views._TeamEvents),
path(
"v1/team_events/year/<year>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/country/<country>/page/<page>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/country/<country>/by/<metric>/page/<page>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/year/<year>/type/<type>", team_event_views._TeamEvents),
path(
"v1/team_events/year/<year>/type/<type>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/year/<year>/week/<week>", team_event_views._TeamEvents),
path(
"v1/team_events/year/<year>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/week/<week>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/week/<week>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/week/<week>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/week/<week>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/week/<week>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/week/<week>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/year/<year>/type/<type>/week/<week>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/year/<year>", team_event_views.TeamEventsNumYear
),
path(
"v1/team_events/team/<num>/year/<year>/by/<metric>",
team_event_views.TeamEventsNumYearByMetric,
),
path(
"v1/team_events/team/<num>/year/<year>/type/<type>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/year/<year>/type/<type>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/year/<year>/week/<week>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/year/<year>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/year/<year>/type/<type>/week/<week>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/team/<num>/year/<year>/type/<type>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/event/<event>", team_event_views.TeamEventsEvent),
path(
"v1/team_events/event/<event>/by/<metric>",
team_event_views.TeamEventsEventByMetric,
),
path("v1/team_events", team_event_views.TeamEvents),
path("v1/team_events/page/<page>", team_event_views._TeamEvents),
path("v1/team_events/country/<country>", team_event_views._TeamEvents),
path(
"v1/team_events/country/<country>/page/<page>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/district/<district>", team_event_views._TeamEvents),
path(
"v1/team_events/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/type/<type>", team_event_views._TeamEvents),
path("v1/team_events/type/<type>/by/<metric>", team_event_views._TeamEvents),
path(
"v1/team_events/type/<type>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/week/<week>", team_event_views._TeamEvents),
path("v1/team_events/week/<week>/by/<metric>", team_event_views._TeamEvents),
path(
"v1/team_events/week/<week>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/week/<week>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/week/<week>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/week/<week>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/week/<week>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/week/<week>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
path("v1/team_events/type/<type>/week/<week>", team_event_views._TeamEvents),
path(
"v1/team_events/type/<type>/week/<week>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/week/<week>/country/<country>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/week/<week>/country/<country>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/week/<week>/country/<country>/state/<state>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/week/<week>/country/<country>/state/<state>/by/<metric>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/week/<week>/district/<district>",
team_event_views._TeamEvents,
),
path(
"v1/team_events/type/<type>/week/<week>/district/<district>/by/<metric>",
team_event_views._TeamEvents,
),
]
)
"""TEAM MATCHES"""
urlpatterns.extend(
[
path("v1/team_match/team/<num>/match/<match>", team_match_views.TeamMatch),
path("v1/team_matches", team_match_views.TeamMatches),
path("v1/team_matches/elims", team_match_views._TeamMatchesElim),
path("v1/team_matches/page/<page>", team_match_views._TeamMatches),
path("v1/team_matches/page/<page>/elims", team_match_views._TeamMatchesElim),
path("v1/team_matches/team/<num>", team_match_views.TeamMatchesTeam),
path("v1/team_matches/team/<num>/elims", team_match_views._TeamMatchesElim),
path("v1/team_matches/year/<year>", team_match_views.TeamMatchesYear),
path("v1/team_matches/year/<year>/elims", team_match_views._TeamMatchesElim),
path("v1/team_matches/year/<year>/page/<page>", team_match_views._TeamMatches),
path(
"v1/team_matches/year/<year>/page/<page>",
team_match_views._TeamMatchesElim,
),
path("v1/team_matches/event/<event>", team_match_views.TeamMatchesEvent),
path("v1/team_matches/event/<event>/elims", team_match_views._TeamMatchesElim),
path("v1/team_matches/match/<match>", team_match_views.TeamMatchesMatch),
path(
"v1/team_matches/team/<num>/year/<year>",
team_match_views.TeamMatchesTeamYear,
),
path(
"v1/team_matches/team/<num>/year/<year>/elims",
team_match_views._TeamMatchesElim,
),
path(
"v1/team_matches/team/<num>/event/<event>",
team_match_views.TeamMatchesTeamEvent,
),
path(
"v1/team_matches/team/<num>/event/<event>/elims",
team_match_views._TeamMatchesElim,
),
]
)
"""YEARS"""
urlpatterns.extend(
[
path("v1/year/<year>", year_views.Year),
path("v1/years", year_views.Years),
path("v1/years/by/<metric>", year_views.YearsByMetric),
]
)
"""EVENTS"""
urlpatterns.extend(
[
path("v1/event/<event>", event_views.Event),
path("v1/events", event_views.Events),
path("v1/events/by/<metric>", event_views.EventsByMetric),
path("v1/events/country/<country>", event_views._Events),
path("v1/events/country/<country>/by/<metric>", event_views._Events),
path("v1/events/country/<country>/state/<state>", event_views._Events),
path(
"v1/events/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path("v1/events/district/<district>", event_views._Events),
path("v1/events/district/<district>/by/<metric>", event_views._Events),
path("v1/events/type/<type>", event_views._Events),
path("v1/events/type/<type>/by/<metric>", event_views._Events),
path("v1/events/type/<type>/country/<country>", event_views._Events),
path(
"v1/events/type/<type>/country/<country>/by/<metric>", event_views._Events
),
path(
"v1/events/type/<type>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/type/<type>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path("v1/events/type/<type>/district/<district>", event_views._Events),
path(
"v1/events/type/<type>/district/<district>/by/<metric>",
event_views._Events,
),
path("v1/events/week/<week>", event_views._Events),
path("v1/events/week/<week>/by/<metric>", event_views._Events),
path("v1/events/week/<week>/country/<country>", event_views._Events),
path(
"v1/events/week/<week>/country/<country>/by/<metric>", event_views._Events
),
path(
"v1/events/week/<week>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/week/<week>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path("v1/events/week/<week>/district/<district>", event_views._Events),
path(
"v1/events/week/<week>/district/<district>/by/<metric>",
event_views._Events,
),
path("v1/events/type/<type>/week/<week>", event_views._Events),
path("v1/events/type/<type>/week/<week>/by/<metric>", event_views._Events),
path(
"v1/events/type/<type>/week/<week>/country/<country>", event_views._Events
),
path(
"v1/events/type/<type>/week/<week>/country/<country>/by/<metric>",
event_views._Events,
),
path(
"v1/events/type/<type>/week/<week>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/type/<type>/week/<week>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path(
"v1/events/type/<type>/week/<week>/district/<district>",
event_views._Events,
),
path(
"v1/events/type/<type>/week/<week>/district/<district>/by/<metric>",
event_views._Events,
),
path("v1/events/year/<year>", event_views.EventsYear),
path("v1/events/year/<year>/by/<metric>", event_views.EventsYearByMetric),
path("v1/events/year/<year>/country/<country>", event_views._Events),
path(
"v1/events/year/<year>/country/<country>/by/<metric>", event_views._Events
),
path(
"v1/events/year/<year>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/year/<year>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path("v1/events/year/<year>/district/<district>", event_views._Events),
path(
"v1/events/year/<year>/district/<district>/by/<metric>",
event_views._Events,
),
path("v1/events/year/<year>/type/<type>", event_views._Events),
path("v1/events/year/<year>/type/<type>/by/<metric>", event_views._Events),
path(
"v1/events/year/<year>/type/<type>/country/<country>", event_views._Events
),
path(
"v1/events/year/<year>/type/<type>/country/<country>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/district/<district>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/district/<district>/by/<metric>",
event_views._Events,
),
path("v1/events/year/<year>/week/<week>", event_views._Events),
path("v1/events/year/<year>/week/<week>/by/<metric>", event_views._Events),
path(
"v1/events/year/<year>/week/<week>/country/<country>", event_views._Events
),
path(
"v1/events/year/<year>/week/<week>/country/<country>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/week/<week>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/year/<year>/week/<week>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/week/<week>/district/<district>",
event_views._Events,
),
path(
"v1/events/year/<year>/week/<week>/district/<district>/by/<metric>",
event_views._Events,
),
path("v1/events/year/<year>/type/<type>/week/<week>", event_views._Events),
path(
"v1/events/year/<year>/type/<type>/week/<week>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/week/<week>/country/<country>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/week/<week>/country/<country>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/week/<week>/country/<country>/state/<state>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/week/<week>/country/<country>/state/<state>/by/<metric>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/week/<week>/district/<district>",
event_views._Events,
),
path(
"v1/events/year/<year>/type/<type>/week/<week>/district/<district>/by/<metric>",
event_views._Events,
),
]
)
"""MATCHES"""
urlpatterns.extend(
[
path("v1/match/<match>", match_views.Match),
path("v1/matches", match_views.Matches),
path("v1/matches/page/<page>", match_views._Matches),
path("v1/matches/elims", match_views._MatchesElim),
path("v1/matches/elims/page/<page>", match_views._MatchesElim),
path("v1/matches/year/<year>", match_views.MatchesYear),
path("v1/matches/year/<year>/elims", match_views._MatchesElim),
path("v1/matches/year/<year>/page/<page>", match_views._Matches),
path("v1/matches/event/<event>", match_views.MatchesEvent),
path("v1/matches/event/<event>/elims", match_views._MatchesElim),
]
)
"""EVENT SIM"""
urlpatterns.extend(
[
path("v1/event_sim/event/<event>/simple", event_pred_views.QuickSim),
path("v1/event_sim/event/<event>/full", event_pred_views.Sim),
path(
"v1/event_sim/event/<event>/full/iterations/<iterations>",
event_pred_views.Sim,
),
path(
"v1/event_sim/event/<event>/index/<index>/simple", event_pred_views.MeanSim
),
path(
"v1/event_sim/event/<event>/index/<index>/full", event_pred_views.IndexSim
),
path(
"v1/event_sim/event/<event>/index/<index>/full/iterations/<iterations>",
event_pred_views.IndexSim,
),
]
)
urlpatterns.append(path("v1/", include(router.urls)))
| 38.473029
| 109
| 0.579702
| 3,094
| 27,816
| 4.969619
| 0.04331
| 0.092482
| 0.087799
| 0.095734
| 0.872724
| 0.831491
| 0.801249
| 0.782193
| 0.72405
| 0.700963
| 0
| 0.011523
| 0.25435
| 27,816
| 722
| 110
| 38.526316
| 0.729811
| 0.002984
| 0
| 0.572453
| 0
| 0.021521
| 0.399421
| 0.385081
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008608
| 0
| 0.008608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f93c37f8dbb1e504769add17add9cf3550c2f7bc
| 8,632
|
py
|
Python
|
modules/obsolete_modules/modules_skip_connection.py
|
ravi-0841/spect-pitch-gan
|
ea4b9ea8396df753e25e0b2cb210288f683d3903
|
[
"MIT"
] | null | null | null |
modules/obsolete_modules/modules_skip_connection.py
|
ravi-0841/spect-pitch-gan
|
ea4b9ea8396df753e25e0b2cb210288f683d3903
|
[
"MIT"
] | null | null | null |
modules/obsolete_modules/modules_skip_connection.py
|
ravi-0841/spect-pitch-gan
|
ea4b9ea8396df753e25e0b2cb210288f683d3903
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from modules.base_modules_default_init import *
def sampler(input_pitch, input_mfc, final_filters=1, reuse=False,
            scope_name='sampler_generator'):
    """Build the TF1 'sampler' generator graph.

    Args:
        input_pitch: pitch features, shape [batch_size, num_features, time]
            (per the comment below).
        input_mfc: MFC features, same layout.
        final_filters: channel count of the output conv (default 1).
        reuse: reuse variables in `scope_name` instead of creating them.
        scope_name: tf.variable_scope the layers are built under.

    Returns:
        Output tensor transposed back to [batch_size, final_filters, time].

    NOTE(review): conv1d_layer / gated_linear_layer / *1d_block come from
    modules.base_modules_default_init (not visible here); behavior assumed
    from usage — verify there.
    """
    # Inputs have shape [batch_size, num_features, time]; transpose to
    # [batch, time, features] for the 1-D conv stacks.
    input_mfc_transposed = tf.transpose(input_mfc, perm=[0, 2, 1],
                                        name='sampler_input_mfc_transposed')
    input_pitch_transposed = tf.transpose(input_pitch, perm=[0, 2, 1],
                                          name='sampler_input_pitch_transposed')
    with tf.variable_scope(scope_name) as scope:
        # Variables are reused when the graph is built a second time
        # (e.g. within CycleGAN training).
        if reuse:
            scope.reuse_variables()
        else:
            assert scope.reuse is False
        # Gated conv front-end: separate conv + gate paths per input stream.
        h1_mfc = conv1d_layer(inputs=input_mfc_transposed, filters=64,
                              kernel_size=15, strides=1, activation=None,
                              name='h1_mfc_conv')
        h1_mfc_gates = conv1d_layer(inputs=input_mfc_transposed, filters=64,
                                    kernel_size=15, strides=1, activation=None,
                                    name='h1_mfc_conv_gates')
        h1_mfc_glu = gated_linear_layer(inputs=h1_mfc, gates=h1_mfc_gates,
                                        name='h1_mfc_glu')
        h1_pitch = conv1d_layer(inputs=input_pitch_transposed, filters=16,
                                kernel_size=15, strides=1, activation=None,
                                name='h1_pitch_conv')
        h1_pitch_gates = conv1d_layer(inputs=input_pitch_transposed, filters=16,
                                      kernel_size=15, strides=1, activation=None,
                                      name='h1_pitch_conv_gates')
        h1_pitch_glu = gated_linear_layer(inputs=h1_pitch, gates=h1_pitch_gates,
                                          name='h1_pitch_glu')
        # Merge the two streams along the channel axis.
        h1_glu = tf.concat([h1_mfc_glu, h1_pitch_glu], axis=-1, name='concat_glu')
        # Downsample
        d1 = downsample1d_block(inputs=h1_glu, filters=128,
                                kernel_size=5, strides=2,
                                name_prefix='downsample1d_block1_')
        d2 = downsample1d_block(inputs=d1, filters=256,
                                kernel_size=5, strides=2,
                                name_prefix='downsample1d_block2_')
        # Residual blocks
        r1 = residual1d_block(inputs=d2, filters=512,
                              kernel_size=3, strides=1,
                              name_prefix='residual1d_block1_')
        r2 = residual1d_block(inputs=r1, filters=512,
                              kernel_size=3, strides=1,
                              name_prefix='residual1d_block2_')
        # Upsample
        u1 = upsample1d_block(inputs=r2, filters=512,
                              kernel_size=5, strides=1,
                              shuffle_size=2, name_prefix='upsample1d_block1_')
        u2 = upsample1d_block(inputs=u1, filters=256,
                              kernel_size=5, strides=1,
                              shuffle_size=2, name_prefix='upsample1d_block2_')
        # Dropout for stochasticity
        u2 = tf.nn.dropout(u2, keep_prob=0.5)
        # Output
        o1 = conv1d_layer(inputs=u2, filters=final_filters,
                          kernel_size=15, strides=1,
                          activation=None, name='o1_conv')
        # Back to [batch, channels, time].
        o2 = tf.transpose(o1, perm=[0, 2, 1], name='output_transpose')
    return o2
def generator(input_pitch, input_mfc, final_filters=23, reuse=False,
              scope_name='generator'):
    """Build the TF1 generator graph with skip connections.

    Same topology family as `sampler`, but with 32-filter front-ends,
    three residual blocks, and two tf.add skip connections from the
    downsample path (d1, h1_glu) into the upsample path.

    Args:
        input_pitch: pitch features, shape [batch_size, num_features, time]
            (per the comment below).
        input_mfc: MFC features, same layout.
        final_filters: channel count of the output conv (default 23).
        reuse: reuse variables in `scope_name` instead of creating them.
        scope_name: tf.variable_scope the layers are built under.

    Returns:
        Output tensor transposed back to [batch_size, final_filters, time].
    """
    # Inputs have shape [batch_size, num_features, time]; transpose to
    # [batch, time, features] for the 1-D conv stacks.
    input_mfc_transposed = tf.transpose(input_mfc, perm=[0, 2, 1],
                                        name='generator_input_mfc_transposed')
    input_pitch_transposed = tf.transpose(input_pitch, perm=[0, 2, 1],
                                          name='generator_input_pitch_transposed')
    with tf.variable_scope(scope_name) as scope:
        # Variables are reused when the graph is built a second time
        # (e.g. within CycleGAN training).
        if reuse:
            scope.reuse_variables()
        else:
            assert scope.reuse is False
        # Gated conv front-end: separate conv + gate paths per input stream.
        h1_mfc = conv1d_layer(inputs=input_mfc_transposed, filters=32,
                              kernel_size=15, strides=1, activation=None,
                              name='h1_mfc_conv')
        h1_mfc_gates = conv1d_layer(inputs=input_mfc_transposed, filters=32,
                                    kernel_size=15, strides=1, activation=None,
                                    name='h1_mfc_conv_gates')
        h1_mfc_glu = gated_linear_layer(inputs=h1_mfc, gates=h1_mfc_gates,
                                        name='h1_mfc_glu')
        h1_pitch = conv1d_layer(inputs=input_pitch_transposed, filters=32,
                                kernel_size=15, strides=1, activation=None,
                                name='h1_pitch_conv')
        h1_pitch_gates = conv1d_layer(inputs=input_pitch_transposed, filters=32,
                                      kernel_size=15, strides=1, activation=None,
                                      name='h1_pitch_conv_gates')
        h1_pitch_glu = gated_linear_layer(inputs=h1_pitch, gates=h1_pitch_gates,
                                          name='h1_pitch_glu')
        # Merge the two streams along the channel axis.
        h1_glu = tf.concat([h1_mfc_glu, h1_pitch_glu], axis=-1, name='concat_glu')
        # Downsample
        d1 = downsample1d_block(inputs=h1_glu, filters=128,
                                kernel_size=5, strides=2,
                                name_prefix='downsample1d_block1_')
        d2 = downsample1d_block(inputs=d1, filters=256,
                                kernel_size=5, strides=2,
                                name_prefix='downsample1d_block2_')
        # Residual blocks
        r1 = residual1d_block(inputs=d2, filters=512,
                              kernel_size=3, strides=1,
                              name_prefix='residual1d_block1_')
        r2 = residual1d_block(inputs=r1, filters=512,
                              kernel_size=3, strides=1,
                              name_prefix='residual1d_block2_')
        r3 = residual1d_block(inputs=r2, filters=512,
                              kernel_size=3, strides=1,
                              name_prefix='residual1d_block3_')
        # Upsample, adding skip connections from the downsample path.
        u1 = upsample1d_block(inputs=r3, filters=256,
                              kernel_size=5, strides=1, shuffle_size=2,
                              name_prefix='upsample1d_block1_')
        u1 = tf.add(u1, d1, name='add_downsample_1')
        u2 = upsample1d_block(inputs=u1, filters=128,
                              kernel_size=5, strides=1, shuffle_size=2,
                              name_prefix='upsample1d_block2_')
        u2 = tf.add(u2, h1_glu, name='add_downsample_2')
        # Dropout for stochasticity
        u2 = tf.nn.dropout(u2, keep_prob=0.5)
        # Output
        o1 = conv1d_layer(inputs=u2, filters=final_filters,
                          kernel_size=15, strides=1,
                          activation=None, name='o1_conv')
        # Back to [batch, channels, time].
        o2 = tf.transpose(o1, perm=[0, 2, 1], name='output_transpose')
    return o2
def discriminator(input_mfc, input_pitch,
                  reuse=False, scope_name='discriminator'):
    """Patch discriminator over MFC and pitch feature streams.

    Both inputs arrive as [batch_size, num_features, time]; they are
    transposed to [batch_size, time, num_features] before the 1-D convs.
    Returns sigmoid scores from a dense output layer.
    """
    mfc = tf.transpose(input_mfc, perm=[0, 2, 1],
                       name='discriminator_mfc_transpose')
    pitch = tf.transpose(input_pitch, perm=[0, 2, 1],
                         name='discriminator_pitch_transpose')
    with tf.variable_scope(scope_name) as scope:
        # The discriminator is reused in CycleGAN (real vs. generated pass).
        if reuse:
            scope.reuse_variables()
        else:
            assert scope.reuse is False
        # Gated linear unit on each stream, then channel-wise concat.
        mfc_conv = conv1d_layer(inputs=mfc, filters=64,
                                kernel_size=3, strides=1,
                                activation=None, name='h1_mfc_conv')
        mfc_gate = conv1d_layer(inputs=mfc, filters=64,
                                kernel_size=3, strides=1,
                                activation=None, name='h1_mfc_conv_gates')
        mfc_glu = gated_linear_layer(inputs=mfc_conv, gates=mfc_gate,
                                     name='h1_mfc_glu')
        pitch_conv = conv1d_layer(inputs=pitch, filters=64,
                                  kernel_size=3, strides=1,
                                  activation=None, name='h1_pitch_conv')
        pitch_gate = conv1d_layer(inputs=pitch, filters=64,
                                  kernel_size=3, strides=1,
                                  activation=None, name='h1_pitch_conv_gates')
        pitch_glu = gated_linear_layer(inputs=pitch_conv, gates=pitch_gate,
                                       name='h1_pitch_glu')
        merged = tf.concat([mfc_glu, pitch_glu], axis=-1,
                           name='concat_inputs')
        # Three stride-2 downsampling blocks.
        down = downsample1d_block(inputs=merged, filters=128,
                                  kernel_size=3, strides=2,
                                  name_prefix='downsample2d_block1_')
        down = downsample1d_block(inputs=down, filters=256,
                                  kernel_size=3, strides=2,
                                  name_prefix='downsample2d_block2_')
        down = downsample1d_block(inputs=down, filters=256,
                                  kernel_size=3, strides=2,
                                  name_prefix='downsample2d_block3_')
        # Output: per-position realness score in (0, 1).
        return tf.layers.dense(inputs=down, units=1,
                               activation=tf.nn.sigmoid)
| 40.909953
| 82
| 0.605422
| 1,054
| 8,632
| 4.643264
| 0.105313
| 0.0613
| 0.048631
| 0.062934
| 0.910298
| 0.899877
| 0.870045
| 0.85615
| 0.842256
| 0.829587
| 0
| 0.05928
| 0.298424
| 8,632
| 210
| 83
| 41.104762
| 0.748844
| 0.051089
| 0
| 0.585526
| 0
| 0
| 0.106937
| 0.021534
| 0
| 0
| 0
| 0
| 0.019737
| 1
| 0.019737
| false
| 0
| 0.013158
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f95982d3734838b526a7c2aba805915b20f51c98
| 11,782
|
py
|
Python
|
delay_gp_funcs.py
|
ciiram/PyPol_II
|
50cf1d9f7a33d26f9d09a0fb90bb9bf9d2eee60a
|
[
"BSD-3-Clause"
] | 2
|
2017-09-29T07:27:20.000Z
|
2019-12-11T14:56:31.000Z
|
delay_gp_funcs.py
|
ciiram/PyPol_II
|
50cf1d9f7a33d26f9d09a0fb90bb9bf9d2eee60a
|
[
"BSD-3-Clause"
] | null | null | null |
delay_gp_funcs.py
|
ciiram/PyPol_II
|
50cf1d9f7a33d26f9d09a0fb90bb9bf9d2eee60a
|
[
"BSD-3-Clause"
] | null | null | null |
# This file contains a number of useful function definitions for implementing the
# delay estimation using a Gaussian process framework without convolution
#
# Ciira wa Maina, 2014
# Dedan Kimathi University of Technology.
# Nyeri-Kenya
import pylab as pb
import numpy as np
import scipy as sp
from scipy import integrate
from scipy import special
from scipy.optimize import fmin_tnc
import scipy.linalg
import sys
def rbf2(t1, t2, sigma, l):
    """Squared-exponential (RBF) kernel matrix between two time vectors.

    Returns an array of shape (len(t1), len(t2)) with entries
    sigma^2 * exp(-(t1_i - t2_j)^2 / (2 l^2)).
    """
    diff = t1[:, None] - t2[None, :]
    scale = 1.0 / (2.0 * l * l)
    return sigma * sigma * np.exp(-scale * diff * diff)
def genCov_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs):
    '''
    Covariance of the observed gene-segment profiles under the delay model.
    Input:
    t: concatenated observation times of all segments
    alpha: per-segment scale factors
    D: are the delays (one per segment)
    sigma_rbf,l_rbf are the latent function parameters
    noise_std: per-segment noise standard deviations
    num_seg, len_seg_obs: number of segments / observations per segment
    Returns the full (len(t) x len(t)) covariance matrix.
    '''
    K=np.zeros((len(t),len(t)))#assume len(t) == sum(len_seg_obs)
    # Block offsets: indx[i]:indx[i+1] are the rows/cols of segment i.
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    for i in range(0,num_seg):
        for j in range(i,num_seg):
            if i==j:
                # Diagonal block: the delay cancels, so use raw times and
                # add the segment's observation noise on the diagonal.
                K[indx[i]:indx[i+1],indx[j]:indx[j+1]]=alpha[i]*alpha[i]*rbf2(t[indx[i]:indx[i+1]],t[indx[i]:indx[i+1]],sigma_rbf,l_rbf)+noise_std[i]*noise_std[i]*np.eye(len_seg_obs[i])
            else:
                # Cross block: shift each segment's times by its own delay.
                K[indx[i]:indx[i+1],indx[j]:indx[j+1]]=alpha[i]*alpha[j]*rbf2(t[indx[i]:indx[i+1]]-D[i],t[indx[j]:indx[j+1]]-D[j],sigma_rbf,l_rbf)
            if i!=j:
                # Mirror the upper-triangular block to keep K symmetric.
                K[indx[j]:indx[j+1],indx[i]:indx[i+1]]=K[indx[i]:indx[i+1],indx[j]:indx[j+1]].T
    return K
def genCov_l_f_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs):
    '''
    Gradient of the covariance matrix w.r.t. the RBF length scale l_rbf.
    Input:
    t: concatenated observation times of all segments
    alpha: per-segment scale factors
    D: are the delays (one per segment)
    sigma_rbf,l_rbf are the latent function parameters
    noise_std: unused here (the noise term does not depend on l_rbf)
    num_seg, len_seg_obs: number of segments / observations per segment

    For the SE kernel, d/dl of sigma^2*exp(-d^2/(2 l^2)) = cov * d^2 / l^3,
    where d is the (delay-shifted) time difference of the two inputs.
    '''
    K=np.zeros((len(t),len(t)))#assume len(t) == sum(len_seg_obs)
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    for i in range(0,num_seg):
        for j in range(i,num_seg):
            t1=t[indx[i]:indx[i+1]]
            t2=t[indx[j]:indx[j+1]]
            T_d=(t1[:,None]-t2[None,:])
            if i==j:
                # Diagonal block: delays cancel, so d = T_d.
                K[indx[i]:indx[i+1],indx[j]:indx[j+1]]=alpha[i]*alpha[i]*rbf2(t1,t1,sigma_rbf,l_rbf)*T_d*T_d*(1/(l_rbf**3))
            else:
                # Cross block: d = (t1-D[i]) - (t2-D[j]) = T_d - D[i] + D[j].
                # BUG FIX: the original used (T_d+D[j])^2, dropping the -D[i]
                # term; that is only correct when i == 0 (D[0] == 0). The
                # corrected form matches genCov_D_delay's shifted difference.
                d_shift=T_d-D[i]+D[j]
                K[indx[i]:indx[i+1],indx[j]:indx[j+1]]=alpha[i]*alpha[j]*rbf2(t1-D[i],t2-D[j],sigma_rbf,l_rbf)*d_shift*d_shift*(1/(l_rbf**3))
            if i!=j:
                # Mirror to keep the gradient matrix symmetric.
                K[indx[j]:indx[j+1],indx[i]:indx[i+1]]=K[indx[i]:indx[i+1],indx[j]:indx[j+1]].T
    return K
def genCov_alpha_i_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,seg,len_seg_obs):
    '''
    Gradient of the covariance matrix w.r.t. the scale factor alpha[seg].
    Only the row of blocks involving `seg` is filled; returning K+K.T
    symmetrises it, and on the (seg,seg) block the doubling yields the
    required factor 2 from d(alpha[seg]^2)/d(alpha[seg]).
    Input:
    D: are the delays
    sigma_rbf,l_rbf are the latent function parameters
    noise_std: unused (the noise term does not depend on alpha)
    '''
    K=np.zeros((len(t),len(t)))#assume len(t) == sum(len_seg_obs)
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    for j in range(0,num_seg):
        if seg==j:
            # Diagonal block (delay shift cancels between the two arguments).
            K[indx[seg]:indx[seg+1],indx[j]:indx[j+1]]=alpha[seg]*rbf2(t[indx[seg]:indx[seg+1]]-D[seg],t[indx[seg]:indx[seg+1]]-D[seg],sigma_rbf,l_rbf)
        else:
            K[indx[seg]:indx[seg+1],indx[j]:indx[j+1]]=alpha[j]*rbf2(t[indx[seg]:indx[seg+1]]-D[seg],t[indx[j]:indx[j+1]]-D[j],sigma_rbf,l_rbf)
    return K+K.T
def genCov_D_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,seg,len_seg_obs):
    '''
    Gradient of the covariance matrix w.r.t. the delay D[seg].
    Only the row/column blocks involving `seg` are non-zero; the (seg,seg)
    block vanishes because the delay cancels there.
    Input:
    D: are the delays
    sigma_rbf,l_rbf are the latent function parameters
    noise_std: unused (the noise term does not depend on D)
    '''
    K=np.zeros((len(t),len(t)))#assume len(t) == sum(len_seg_obs)
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    for j in range(0,num_seg):
        t1=t[indx[seg]:indx[seg+1]]
        t2=t[indx[j]:indx[j+1]]
        T_d=(t1[:,None]-t2[None,:])
        if seg==j:
            # Derivative of the diagonal block w.r.t. D[seg] is zero.
            K[indx[seg]:indx[seg+1],indx[j]:indx[j+1]]=np.zeros((len(t1),len(t2)))
        else:
            # Chain rule on the SE exponent: cov * (1/l^2) * (T_d - D[seg] + D[j]).
            K[indx[seg]:indx[seg+1],indx[j]:indx[j+1]]=alpha[seg]*alpha[j]*rbf2(t1-D[seg],t2-D[j],sigma_rbf,l_rbf)*(1.0/(l_rbf*l_rbf))*(T_d-D[seg]+D[j])
            # Transposed block keeps the gradient matrix symmetric.
            # NOTE(review): indentation reconstructed — this line is placed
            # inside the else branch; executing it when seg==j would clobber
            # the zero diagonal block, so that placement cannot be intended.
            K[indx[j]:indx[j+1],indx[seg]:indx[seg+1]]=alpha[seg]*alpha[j]*rbf2(t2-D[j],t1-D[seg],sigma_rbf,l_rbf)*(1.0/(l_rbf*l_rbf))*(T_d.T-D[seg]+D[j])
    return K
def genCov_sigma_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs):
    '''
    Gradient of the covariance matrix w.r.t. the (shared) noise std.
    Only the diagonal blocks depend on it: d/ds of s^2*I is 2*s*I.
    NOTE(review): callers pass the shared scalar noise_std1 here, not the
    per-segment array — confirm against grad_loglik_tied_fsf_delay.
    '''
    K=np.zeros((len(t),len(t)))#assume len(t) == sum(len_seg_obs)
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    for i in range(0,num_seg):
        K[indx[i]:indx[i+1],indx[i]:indx[i+1]]=2.0*noise_std*np.eye(len_seg_obs[i])
    return K
def loglik_tied_fsf_delay(params,t,Y,num_seg,trans,a,b,diag,len_seg_obs):
    '''
    Negative log marginal likelihood of the delayed-GP model.
    params packs [l_rbf, alpha (num_seg), D[1:] (num_seg-1), shared noise std].
    trans==1 means params are in the unconstrained domain and are first
    mapped back into (a, b) via paramInvTrans.
    Returns -log p(Y | params), suitable for minimisation (e.g. fmin_tnc).
    '''
    if trans==1:
        params=paramInvTrans(params,a,b)
    # unpack parameters; the latent-function variance is tied to 1
    sigma_rbf=1.0
    l_rbf=params[0]
    ind=1
    alpha=params[ind:ind+num_seg]
    # initialize delays: segment 0 is the reference with D[0]=0
    ind=ind+num_seg
    D=np.zeros(num_seg)
    D[1:num_seg]=params[ind:ind+num_seg-1]
    D[0]=0.0
    # initialize the shared noise std
    ind=ind+num_seg-1
    noise_std1=params[ind:len(params)]
    noise_std=np.ones(num_seg)*noise_std1
    Cov=genCov_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs)
    if diag:
        Cov=blk_diag(Cov,num_seg,len(t))#need to change
    try:
        L=np.linalg.cholesky(Cov)
    except np.linalg.LinAlgError:
        # BUG FIX: this function returns a value to be *minimised*, so a
        # non-positive-definite covariance must be penalised with +inf.
        # The original returned -inf, which an optimizer would treat as
        # the best possible objective value.
        return np.inf
    # Standard GP marginal likelihood via the Cholesky factor:
    # ll = -0.5 y' K^-1 y - sum(log diag L) - 0.5 n log(2 pi)
    alpha=sp.linalg.cho_solve((L,1),Y)
    ll=-0.5*np.dot(Y[None,:],alpha[:,None])[0,0]-np.sum(np.log(np.diag(L)))-0.5*Y.size*np.log(2*np.pi)
    return -ll
def grad_loglik_tied_fsf_delay(params,t,Y,num_seg,trans,a,b,diag,len_seg_obs):
    '''
    Gradient of the negative log marginal likelihood w.r.t. the packed
    parameter vector [l_rbf, alpha (num_seg), D[1:] (num_seg-1), noise std].
    Each component is -0.5*a'*dK*a + 0.5*tr(K^-1 dK) with a = K^-1 Y.
    When trans==1 the chain-rule factor of the logit transform is applied
    so the gradient is w.r.t. the unconstrained parameters.
    '''
    grad=np.zeros(len(params))
    if trans==1:
        params=paramInvTrans(params,a,b)
    # unpack parameters (same layout as loglik_tied_fsf_delay)
    sigma_rbf=1.0
    l_rbf=params[0]
    ind=1
    alpha=params[ind:ind+num_seg]
    # initialize delays: segment 0 is the reference with D[0]=0
    ind=ind+num_seg
    D=np.zeros(num_seg)
    D[1:num_seg]=params[ind:ind+num_seg-1]
    D[0]=0.0
    ind=ind+num_seg-1
    noise_std1=params[ind:len(params)]
    noise_std=np.ones(num_seg)*noise_std1
    Cov=genCov_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs)
    if diag:
        Cov=blk_diag(Cov,num_seg,len(t))
    try:
        L=np.linalg.cholesky(Cov)
    except np.linalg.LinAlgError:
        # NOTE(review): returns a scalar where callers expect a gradient
        # vector, and -inf disagrees with the +inf penalty direction a
        # minimiser needs — confirm how the optimizer handles this case.
        return -np.inf
    alpha_cho=sp.linalg.cho_solve((L,1),Y)[:,None]
    # latent function length scale l_rbf
    gK=genCov_l_f_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs)
    if diag:
        gK=blk_diag(gK,num_seg,len(t))
    grad[0]=-0.5*np.dot(alpha_cho.T,np.dot(gK,alpha_cho))[0,0]+0.5*np.trace(sp.linalg.cho_solve((L,1),gK))
    # per-segment scale factors alpha
    ind=1
    j=0
    for i in range(ind,ind+num_seg):
        gK=genCov_alpha_i_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,j,len_seg_obs)
        if diag:
            gK=blk_diag(gK,num_seg,len(t))
        grad[i]=-0.5*np.dot(alpha_cho.T,np.dot(gK,alpha_cho))[0,0]+0.5*np.trace(sp.linalg.cho_solve((L,1),gK))
        j+=1
    # delays D[1:] (D[0] is fixed at zero, so j starts at 1)
    ind=ind+num_seg
    j=1
    for i in range(ind,ind+num_seg-1):
        gK=genCov_D_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,j,len_seg_obs)
        if diag:
            gK=blk_diag(gK,num_seg,len(t))
        grad[i]=-0.5*np.dot(alpha_cho.T,np.dot(gK,alpha_cho))[0,0]+0.5*np.trace(sp.linalg.cho_solve((L,1),gK))
        j+=1
    # shared noise std (note: the scalar noise_std1 is passed here)
    ind=ind+num_seg-1
    gK=genCov_sigma_delay(t,alpha,D,sigma_rbf,l_rbf,noise_std1,num_seg,len_seg_obs)
    if diag:
        gK=blk_diag(gK,num_seg,len(t))
    grad[ind]=-0.5*np.dot(alpha_cho.T,np.dot(gK,alpha_cho))[0,0]+0.5*np.trace(sp.linalg.cho_solve((L,1),gK))
    if trans==1:
        # chain rule for the logit reparameterisation
        return grad*gradTrans(paramTrans(params,a,b),a,b)
    elif trans==0:
        return grad
def pred_Cov_tied_fsf_delay(t_obs,t_pred,Y,params,num_seg,seg,trans,a,b,len_seg_obs):
    '''
    Posterior predictive mean and covariance for segment `seg` at t_pred.
    Returns {'Cov': predictive covariance, 'mu': predictive mean (column)}.
    params has the same packed layout as loglik_tied_fsf_delay.
    '''
    if trans==1:
        params=paramInvTrans(params,a,b)
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    # unpack parameters
    sigma_rbf=1.0
    l_rbf=params[0]
    ind=1
    alpha=params[ind:ind+num_seg]
    # initialize delays: segment 0 is the reference with D[0]=0
    ind=ind+num_seg
    D=np.zeros(num_seg)
    D[1:num_seg]=params[ind:ind+num_seg-1]
    D[0]=0.0
    ind=ind+num_seg-1
    noise_std1=params[ind:len(params)]
    noise_std=np.ones(num_seg)*noise_std1
    # B: covariance of the observations; A: prior covariance at t_pred
    B=genCov_delay(t_obs,alpha,D,sigma_rbf,l_rbf,noise_std,num_seg,len_seg_obs)
    A=alpha[seg]*alpha[seg]*rbf2(t_pred,t_pred,sigma_rbf,l_rbf)+noise_std1**2*np.eye(t_pred.size)
    # C: cross covariance between prediction and observation points
    C=np.zeros((t_pred.size,t_obs.size))
    for i in range(0,num_seg):
        t1=t_obs[indx[i]:indx[i+1]]
        # CC marks prediction times that coincide with observation times so
        # the matching noise variance is added for the same segment.
        CC=np.zeros((t_pred.size,t1.size))
        for j in range(0,t_pred.size):
            for k in range(0,t1.size):
                if t_pred[j]==t1[k]:
                    CC[j,k]=1.0
        if i==seg:
            C[0:len(t_pred),indx[i]:indx[i+1]]=alpha[i]*alpha[seg]*rbf2(t_pred-D[seg],t1-D[i],sigma_rbf,l_rbf)+noise_std[seg]*noise_std[seg]*CC
        else:
            C[0:len(t_pred),indx[i]:indx[i+1]]=alpha[i]*alpha[seg]*rbf2(t_pred-D[seg],t1-D[i],sigma_rbf,l_rbf)
    # Standard GP conditioning; the 1e-8 jitter keeps Cov numerically PSD.
    mu=np.dot(C,np.dot(np.linalg.inv(B),Y[:,None]))
    Cov=A-np.dot(C,np.dot(np.linalg.inv(B),C.T))+1e-8*np.eye(t_pred.size)
    return {'Cov':Cov,'mu':mu}
def pred_Lat_tied_fsf(t_obs,t_pred,Y,params,num_seg,trans,a,b):
    '''
    Posterior mean and covariance of the latent function at times t_pred.
    NOTE(review): relies on genCov and cov_yiLat, which are defined
    elsewhere (convolution-model variant); the parameter vector here also
    packs per-segment kernel widths l, unlike the *_delay functions above —
    confirm the layout matches the caller.
    '''
    if trans==1:
        params=paramInvTrans(params,a,b)
    # unpack parameters
    sigma_rbf=1.0
    l_rbf=params[0]
    ind=1
    alpha=params[ind:ind+num_seg]
    # initialize delays: segment 0 is the reference with D[0]=0
    ind=ind+num_seg
    D=np.zeros(num_seg)
    D[1:num_seg]=params[ind:ind+num_seg-1]
    D[0]=0.0
    ind=ind+num_seg-1
    # initialize per-segment convolution kernel widths
    l=params[ind:ind+num_seg]
    # initialize noise
    ind=ind+num_seg
    noise_std1=params[ind:len(params)]
    noise_std=np.ones(num_seg)*noise_std1
    B=genCov(t_obs,alpha,D,sigma_rbf,l_rbf,l,noise_std,num_seg)
    A=rbf2(t_pred,t_pred,sigma_rbf,l_rbf)
    C=np.zeros((t_pred.size,t_obs.size*num_seg))
    # NOTE(review): CC is computed but never used below.
    CC=np.zeros((t_pred.size,t_obs.size))
    for i in range(0,t_pred.size):
        for j in range(0,t_obs.size):
            if t_pred[i]==t_obs[j]:
                CC[i,j]=1.0
    for i in range(0,num_seg):
        C[0:len(t_pred),i*len(t_obs):(i+1)*len(t_obs)]=alpha[i]*cov_yiLat(t_pred,t_obs,D[i],sigma_rbf,l_rbf,l[i])
    # Standard GP conditioning; the 1e-8 jitter keeps Cov numerically PSD.
    mu=np.dot(C,np.dot(np.linalg.inv(B),Y[:,None]))
    Cov=A-np.dot(C,np.dot(np.linalg.inv(B),C.T))+1e-8*np.eye(t_pred.size)
    return {'Cov':Cov,'mu':mu}
def pred_Lat_tied_fsf_new(t_obs,t_pred,Y,params,num_seg,trans,a,b,len_seg_obs):
    '''
    Posterior mean and covariance of the latent function at times t_pred,
    for segments with (possibly) different numbers of observations.
    NOTE(review): relies on genCov_new and cov_yiLat, defined elsewhere;
    the parameter vector packs per-segment kernel widths l in addition to
    the *_delay layout — confirm against the caller.
    '''
    if trans==1:
        params=paramInvTrans(params,a,b)
    indx=np.concatenate((np.array([0]),np.cumsum(len_seg_obs)))#allow each segment to have different length
    # unpack parameters
    sigma_rbf=1.0
    l_rbf=params[0]
    ind=1
    alpha=params[ind:ind+num_seg]
    # initialize delays: segment 0 is the reference with D[0]=0
    ind=ind+num_seg
    D=np.zeros(num_seg)
    D[1:num_seg]=params[ind:ind+num_seg-1]
    D[0]=0.0
    ind=ind+num_seg-1
    # initialize per-segment convolution kernel widths
    l=params[ind:ind+num_seg]
    # initialize noise
    ind=ind+num_seg
    noise_std1=params[ind:len(params)]
    noise_std=np.ones(num_seg)*noise_std1
    B=genCov_new(t_obs,alpha,D,sigma_rbf,l_rbf,l,noise_std,num_seg,len_seg_obs)
    A=rbf2(t_pred,t_pred,sigma_rbf,l_rbf)
    C=np.zeros((t_pred.size,t_obs.size))
    for i in range(0,num_seg):
        t1=t_obs[indx[i]:indx[i+1]]
        C[0:len(t_pred),indx[i]:indx[i+1]]=alpha[i]*cov_yiLat(t_pred,t1,D[i],sigma_rbf,l_rbf,l[i])
    # Standard GP conditioning; the 1e-8 jitter keeps Cov numerically PSD.
    mu=np.dot(C,np.dot(np.linalg.inv(B),Y[:,None]))
    Cov=A-np.dot(C,np.dot(np.linalg.inv(B),C.T))+1e-8*np.eye(t_pred.size)
    return {'Cov':Cov,'mu':mu}
def paramTrans(x, a, b):
    """Map x from the interval (a, b) to the real line (logit transform)."""
    ratio = (x - a) / (b - x)
    return np.log(ratio)
def paramInvTrans(x, a, b):
    """Inverse of paramTrans: map real x back into (a, b) via a sigmoid."""
    span = b - a
    return a + span / (1 + np.exp(-x))
def gradTrans(x, a, b):
    """Derivative of paramInvTrans w.r.t. x (chain-rule factor)."""
    e = np.exp(x)
    return (b - a) * e / np.square(1 + e)
| 24.143443
| 173
| 0.688932
| 2,540
| 11,782
| 3.04252
| 0.062598
| 0.061335
| 0.032609
| 0.052795
| 0.860637
| 0.853649
| 0.818841
| 0.799301
| 0.78455
| 0.769798
| 0
| 0.027688
| 0.111017
| 11,782
| 487
| 174
| 24.193018
| 0.710139
| 0.188932
| 0
| 0.668085
| 0
| 0
| 0.001591
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059574
| false
| 0
| 0.034043
| 0.012766
| 0.165957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f974c2c97dbaab9c6f183a3f769077a320f90f41
| 88
|
py
|
Python
|
AMAO/apps/Avaliacao/Questao/forms/__init__.py
|
arruda/amao
|
83648aa2c408b1450d721b3072dc9db4b53edbb8
|
[
"MIT"
] | 2
|
2017-04-26T14:08:02.000Z
|
2017-09-01T13:10:17.000Z
|
AMAO/apps/Avaliacao/Questao/forms/__init__.py
|
arruda/amao
|
83648aa2c408b1450d721b3072dc9db4b53edbb8
|
[
"MIT"
] | null | null | null |
AMAO/apps/Avaliacao/Questao/forms/__init__.py
|
arruda/amao
|
83648aa2c408b1450d721b3072dc9db4b53edbb8
|
[
"MIT"
] | null | null | null |
from consulta import *
from resolucao import *
from criar import *
from listar import *
| 17.6
| 23
| 0.772727
| 12
| 88
| 5.666667
| 0.5
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 4
| 24
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f98de20da39e2f50f479deb3debab8ecdc3680af
| 128
|
py
|
Python
|
app/auth/__init__.py
|
SuYehTarn/CS651-Group8-Feedback_Forum
|
d1163442aea81214c4dfa8de1d353ec719bfa7ab
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
SuYehTarn/CS651-Group8-Feedback_Forum
|
d1163442aea81214c4dfa8de1d353ec719bfa7ab
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
SuYehTarn/CS651-Group8-Feedback_Forum
|
d1163442aea81214c4dfa8de1d353ec719bfa7ab
|
[
"MIT"
] | null | null | null |
"""Module of the Auth blueprint
"""
from flask import Blueprint
auth = Blueprint('auth', __name__)
from app.auth import views
| 16
| 34
| 0.742188
| 18
| 128
| 5.055556
| 0.611111
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 128
| 7
| 35
| 18.285714
| 0.842593
| 0.21875
| 0
| 0
| 0
| 0
| 0.043011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
f995954a81516b76fa3d1b4ff206431fb6038688
| 37
|
py
|
Python
|
src/pyV3D/__init__.py
|
OpenMDAO/pyV3D
|
2baf32c489e2c91531b89e51a879ba8074ae2803
|
[
"Apache-2.0"
] | 3
|
2015-05-13T23:43:56.000Z
|
2021-01-20T10:15:17.000Z
|
src/pyV3D/__init__.py
|
OpenMDAO/pyV3D
|
2baf32c489e2c91531b89e51a879ba8074ae2803
|
[
"Apache-2.0"
] | 3
|
2016-10-07T08:28:20.000Z
|
2016-10-07T10:25:34.000Z
|
src/pyV3D/__init__.py
|
OpenMDAO/pyV3D
|
2baf32c489e2c91531b89e51a879ba8074ae2803
|
[
"Apache-2.0"
] | 2
|
2017-07-16T03:57:36.000Z
|
2019-10-01T23:57:45.000Z
|
from _pyV3D import *
import handler
| 9.25
| 20
| 0.783784
| 5
| 37
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.189189
| 37
| 3
| 21
| 12.333333
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.