Dataset schema (113 columns, name and type):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
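For orientation, here is a minimal sketch of loading rows with this schema and filtering on the quality signals. The shard filename is hypothetical; the column names are taken from the table above.

```python
import pandas as pd

# Hypothetical shard path; substitute a real file from the dataset.
df = pd.read_parquet("data/train-00000.parquet")

# Keep Python rows that parse (cate_ast == 1), are not comment-dominated,
# and contain little print-debugging.
keep = df[
    (df["lang"] == "Python")
    & (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_frac_chars_comments_quality_signal"] < 0.5)
    & (df["qsc_codepython_frac_lines_print_quality_signal"] < 0.2)
]
print(f"kept {len(keep)} of {len(df)} rows")
```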
Row 1
- hexsha: ed42fcb801fa38e9585e0b02fe9fd71eff57af66 | size: 1,863 | ext: py | lang: Python
- max_stars: histogram.py @ ccfelius/queueing, head c38bd2fe230e52d6166a94449cec28f82e245ec2, licenses ["MIT"], count 1, events 2020-12-10T17:36:40.000Z to 2020-12-10T17:36:40.000Z
- max_issues: same path/repo/head/licenses, count null, events null to null
- max_forks: same path/repo/head/licenses, count 1, events 2021-01-05T13:08:03.000Z to 2021-01-05T13:08:03.000Z
- content:

```python
import matplotlib.pyplot as plt
import pandas as pd
import math
import numpy as np
from scipy import stats
import seaborn as sns
data = pd.read_csv("data/500-4.txt", sep="\t")
# example1 = data[data["SIM_TIME"] == 500]
simulations = 500
simtimes = [5, 50, 150, 500, 1000]
# for i in [1, 2, 4]:
# data = pd.read_csv(f"data/500-{i}.txt", sep="\t")
# example = data[data["SIM_TIME"] == simtime]
rhos = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.975]
print("DONE")
print("\n START MEAN, STDEV, CONF INT")
data = pd.read_csv("data/500-2.txt", sep="\t")
example = data[data["SIM_TIME"] == 150]
example1 = data[data["SIM_TIME"] == 500]
ex = example[example['RHO'] == 0.1]['AVG_WAIT']
ex2 = example1[example1['RHO'] == 0.1]['AVG_WAIT']
ex_9 = example[example['RHO'] == 0.9]['AVG_WAIT']
ex2_9 = example1[example1['RHO'] == 0.9]['AVG_WAIT']
print("\nMEAN 150, 500, rho 0.1, rho 0.9")
print(ex.mean(), ex2.mean())
print(ex_9.mean(), ex2_9.mean())
print("\nSTDEV 150, 500, rho 0.1, rho 0.9")
print(ex.std(), ex2.std())
print(ex_9.std(), ex2_9.std())
fig = plt.figure(facecolor='w')
ax = fig.add_subplot(111, facecolor='whitesmoke', axisbelow=True)
ax.hist(ex_9, bins = 100, alpha=0.8, color = 'cornflowerblue', label="Simtime=150")
ax.hist(ex2_9, bins = 100, alpha = 0.5, color='springgreen', label="Simtime=500")
# sns.displot(ex_9,)
# sns.displot(ex2_9)
ax.set_xlabel('Mean waiting time / time unit', fontsize=12)
ax.set_ylabel('Density', fontsize=12)
ax.set_title('Distribution mean waiting time', fontsize = 14)
ax.yaxis.set_tick_params(length=0)
ax.xaxis.set_tick_params(length=0)
ax.grid(b=True, which='major', c='w', lw=2, ls='-')
legend = ax.legend()
legend.get_frame().set_alpha(0.5)
for spine in ('top', 'right', 'bottom', 'left'):
ax.spines[spine].set_visible(False)
plt.savefig("plots/histogram-150-500-01.png", dpi=300)
plt.show()
```

- avg_line_length: 30.540984 | max_line_length: 83 | alphanum_fraction: 0.662909
- quality signals: num_words 336, num_chars 1,863, mean_word_length 3.580357, frac_words_unique 0.375, frac_chars_top_2grams 0.0266, frac_chars_top_3grams 0.036575, frac_chars_top_4grams 0.049875, frac_chars_dupe_5grams 0.264339, dupe_6grams 0.201164, dupe_7grams 0.121363, dupe_8grams 0.086451, dupe_9grams 0.038238, dupe_10grams 0.038238, frac_chars_digital 0.09419, frac_chars_whitespace 0.122383, size_file_byte 1,863, num_lines 60, num_chars_line_max 84, num_chars_line_mean 31.05, frac_chars_alphabet 0.64159, frac_chars_comments 0.107354, frac_chars_string_length 0.224638, frac_chars_long_word_length 0.018116; python: cate_ast 1, cate_var_zero false, frac_lines_import 0.146341, score_lines_no_logic 0.146341, frac_lines_print 0.195122; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
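The surface statistics in this row (average line length 30.54, alphanumeric fraction 0.66, and so on) are simple functions of `content`. Below is a sketch of plausible definitions; the exact formulas used by the dataset's pipeline are an assumption here.

```python
def surface_stats(content: str) -> dict:
    lines = content.splitlines()
    return {
        # Fraction of all characters that are alphanumeric.
        "alphanum_fraction": sum(c.isalnum() for c in content) / len(content),
        # Mean and max physical line length.
        "avg_line_length": sum(len(ln) for ln in lines) / len(lines),
        "max_line_length": max(len(ln) for ln in lines),
        # Fraction of lines that contain a print call, cf. frac_lines_print.
        "frac_lines_print": sum("print(" in ln for ln in lines) / len(lines),
    }
```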
Row 2
- hexsha: ed4391eec6c0ca45d66a840d1a081ef7d248ea7e | size: 558 | ext: py | lang: Python
- max_stars / max_issues / max_forks: python/1931.py @ zheedong/BaekJoon, head 7f9e00085276a337d18ee3bb90c98126f7af4d3a, licenses ["MIT"], counts null, events null
- content:

```python
n = int(input())
conf_set = []
for _ in range(n):
conf_set.append(tuple(map(int, input().split())))
conf_set.sort(key=lambda x : (x[1], x[0]))
# Sort by ending time.
# Start times are also sorted in ascending order so that meetings whose
# start and end coincide are handled correctly.
solution_list = [conf_set[0]]
# Greedy Algorithm
for conf in conf_set[1:]:
last_conf = solution_list[-1]
_, last_end_time = last_conf
new_start_time, _ = conf
    # Compare the start time of the next meeting from the sorted list
    # with the end time of the last meeting in the solution list.
if new_start_time >= last_end_time:
solution_list.append(conf)
print(len(solution_list))
```

- avg_line_length: 23.25 | max_line_length: 59 | alphanum_fraction: 0.664875
- quality signals: num_words 98, num_chars 558, mean_word_length 3.561224, frac_words_unique 0.571429, frac_chars_top_2grams 0.100287, frac_chars_top_3grams 0.063037, frac_chars_digital 0.011364, frac_chars_whitespace 0.21147, size_file_byte 558, num_lines 24, num_chars_line_max 60, num_chars_line_mean 23.25, frac_chars_alphabet 0.781818, frac_chars_comments 0.241935; python: cate_ast 1, cate_var_zero false, frac_lines_print 0.076923; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
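This row is the classic activity-selection greedy for BOJ problem 1931: sort meetings by end time (then start time) and accept every meeting that starts no earlier than the last accepted one ends. A self-contained restatement with a small sanity check:

```python
def max_meetings(intervals):
    """Maximum number of non-overlapping meetings (end == next start is allowed)."""
    chosen = []
    for start, end in sorted(intervals, key=lambda iv: (iv[1], iv[0])):
        if not chosen or start >= chosen[-1][1]:
            chosen.append((start, end))
    return len(chosen)

# Classic activity-selection instance: the optimum picks 4 meetings.
assert max_meetings([(1, 4), (3, 5), (0, 6), (5, 7), (3, 8), (5, 9),
                     (6, 10), (8, 11), (8, 12), (2, 13), (12, 14)]) == 4
```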
Row 3
- hexsha: ed446ef71ae2d9e250f77adc4d3e36a498ecb373 | size: 3,239 | ext: py | lang: Python
- max_stars: egs2/mr_openslr64/asr1/local/data_prep.py @ texpomru13/espnet, head 7ef005e832e2fb033f356c16f54e0f08762fb4b0, licenses ["Apache-2.0"], count 1, events 2022-03-25T14:41:05.000Z to 2022-03-25T14:41:05.000Z
- max_issues: same path/repo/head/licenses, count 2, events 2019-04-23T04:43:33.000Z to 2019-05-13T13:06:52.000Z
- max_forks: same path/repo/head/licenses, count null, events null to null
- content:

```python
#!/usr/bin/env python3
# Copyright 2021 Carnegie Mellon University (Peter Wu)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
import argparse
import os
import random
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-d", help="downloads directory", type=str, default="downloads")
args = parser.parse_args()
tsv_path = "%s/line_index.tsv" % args.d
with open(tsv_path, "r") as inf:
tsv_lines = inf.readlines()
tsv_lines = [line.strip() for line in tsv_lines]
spk2utt = {}
utt2text = {}
for line in tsv_lines:
l_list = line.split("\t")
fid = l_list[0]
spk = l_list[0].split("_")[1]
text = l_list[1]
path = "%s/%s.wav" % (args.d, fid)
if os.path.exists(path):
utt2text[fid] = text
if spk in spk2utt:
spk2utt[spk].append(fid)
else:
spk2utt[spk] = [fid]
spks = sorted(list(spk2utt.keys()))
num_fids = 0
num_test_spks = 0
for spk in spks:
num_test_spks += 1
fids = sorted(list(set(spk2utt[spk])))
num_fids += len(fids)
if num_fids >= 2000:
break
num_test_spks = 2
test_spks = spks[:num_test_spks]
train_dev_spks = spks[num_test_spks:]
random.Random(0).shuffle(train_dev_spks)
num_train = int(len(train_dev_spks) * 0.9)
train_spks = train_dev_spks[:num_train]
dev_spks = train_dev_spks[num_train:]
spks_by_phase = {"train": train_spks, "dev": dev_spks, "test": test_spks}
flac_dir = "%s" % args.d
sr = 16000
for phase in spks_by_phase:
spks = spks_by_phase[phase]
text_strs = []
wav_scp_strs = []
spk2utt_strs = []
num_fids = 0
for spk in spks:
fids = sorted(list(set(spk2utt[spk])))
num_fids += len(fids)
if phase == "test" and num_fids > 2000:
curr_num_fids = num_fids - 2000
random.Random(1).shuffle(fids)
fids = fids[:curr_num_fids]
utts = [spk + "-" + f for f in fids]
utts_str = " ".join(utts)
spk2utt_strs.append("%s %s" % (spk, utts_str))
for fid, utt in zip(fids, utts):
cmd = "ffmpeg -i %s/%s.wav -f wav -ar %d -ab 16 -ac 1 - |" % (
flac_dir,
fid,
sr,
)
text_strs.append("%s %s" % (utt, utt2text[fid]))
wav_scp_strs.append("%s %s" % (utt, cmd))
phase_dir = "data/marathi_%s" % phase
if not os.path.exists(phase_dir):
os.makedirs(phase_dir)
text_strs = sorted(text_strs)
wav_scp_strs = sorted(wav_scp_strs)
spk2utt_strs = sorted(spk2utt_strs)
with open(os.path.join(phase_dir, "text"), "w+") as ouf:
for s in text_strs:
ouf.write("%s\n" % s)
with open(os.path.join(phase_dir, "wav.scp"), "w+") as ouf:
for s in wav_scp_strs:
ouf.write("%s\n" % s)
with open(os.path.join(phase_dir, "spk2utt"), "w+") as ouf:
for s in spk2utt_strs:
ouf.write("%s\n" % s)
```

- avg_line_length: 32.39 | max_line_length: 88 | alphanum_fraction: 0.538747
- quality signals: num_words 448, num_chars 3,239, mean_word_length 3.674107, frac_words_unique 0.252232, frac_chars_top_2grams 0.038275, frac_chars_top_3grams 0.043742, frac_chars_top_4grams 0.027339, frac_chars_dupe_5grams 0.309235, dupe_6grams 0.178007, dupe_7grams 0.117861, dupe_8grams 0.102066, dupe_9grams 0.102066, dupe_10grams 0.102066, frac_chars_digital 0.026667, frac_chars_whitespace 0.328496, size_file_byte 3,239, num_lines 99, num_chars_line_max 89, num_chars_line_mean 32.717172, frac_chars_alphabet 0.730115, frac_chars_comments 0.040753, frac_lines_dupe_lines 0.13253, frac_lines_long_string 0.012048, frac_chars_string_length 0.065722; python: cate_ast 1, cate_var_zero false, frac_lines_import 0.036145, score_lines_no_logic 0.036145; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
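For reference, the three Kaldi-style files this script writes are plain text with one record per line; the line shapes are sketched below (the IDs are illustrative, not taken from the corpus):

```python
# Formats of the files written to data/marathi_{train,dev,test}/ :
#
# text:     <utt-id> <transcript>
#           e.g. "spk1-mr_0001 <transcript text>"
# wav.scp:  <utt-id> <command that writes 16 kHz mono wav to stdout> |
#           e.g. "spk1-mr_0001 ffmpeg -i downloads/mr_0001.wav -f wav -ar 16000 -ab 16 -ac 1 - |"
# spk2utt:  <spk-id> <utt-id> [<utt-id> ...]
#           e.g. "spk1 spk1-mr_0001 spk1-mr_0002"
```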
Row 4
- hexsha: ed45225f06a1a8cadf577895bd9772f8a5fae7c7 | size: 1,240 | ext: py | lang: Python
- max_stars: src/napari_geojson/_tests/test_writer.py @ NHPatterson/napari-geojson, head 8e7925dc7600608673d489e24e8760c4669eaa0b, licenses ["BSD-3-Clause"], count null, events null to null
- max_issues: same path/repo/head/licenses, count 6, events 2021-12-31T06:04:05.000Z to 2022-02-27T15:19:29.000Z
- max_forks: same path/repo/head/licenses, count 1, events 2022-02-22T20:35:07.000Z to 2022-02-22T20:35:07.000Z
- content:

```python
import geojson
import pytest
from napari_geojson import write_shapes
ellipse = [[[0, 0], [0, 5], [5, 5], [5, 0]], "ellipse", "Polygon"]
line = [[[0, 0], [5, 5]], "line", "LineString"]
polygon = [[[0, 0], [5, 5], [0, 10]], "polygon", "Polygon"]
polyline = [[[0, 0], [5, 5], [0, 10]], "path", "LineString"]
rectangle = [[[0, 0], [0, 5], [5, 5], [5, 0]], "rectangle", "Polygon"]
sample_shapes = [ellipse, line, polygon, polyline, rectangle]
sample_shapes_ids = ["ellipse", "line", "polygon", "polyline", "rectangle"]
@pytest.mark.parametrize(
"coords,shape_type,expected", sample_shapes, ids=sample_shapes_ids
)
def test_write_each_shape(
make_napari_viewer, tmp_path, coords, shape_type, expected
): # noqa E501
"""Writer writes a shapes layer as GeoJSON."""
fname = str(tmp_path / "sample.geojson")
viewer = make_napari_viewer()
shapes_layer = viewer.add_shapes(coords, shape_type=shape_type)
# shape was written
assert len(shapes_layer.data) == 1
data, meta, _ = shapes_layer.as_layer_data_tuple()
write_shapes(fname, data, meta)
# read back
with open(fname) as fp:
collection = geojson.load(fp)
geom = collection["geometries"][0]
assert geom.type == expected
```

- avg_line_length: 33.513514 | max_line_length: 75 | alphanum_fraction: 0.647581
- quality signals: num_words 169, num_chars 1,240, mean_word_length 4.573965, frac_words_unique 0.343195, frac_chars_top_2grams 0.023286, frac_chars_top_3grams 0.019405, frac_chars_top_4grams 0.025873, frac_chars_dupe_5grams 0.129366, dupe_6grams 0.03881, dupe_7grams 0.020699, dupe_8grams 0.020699, frac_chars_digital 0.03831, frac_chars_whitespace 0.179032, size_file_byte 1,240, num_lines 36, num_chars_line_max 76, num_chars_line_mean 34.444444, frac_chars_alphabet 0.721022, frac_chars_comments 0.06371, frac_chars_string_length 0.136167, frac_chars_long_word_length 0.02255, frac_lines_assert 0.076923; python: cate_ast 1, frac_lines_func_ratio 0.038462, cate_var_zero false, frac_lines_import 0.115385, score_lines_no_logic 0.153846; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
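A run sketch for the test above; it relies on napari's `make_napari_viewer` pytest fixture, so napari (with its pytest plugin) must be installed:

```python
import pytest

# Equivalent to: pytest -k ellipse src/napari_geojson/_tests/test_writer.py
pytest.main(["-k", "ellipse", "src/napari_geojson/_tests/test_writer.py"])
```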
Row 5
- hexsha: ed487e9bb61e7b47c76c3fe0f4b895d4d0e7a7e7 | size: 12,688 | ext: py | lang: Python
- max_stars / max_issues / max_forks: pgmpy/models/ClusterGraph.py @ echoyi/pgmpy, head c37dda4401f23ec73fc5d17d957867cd62e588d3, licenses ["MIT"], counts null, events null
- content:

```python
#!/usr/bin/env python3
from collections import defaultdict
import numpy as np
from pgmpy.base import UndirectedGraph
from pgmpy.factors import factor_product
class ClusterGraph(UndirectedGraph):
r"""
Base class for representing Cluster Graph.
Cluster graph is an undirected graph which is associated with a subset of variables. The graph contains undirected
edges that connects clusters whose scopes have a non-empty intersection.
Formally, a cluster graph is :math:`\mathcal{U}` for a set of factors :math:`\Phi` over :math:`\mathcal{X}` is an
undirected graph, each of whose nodes :math:`i` is associated with a subset :math:`C_i \subseteq X`. A cluster
graph must be family-preserving - each factor :math:`\phi \in \Phi` must be associated with a cluster C, denoted
:math:`\alpha(\phi)`, such that :math:`Scope[\phi] \subseteq C_i`. Each edge between a pair of clusters :math:`C_i`
and :math:`C_j` is associated with a sepset :math:`S_{i,j} \subseteq C_i \cap C_j`.
Parameters
----------
data: input graph
Data to initialize graph. If data=None (default) an empty graph is created. The data is an edge list
Examples
--------
Create an empty ClusterGraph with no nodes and no edges
>>> from pgmpy.models import ClusterGraph
>>> G = ClusterGraph()
G can be grown by adding clique nodes.
**Nodes:**
Add a tuple (or list or set) of nodes as single clique node.
>>> G.add_node(('a', 'b', 'c'))
>>> G.add_nodes_from([('a', 'b'), ('a', 'b', 'c')])
**Edges:**
G can also be grown by adding edges.
>>> G.add_edge(('a', 'b', 'c'), ('a', 'b'))
or a list of edges
>>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
... (('a', 'b', 'c'), ('a', 'c'))])
"""
def __init__(self, ebunch=None):
super(ClusterGraph, self).__init__()
if ebunch:
self.add_edges_from(ebunch)
self.factors = []
def add_node(self, node, **kwargs):
"""
Add a single node to the cluster graph.
Parameters
----------
node: node
A node should be a collection of nodes forming a clique. It can be
a list, set or tuple of nodes
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> G = ClusterGraph()
>>> G.add_node(('a', 'b', 'c'))
"""
if not isinstance(node, (list, set, tuple)):
raise TypeError(
"Node can only be a list, set or tuple of nodes forming a clique"
)
node = tuple(node)
super(ClusterGraph, self).add_node(node, **kwargs)
def add_nodes_from(self, nodes, **kwargs):
"""
Add multiple nodes to the cluster graph.
Parameters
----------
nodes: iterable container
A container of nodes (list, dict, set, etc.).
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b'), ('a', 'b', 'c')])
"""
for node in nodes:
self.add_node(node, **kwargs)
def add_edge(self, u, v, **kwargs):
"""
Add an edge between two clique nodes.
Parameters
----------
u, v: nodes
Nodes can be any list or set or tuple of nodes forming a clique.
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
>>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
... (('a', 'b', 'c'), ('a', 'c'))])
"""
set_u = set(u)
set_v = set(v)
if set_u.isdisjoint(set_v):
raise ValueError("No sepset found between these two edges.")
super(ClusterGraph, self).add_edge(u, v)
def add_factors(self, *factors):
"""
Associate a factor to the graph.
See factors class for the order of potential values
Parameters
----------
*factor: pgmpy.factors.factors object
A factor object on any subset of the variables of the model which
is to be associated with the model.
Returns
-------
None
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> student = ClusterGraph()
>>> student.add_node(('Alice', 'Bob'))
>>> factor = DiscreteFactor(['Alice', 'Bob'], cardinality=[3, 2],
... values=np.random.rand(6))
>>> student.add_factors(factor)
"""
for factor in factors:
factor_scope = set(factor.scope())
nodes = [set(node) for node in self.nodes()]
if factor_scope not in nodes:
raise ValueError(
"Factors defined on clusters of variable not" "present in model"
)
self.factors.append(factor)
def get_factors(self, node=None):
"""
Return the factors that have been added till now to the graph.
If node is not None, it would return the factor corresponding to the
given node.
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
>>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
... (('a', 'b', 'c'), ('a', 'c'))])
>>> phi1 = DiscreteFactor(['a', 'b', 'c'], [2, 2, 2], np.random.rand(8))
>>> phi2 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
>>> phi3 = DiscreteFactor(['a', 'c'], [2, 2], np.random.rand(4))
>>> G.add_factors(phi1, phi2, phi3)
>>> G.get_factors()
>>> G.get_factors(node=('a', 'b', 'c'))
"""
if node is None:
return self.factors
else:
nodes = [set(n) for n in self.nodes()]
if set(node) not in nodes:
raise ValueError("Node not present in Cluster Graph")
factors = filter(lambda x: set(x.scope()) == set(node), self.factors)
return next(factors)
def remove_factors(self, *factors):
"""
Removes the given factors from the added factors.
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> student = ClusterGraph()
>>> factor = DiscreteFactor(['Alice', 'Bob'], cardinality=[2, 2],
        ...                           values=np.random.rand(4))
>>> student.add_factors(factor)
>>> student.remove_factors(factor)
"""
for factor in factors:
self.factors.remove(factor)
def get_cardinality(self, node=None):
"""
Returns the cardinality of the node
Parameters
----------
node: any hashable python object (optional)
The node whose cardinality we want. If node is not specified returns a
dictionary with the given variable as keys and their respective cardinality
as values.
Returns
-------
int or dict : If node is specified returns the cardinality of the node.
If node is not specified returns a dictionary with the given
variable as keys and their respective cardinality as values.
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> student = ClusterGraph()
>>> factor = DiscreteFactor(['Alice', 'Bob'], cardinality=[2, 2],
... values=np.random.rand(4))
>>> student.add_node(('Alice', 'Bob'))
>>> student.add_factors(factor)
>>> student.get_cardinality()
defaultdict(<class 'int'>, {'Bob': 2, 'Alice': 2})
>>> student.get_cardinality(node='Alice')
2
"""
if node:
for factor in self.factors:
for variable, cardinality in zip(factor.scope(), factor.cardinality):
if node == variable:
return cardinality
else:
cardinalities = defaultdict(int)
for factor in self.factors:
for variable, cardinality in zip(factor.scope(), factor.cardinality):
cardinalities[variable] = cardinality
return cardinalities
def get_partition_function(self):
r"""
Returns the partition function for a given undirected graph.
A partition function is defined as
.. math:: \sum_{X}(\prod_{i=1}^{m} \phi_i)
where m is the number of factors present in the graph
and X are all the random variables present.
Examples
--------
>>> from pgmpy.models import ClusterGraph
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
>>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
... (('a', 'b', 'c'), ('a', 'c'))])
>>> phi1 = DiscreteFactor(['a', 'b', 'c'], [2, 2, 2], np.random.rand(8))
>>> phi2 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
>>> phi3 = DiscreteFactor(['a', 'c'], [2, 2], np.random.rand(4))
>>> G.add_factors(phi1, phi2, phi3)
>>> G.get_partition_function()
"""
if self.check_model():
factor = self.factors[0]
factor = factor_product(
factor, *[self.factors[i] for i in range(1, len(self.factors))]
)
return np.sum(factor.values)
def check_model(self):
"""
Check the model for various errors. This method checks for the following
errors.
* Checks if factors are defined for all the cliques or not.
* Check for running intersection property is not done explicitly over
here as it done in the add_edges method.
        * Checks if cardinality information for all the variables is available or not. If
not it raises an error.
* Check if cardinality of random variable remains same across all the
factors.
Returns
-------
check: boolean
True if all the checks are passed
"""
for clique in self.nodes():
factors = filter(lambda x: set(x.scope()) == set(clique), self.factors)
if not any(factors):
raise ValueError("Factors for all the cliques or clusters not defined.")
cardinalities = self.get_cardinality()
if len(set((x for clique in self.nodes() for x in clique))) != len(
cardinalities
):
raise ValueError("Factors for all the variables not defined.")
for factor in self.factors:
for variable, cardinality in zip(factor.scope(), factor.cardinality):
if cardinalities[variable] != cardinality:
raise ValueError(
"Cardinality of variable {var} not matching among factors".format(
var=variable
)
)
return True
def copy(self):
"""
Returns a copy of ClusterGraph.
Returns
-------
ClusterGraph: copy of ClusterGraph
Examples
--------
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b'), ('b', 'c')])
>>> G.add_edge(('a', 'b'), ('b', 'c'))
>>> phi1 = DiscreteFactor(['a', 'b'], [2, 2], np.random.rand(4))
>>> phi2 = DiscreteFactor(['b', 'c'], [2, 2], np.random.rand(4))
>>> G.add_factors(phi1, phi2)
>>> graph_copy = G.copy()
>>> graph_copy.factors
[<DiscreteFactor representing phi(a:2, b:2) at 0xb71b19cc>,
<DiscreteFactor representing phi(b:2, c:2) at 0xb4eaf3ac>]
>>> graph_copy.edges()
[(('a', 'b'), ('b', 'c'))]
>>> graph_copy.nodes()
[('a', 'b'), ('b', 'c')]
"""
copy = ClusterGraph(self.edges())
if self.factors:
factors_copy = [factor.copy() for factor in self.factors]
copy.add_factors(*factors_copy)
return copy
```

- avg_line_length: 34.953168 | max_line_length: 119 | alphanum_fraction: 0.534994
- quality signals: num_words 1,502, num_chars 12,688, mean_word_length 4.458722, frac_words_unique 0.155127, frac_chars_top_2grams 0.010751, frac_chars_top_3grams 0.008511, frac_chars_top_4grams 0.007167, frac_chars_dupe_5grams 0.409138, dupe_6grams 0.342392, dupe_7grams 0.305361, dupe_8grams 0.297297, dupe_9grams 0.266836, dupe_10grams 0.259071, frac_chars_digital 0.00813, frac_chars_whitespace 0.321406, size_file_byte 12,688, num_lines 362, num_chars_line_max 120, num_chars_line_mean 35.049724, frac_chars_alphabet 0.769686, frac_chars_comments 0.566756, frac_lines_dupe_lines 0.129032, frac_chars_string_length 0.086792; python: cate_ast 1, frac_lines_func_ratio 0.11828, cate_var_zero false, frac_lines_import 0.043011, score_lines_no_logic 0.247312; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
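A runnable distillation of the docstring examples above, assuming pgmpy is installed:

```python
import numpy as np
from pgmpy.factors.discrete import DiscreteFactor
from pgmpy.models import ClusterGraph

# Two cliques sharing variable 'b', one factor per clique.
G = ClusterGraph()
G.add_nodes_from([("a", "b"), ("b", "c")])
G.add_edge(("a", "b"), ("b", "c"))
phi1 = DiscreteFactor(["a", "b"], [2, 2], np.random.rand(4))
phi2 = DiscreteFactor(["b", "c"], [2, 2], np.random.rand(4))
G.add_factors(phi1, phi2)

print(G.check_model())             # True: every clique has a matching factor
print(G.get_partition_function())  # sum over all assignments of phi1 * phi2
```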
Row 6
- hexsha: ed48aebeb275b2b66a2ca1e46510e44f4c833499 | size: 1,474 | ext: py | lang: Python
- max_stars / max_issues / max_forks: plugins/commands_window/plugin.py @ stonewell/eim, head 50fc4bb6e265ed8a5eb84577fd203e83934d55a7, licenses ["MIT"], counts null, events null
- content:

```python
from PySide6.QtWidgets import QListWidgetItem
from yapsy.IPlugin import IPlugin
class Plugin(IPlugin):
def __init__(self):
IPlugin.__init__(self)
def activate(self):
IPlugin.activate(self)
return
def deactivate(self):
IPlugin.deactivate(self)
def set_current_window(self, editor):
self.editor_ = editor
self.ctx.register_command('commands_list', self.show_commands_window, None,
False)
self.ctx.bind_key('Alt+X', 'commands_list')
def show_commands_window(self, ctx):
self.commands_ = ctx.get_commands()
self.content_window_ = cw = ctx.create_list_content_window()
self.list_widget_ = l = cw.list_widget_
self.text_edit_ = t = cw.text_edit_
self.list_items_ = []
f_c = self.ctx.get_theme_def_color('default', 'foreground')
b_c = self.ctx.get_theme_def_color('default', 'background')
for cmd in self.commands_:
item = QListWidgetItem(cmd, l)
item.setForeground(f_c)
item.setBackground(b_c)
self.list_items_.append(item)
t.returnPressed.connect(self.execute_command)
l.itemDoubleClicked[QListWidgetItem].connect(self.execute_command)
self.content_window_.select_first_visible_item()
cw.show()
def execute_command(self):
self.item_double_clicked(self.list_widget_.currentItem())
def item_double_clicked(self, item):
self.ctx.run_command(item.text())
```

- avg_line_length: 25.859649 | max_line_length: 80 | alphanum_fraction: 0.683853
- quality signals: num_words 186, num_chars 1,474, mean_word_length 5.075269, frac_words_unique 0.360215, frac_chars_top_2grams 0.044492, frac_chars_top_3grams 0.038136, frac_chars_top_4grams 0.023305, frac_chars_dupe_5grams 0.065678, dupe_6grams 0.065678, dupe_7grams 0.065678, dupe_8grams 0.065678, frac_chars_digital 0.000862, frac_chars_whitespace 0.213026, size_file_byte 1,474, num_lines 56, num_chars_line_max 81, num_chars_line_mean 26.321429, frac_chars_alphabet 0.812931, frac_chars_string_length 0.045839; python: cate_ast 1, frac_lines_func_ratio 0.194444, cate_var_zero false, frac_lines_import 0.055556, score_lines_no_logic 0.305556; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
Row 7
- hexsha: ed493a7b9c217715b159a3e6f8cf67b68a3aa7f3 | size: 478 | ext: py | lang: Python
- max_stars: custom_components/helpers.py @ zroger49/broadlink_custom_component, head c7b0f9648f1dbaad64e573561e852b689be5a755, licenses ["MIT"], count null, events null to null
- max_issues: same path/repo/head/licenses, count 2, events 2022-01-30T15:29:17.000Z to 2022-03-13T10:54:58.000Z
- max_forks: custom_components/helpers.py @ racelandshop/broadlink_custom_component (same head/licenses), count 1, events 2022-01-16T16:05:24.000Z to 2022-01-16T16:05:24.000Z
- content:

```python
"""Helpers for the Broadlink remote."""
from base64 import b64decode
from homeassistant.helpers import config_validation as cv
def decode_packet(value):
"""Decode a data packet given for a Broadlink remote."""
value = cv.string(value)
extra = len(value) % 4
if extra > 0:
value = value + ("=" * (4 - extra))
return b64decode(value)
def format_mac(mac):
"""Format a MAC address."""
return ":".join([format(octet, "02x") for octet in mac])
```

- avg_line_length: 26.555556 | max_line_length: 60 | alphanum_fraction: 0.65272
- quality signals: num_words 65, num_chars 478, mean_word_length 4.753846, frac_words_unique 0.538462, frac_chars_top_2grams 0.097087, frac_chars_digital 0.029255, frac_chars_whitespace 0.213389, size_file_byte 478, num_lines 17, num_chars_line_max 61, num_chars_line_mean 28.117647, frac_chars_alphabet 0.792553, frac_chars_comments 0.221757, frac_chars_string_length 0.014045; python: cate_ast 1, frac_lines_func_ratio 0.2, cate_var_zero false, frac_lines_import 0.2, score_lines_no_logic 0.6; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
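The padding arithmetic in `decode_packet` just restores the `=` characters that base64 decoding expects: a string of length `l` needs `(4 - l % 4) % 4` of them. A worked example (the packet strings are illustrative):

```python
from base64 import b64decode

for value in ("JgBQAAAB", "JgBQAAABKJI"):    # lengths 8 and 11
    extra = len(value) % 4
    if extra > 0:
        value = value + "=" * (4 - extra)    # 11 % 4 == 3 -> one '='
    print(value, "->", len(b64decode(value)), "bytes")
```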
Row 8
- hexsha: ed49cff59f1ab26e4ca17666763624af983410cc | size: 8,641 | ext: py | lang: Python
- max_stars: examples/pybullet/gym/pybullet_envs/bullet/kukaCamGymEnv.py @ motionfigures/bullet3, head 4a66d6c80b38a87ecbdf2fd5c4d281f0b5913d22, licenses ["Zlib"], count 51, events 2018-11-11T12:47:38.000Z to 2022-03-06T08:39:43.000Z
- max_issues: same path/repo/head/licenses, count 2, events 2019-11-15T03:21:45.000Z to 2020-09-10T11:53:58.000Z
- max_forks: same path/repo/head/licenses, count 14, events 2018-12-12T09:12:14.000Z to 2021-10-17T14:30:25.000Z
- content:

```python
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0,parentdir)
import math
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
import time
import pybullet as p
from . import kuka
import random
import pybullet_data
from pkg_resources import parse_version
maxSteps = 1000
RENDER_HEIGHT = 720
RENDER_WIDTH = 960
class KukaCamGymEnv(gym.Env):
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second' : 50
}
def __init__(self,
urdfRoot=pybullet_data.getDataPath(),
actionRepeat=1,
isEnableSelfCollision=True,
renders=False,
isDiscrete=False):
self._timeStep = 1./240.
self._urdfRoot = urdfRoot
self._actionRepeat = actionRepeat
self._isEnableSelfCollision = isEnableSelfCollision
self._observation = []
self._envStepCounter = 0
self._renders = renders
self._width = 341
self._height = 256
self._isDiscrete=isDiscrete
self.terminated = 0
self._p = p
if self._renders:
cid = p.connect(p.SHARED_MEMORY)
if (cid<0):
p.connect(p.GUI)
p.resetDebugVisualizerCamera(1.3,180,-41,[0.52,-0.2,-0.33])
else:
p.connect(p.DIRECT)
#timinglog = p.startStateLogging(p.STATE_LOGGING_PROFILE_TIMINGS, "kukaTimings.json")
self._seed()
self.reset()
observationDim = len(self.getExtendedObservation())
#print("observationDim")
#print(observationDim)
observation_high = np.array([np.finfo(np.float32).max] * observationDim)
if (self._isDiscrete):
self.action_space = spaces.Discrete(7)
else:
action_dim = 3
self._action_bound = 1
action_high = np.array([self._action_bound] * action_dim)
self.action_space = spaces.Box(-action_high, action_high)
self.observation_space = spaces.Box(low=0, high=255, shape=(self._height, self._width, 4))
self.viewer = None
def _reset(self):
self.terminated = 0
p.resetSimulation()
p.setPhysicsEngineParameter(numSolverIterations=150)
p.setTimeStep(self._timeStep)
p.loadURDF(os.path.join(self._urdfRoot,"plane.urdf"),[0,0,-1])
p.loadURDF(os.path.join(self._urdfRoot,"table/table.urdf"), 0.5000000,0.00000,-.820000,0.000000,0.000000,0.0,1.0)
xpos = 0.5 +0.2*random.random()
ypos = 0 +0.25*random.random()
ang = 3.1415925438*random.random()
orn = p.getQuaternionFromEuler([0,0,ang])
self.blockUid =p.loadURDF(os.path.join(self._urdfRoot,"block.urdf"), xpos,ypos,-0.1,orn[0],orn[1],orn[2],orn[3])
p.setGravity(0,0,-10)
self._kuka = kuka.Kuka(urdfRootPath=self._urdfRoot, timeStep=self._timeStep)
self._envStepCounter = 0
p.stepSimulation()
self._observation = self.getExtendedObservation()
return np.array(self._observation)
def __del__(self):
p.disconnect()
def _seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def getExtendedObservation(self):
#camEyePos = [0.03,0.236,0.54]
#distance = 1.06
#pitch=-56
#yaw = 258
#roll=0
#upAxisIndex = 2
#camInfo = p.getDebugVisualizerCamera()
#print("width,height")
#print(camInfo[0])
#print(camInfo[1])
#print("viewMatrix")
#print(camInfo[2])
#print("projectionMatrix")
#print(camInfo[3])
#viewMat = camInfo[2]
#viewMat = p.computeViewMatrixFromYawPitchRoll(camEyePos,distance,yaw, pitch,roll,upAxisIndex)
viewMat = [-0.5120397806167603, 0.7171027660369873, -0.47284144163131714, 0.0, -0.8589617609977722, -0.42747554183006287, 0.28186774253845215, 0.0, 0.0, 0.5504802465438843, 0.8348482847213745, 0.0, 0.1925382763147354, -0.24935829639434814, -0.4401884973049164, 1.0]
#projMatrix = camInfo[3]#[0.7499999403953552, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0000200271606445, -1.0, 0.0, 0.0, -0.02000020071864128, 0.0]
projMatrix = [0.75, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0000200271606445, -1.0, 0.0, 0.0, -0.02000020071864128, 0.0]
img_arr = p.getCameraImage(width=self._width,height=self._height,viewMatrix=viewMat,projectionMatrix=projMatrix)
rgb=img_arr[2]
np_img_arr = np.reshape(rgb, (self._height, self._width, 4))
self._observation = np_img_arr
return self._observation
def _step(self, action):
if (self._isDiscrete):
dv = 0.01
dx = [0,-dv,dv,0,0,0,0][action]
dy = [0,0,0,-dv,dv,0,0][action]
da = [0,0,0,0,0,-0.1,0.1][action]
f = 0.3
realAction = [dx,dy,-0.002,da,f]
else:
dv = 0.01
dx = action[0] * dv
dy = action[1] * dv
da = action[2] * 0.1
f = 0.3
realAction = [dx,dy,-0.002,da,f]
return self.step2( realAction)
def step2(self, action):
for i in range(self._actionRepeat):
self._kuka.applyAction(action)
p.stepSimulation()
if self._termination():
break
#self._observation = self.getExtendedObservation()
self._envStepCounter += 1
self._observation = self.getExtendedObservation()
if self._renders:
time.sleep(self._timeStep)
#print("self._envStepCounter")
#print(self._envStepCounter)
done = self._termination()
reward = self._reward()
#print("len=%r" % len(self._observation))
return np.array(self._observation), reward, done, {}
def _render(self, mode='human', close=False):
if mode != "rgb_array":
return np.array([])
    # NOTE: upstream referenced self._racecar here, a leftover from the racecar
    # env; the kuka body is the tracked object in this env. _cam_dist/_cam_yaw/
    # _cam_pitch are likewise not set in __init__ and must be defined elsewhere.
    base_pos, orn = self._p.getBasePositionAndOrientation(self._kuka.kukaUid)
view_matrix = self._p.computeViewMatrixFromYawPitchRoll(
cameraTargetPosition=base_pos,
distance=self._cam_dist,
yaw=self._cam_yaw,
pitch=self._cam_pitch,
roll=0,
upAxisIndex=2)
proj_matrix = self._p.computeProjectionMatrixFOV(
fov=60, aspect=float(RENDER_WIDTH)/RENDER_HEIGHT,
nearVal=0.1, farVal=100.0)
(_, _, px, _, _) = self._p.getCameraImage(
width=RENDER_WIDTH, height=RENDER_HEIGHT, viewMatrix=view_matrix,
        projectionMatrix=proj_matrix, renderer=p.ER_BULLET_HARDWARE_OPENGL)  # module is imported as p
rgb_array = np.array(px)
rgb_array = rgb_array[:, :, :3]
return rgb_array
def _termination(self):
#print (self._kuka.endEffectorPos[2])
state = p.getLinkState(self._kuka.kukaUid,self._kuka.kukaEndEffectorIndex)
actualEndEffectorPos = state[0]
#print("self._envStepCounter")
#print(self._envStepCounter)
if (self.terminated or self._envStepCounter>maxSteps):
self._observation = self.getExtendedObservation()
return True
maxDist = 0.005
closestPoints = p.getClosestPoints(self._kuka.trayUid, self._kuka.kukaUid,maxDist)
if (len(closestPoints)):#(actualEndEffectorPos[2] <= -0.43):
self.terminated = 1
#print("closing gripper, attempting grasp")
#start grasp and terminate
fingerAngle = 0.3
for i in range (100):
graspAction = [0,0,0.0001,0,fingerAngle]
self._kuka.applyAction(graspAction)
p.stepSimulation()
fingerAngle = fingerAngle-(0.3/100.)
if (fingerAngle<0):
fingerAngle=0
for i in range (1000):
graspAction = [0,0,0.001,0,fingerAngle]
self._kuka.applyAction(graspAction)
p.stepSimulation()
blockPos,blockOrn=p.getBasePositionAndOrientation(self.blockUid)
if (blockPos[2] > 0.23):
#print("BLOCKPOS!")
#print(blockPos[2])
break
state = p.getLinkState(self._kuka.kukaUid,self._kuka.kukaEndEffectorIndex)
actualEndEffectorPos = state[0]
if (actualEndEffectorPos[2]>0.5):
break
self._observation = self.getExtendedObservation()
return True
return False
def _reward(self):
#rewards is height of target object
blockPos,blockOrn=p.getBasePositionAndOrientation(self.blockUid)
closestPoints = p.getClosestPoints(self.blockUid,self._kuka.kukaUid,1000, -1, self._kuka.kukaEndEffectorIndex)
reward = -1000
numPt = len(closestPoints)
#print(numPt)
if (numPt>0):
#print("reward:")
reward = -closestPoints[0][8]*10
if (blockPos[2] >0.2):
#print("grasped a block!!!")
#print("self._envStepCounter")
#print(self._envStepCounter)
reward = reward+1000
#print("reward")
#print(reward)
return reward
if parse_version(gym.__version__)>=parse_version('0.9.6'):
render = _render
reset = _reset
seed = _seed
step = _step
```

- avg_line_length: 32.731061 | max_line_length: 270 | alphanum_fraction: 0.661035
- quality signals: num_words 1,091, num_chars 8,641, mean_word_length 5.092576, frac_words_unique 0.248396, frac_chars_top_2grams 0.025198, frac_chars_top_3grams 0.025918, frac_chars_top_4grams 0.024478, frac_chars_dupe_5grams 0.210223, dupe_6grams 0.173326, dupe_7grams 0.100612, dupe_8grams 0.083873, dupe_9grams 0.061915, dupe_10grams 0.061915, frac_chars_digital 0.091742, frac_chars_whitespace 0.204027, size_file_byte 8,641, num_lines 263, num_chars_line_max 271, num_chars_line_mean 32.855513, frac_chars_alphabet 0.716051, frac_chars_comments 0.138294, frac_lines_dupe_lines 0.202128, frac_chars_string_length 0.014045, frac_chars_long_word_length 0.003106; python: cate_ast 1, frac_lines_func_ratio 0.053191, cate_var_zero false, frac_lines_import 0.06383, score_lines_no_logic 0.18617; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
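A usage sketch for the environment above; it assumes pybullet and pybullet_data are installed and runs headless (`renders=False`):

```python
from pybullet_envs.bullet.kukaCamGymEnv import KukaCamGymEnv

env = KukaCamGymEnv(renders=False, isDiscrete=True)
obs = env.reset()                       # RGBA image, shape (256, 341, 4)
for _ in range(10):
    action = env.action_space.sample()  # one of 7 discrete actions
    obs, reward, done, _ = env.step(action)
    if done:
        break
```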
Row 9
- hexsha: ed4a2840446404d2f282a8452d7b98f961fd5554 | size: 6,392 | ext: py | lang: Python
- max_stars: hi-ml-histopathology/src/histopathology/preprocessing/tiling.py @ kumar-pratik/hi-ml, head a108cf4ea244a76127adedc0ca60f0a5afdfb3e8, licenses ["MIT"], count 402, events 2020-09-22T16:38:16.000Z to 2022-03-30T09:56:03.000Z
- max_issues: same path/repo/head/licenses, count 259, events 2020-09-23T09:32:33.000Z to 2022-03-30T18:15:01.000Z
- max_forks: same path/repo/head/licenses, count 112, events 2020-09-23T00:12:58.000Z to 2022-03-31T07:39:55.000Z
- content:

```python
# ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
# These tiling implementations are adapted from PANDA Kaggle solutions, for example:
# https://github.com/kentaroy47/Kaggle-PANDA-1st-place-solution/blob/master/src/data_process/a00_save_tiles.py
from typing import Any, Optional, Tuple
import numpy as np
def get_1d_padding(length: int, tile_size: int) -> Tuple[int, int]:
"""Computes symmetric padding for `length` to be divisible by `tile_size`."""
pad = (tile_size - length % tile_size) % tile_size
return (pad // 2, pad - pad // 2)
def pad_for_tiling_2d(array: np.ndarray, tile_size: int, channels_first: Optional[bool] = True,
**pad_kwargs: Any) -> Tuple[np.ndarray, np.ndarray]:
"""Symmetrically pads a 2D `array` such that both dimensions are divisible by `tile_size`.
:param array: 2D image array.
:param tile_size: Width/height of each tile in pixels.
:param channels_first: Whether `array` is in CHW (`True`, default) or HWC (`False`) layout.
:param pad_kwargs: Keyword arguments to be passed to `np.pad()` (e.g. `constant_values=0`).
:return: A tuple containing:
- `padded_array`: Resulting array, in the same CHW/HWC layout as the input.
- `offset`: XY offset introduced by the padding. Add this to coordinates relative to the
original array to obtain indices for the padded array.
"""
height, width = array.shape[1:] if channels_first else array.shape[:-1]
padding_h = get_1d_padding(height, tile_size)
padding_w = get_1d_padding(width, tile_size)
padding = [padding_h, padding_w]
channels_axis = 0 if channels_first else 2
padding.insert(channels_axis, (0, 0)) # zero padding on channels axis
padded_array = np.pad(array, padding, **pad_kwargs)
offset = (padding_w[0], padding_h[0])
return padded_array, np.array(offset)
def tile_array_2d(array: np.ndarray, tile_size: int, channels_first: Optional[bool] = True,
**pad_kwargs: Any) -> Tuple[np.ndarray, np.ndarray]:
"""Split an image array into square non-overlapping tiles.
The array will be padded symmetrically if its dimensions are not exact multiples of `tile_size`.
:param array: Image array.
:param tile_size: Width/height of each tile in pixels.
:param pad_kwargs: Keyword arguments to be passed to `np.pad()` (e.g. `constant_values=0`).
:param channels_first: Whether `array` is in CHW (`True`, default) or HWC (`False`) layout.
:return: A tuple containing:
- `tiles`: A batch of tiles in NCHW layout.
- `coords`: XY coordinates of each tile, in the same order.
"""
padded_array, (offset_w, offset_h) = pad_for_tiling_2d(array, tile_size, channels_first, **pad_kwargs)
if channels_first:
channels, height, width = padded_array.shape
else:
height, width, channels = padded_array.shape
n_tiles_h = height // tile_size
n_tiles_w = width // tile_size
if channels_first:
intermediate_shape = (channels, n_tiles_h, tile_size, n_tiles_w, tile_size)
axis_order = (1, 3, 0, 2, 4) # (n_tiles_h, n_tiles_w, channels, tile_size, tile_size)
output_shape = (n_tiles_h * n_tiles_w, channels, tile_size, tile_size)
else:
intermediate_shape = (n_tiles_h, tile_size, n_tiles_w, tile_size, channels)
axis_order = (0, 2, 1, 3, 4) # (n_tiles_h, n_tiles_w, tile_size, tile_size, channels)
output_shape = (n_tiles_h * n_tiles_w, tile_size, tile_size, channels)
tiles = padded_array.reshape(intermediate_shape) # Split width and height axes
tiles = tiles.transpose(axis_order)
tiles = tiles.reshape(output_shape) # Flatten tile batch dimension
# Compute top-left coordinates of every tile, relative to the original array's origin
coords_h = tile_size * np.arange(n_tiles_h) - offset_h
coords_w = tile_size * np.arange(n_tiles_w) - offset_w
# Shape: (n_tiles_h * n_tiles_w, 2)
coords = np.stack(np.meshgrid(coords_w, coords_h), axis=-1).reshape(-1, 2)
return tiles, coords
def assemble_tiles_2d(tiles: np.ndarray, coords: np.ndarray, fill_value: Optional[float] = np.nan,
channels_first: Optional[bool] = True) -> Tuple[np.ndarray, np.ndarray]:
"""Assembles a 2D array from sequences of tiles and coordinates.
:param tiles: Stack of tiles with batch dimension first.
:param coords: XY tile coordinates, assumed to be spaced by multiples of `tile_size` (shape: [N, 2]).
:param fill_value: Value to assign to empty elements (default: `NaN`).
:param channels_first: Whether each tile is in CHW (`True`, default) or HWC (`False`) layout.
:return: A tuple containing:
- `array`: The reassembled 2D array with the smallest dimensions to contain all given tiles.
    - `offset`: XY offset introduced by the assembly (the negated lowest XY tile
      coordinates). Add this to tile coordinates to obtain indices for the
      assembled array.
"""
if coords.shape[0] != tiles.shape[0]:
raise ValueError(f"Tile coordinates and values must have the same length, "
f"got {coords.shape[0]} and {tiles.shape[0]}")
if channels_first:
n_tiles, channels, tile_size, _ = tiles.shape
else:
n_tiles, tile_size, _, channels = tiles.shape
tile_xs, tile_ys = coords.T
x_min, x_max = min(tile_xs), max(tile_xs + tile_size)
y_min, y_max = min(tile_ys), max(tile_ys + tile_size)
width = x_max - x_min
height = y_max - y_min
output_shape = (channels, height, width) if channels_first else (height, width, channels)
array = np.full(output_shape, fill_value)
offset = np.array([-x_min, -y_min])
for idx in range(n_tiles):
row = coords[idx, 1] + offset[1]
col = coords[idx, 0] + offset[0]
if channels_first:
array[:, row:row + tile_size, col:col + tile_size] = tiles[idx]
else:
array[row:row + tile_size, col:col + tile_size, :] = tiles[idx]
return array, offset
```

- avg_line_length: 49.550388 | max_line_length: 110 | alphanum_fraction: 0.661608
- quality signals: num_words 930, num_chars 6,392, mean_word_length 4.35914, frac_words_unique 0.22043, frac_chars_top_2grams 0.078934, frac_chars_top_3grams 0.01554, frac_chars_top_4grams 0.019734, frac_chars_dupe_5grams 0.308584, dupe_6grams 0.261223, dupe_7grams 0.236063, dupe_8grams 0.230883, dupe_9grams 0.225456, dupe_10grams 0.225456, frac_chars_digital 0.010277, frac_chars_whitespace 0.208385, size_file_byte 6,392, num_lines 128, num_chars_line_max 111, num_chars_line_mean 49.9375, frac_chars_alphabet 0.790909, frac_chars_comments 0.442897, frac_lines_dupe_lines 0.153846, frac_chars_string_length 0.028454; python: cate_ast 1, frac_lines_func_ratio 0.061538, cate_var_zero false, frac_lines_import 0.030769, score_lines_no_logic 0.153846; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
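A round-trip check for the two functions above, assuming they are importable from the module; `constant_values=0` and `fill_value=0` keep the padded border at zero:

```python
import numpy as np
# Hypothetical import path, mirroring the file location above:
# from histopathology.preprocessing.tiling import tile_array_2d, assemble_tiles_2d

chw = np.arange(3 * 5 * 7).reshape(3, 5, 7)   # CHW image, not tile-aligned
tiles, coords = tile_array_2d(chw, tile_size=4, constant_values=0)
print(tiles.shape)                            # (4, 3, 4, 4): a 2x2 grid of tiles
assembled, offset = assemble_tiles_2d(tiles, coords, fill_value=0)
# Shifting by the returned offset and cropping recovers the original image.
recovered = assembled[:, offset[1]:offset[1] + 5, offset[0]:offset[0] + 7]
assert np.array_equal(recovered, chw)
```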
Row 10
- hexsha: ed4a5900145229cd2e22ae9792f8a8881bfd83d3 | size: 9,697 | ext: py | lang: Python
- max_stars: miniapp/miniapp/hartreefock/hf.py @ savcardamone/tyche-, head ea89edea89a607291e4fe0ba738d75522f54dc1a, licenses ["MIT"], count null, events null to null
- max_issues: same path/repo/head/licenses, count 1, events 2018-12-28T13:30:16.000Z to 2018-12-29T10:30:33.000Z
- max_forks: miniapp/miniapp/hartreefock/hf.py @ savcardamone/tyche (same head/licenses), count null, events null to null
- content:

```python
from math import pi
from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace
from numpy.linalg import eig, norm
class HartreeFock():
zeta = array([38.474970, 5.782948, 1.242567, 0.298073])
num_aos = len(zeta)
num_mos = 0
energy_tolerance = 0.0001; density_tolerance = 0.001
prev_energy = 0
prev_density = []
def __init__(self, num_elec):
# Make sure we can pair electrons
if num_elec % 2 != 0:
raise Exception("Can't do a RHF with", num_elec, "electrons.")
else:
print("Restricted Hartree-Fock with", num_elec, "electron(s).")
# We're RHF, so pair up spins in each molecular orbital
self.num_mos = int(num_elec / 2)
if self.num_mos > self.num_aos:
raise Exception("Can't create", self.num_mos, "molecular orbital(s) from", self.num_aos, "atomic orbital(s).")
else:
print(self.num_aos, "atomic orbital(s) and", self.num_mos, "molecular orbital(s).")
print("Zeta: ", self.zeta)
self.prev_density = ndarray(shape=(self.num_aos,self.num_aos),dtype=float, order='C')
def one_electron_integrals(self):
def overlap_kernel(zeta_i, zeta_j):
return pow(pi / (zeta_i + zeta_j), 1.5)
def kinetic_kernel(zeta_i, zeta_j):
return 3 * pow(pi, 1.5) * (zeta_i * zeta_j) / pow(zeta_i + zeta_j, 2.5)
def nucattr_kernel(zeta_i, zeta_j):
return (-4 * pi) / (zeta_i + zeta_j)
# Initialise our matrices
overlap = ndarray(shape=(self.num_aos,self.num_aos), dtype=float, order='C')
kinetic = ndarray(shape=(self.num_aos,self.num_aos), dtype=float, order='C')
nucattr = ndarray(shape=(self.num_aos,self.num_aos), dtype=float, order='C')
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
overlap[i_ao,j_ao] = overlap_kernel(self.zeta[i_ao], self.zeta[j_ao])
kinetic[i_ao,j_ao] = kinetic_kernel(self.zeta[i_ao], self.zeta[j_ao])
nucattr[i_ao,j_ao] = nucattr_kernel(self.zeta[i_ao], self.zeta[j_ao])
return overlap, kinetic, nucattr
def two_electron_integrals(self):
def tei_kernel(zeta_i, zeta_j, zeta_k, zeta_l):
temp_1 = (zeta_i + zeta_j) * (zeta_k + zeta_l)
temp_2 = sqrt(zeta_i + zeta_j + zeta_k + zeta_l)
return 2 * pow(pi, 2.5) / (temp_1 * temp_2)
teis = ndarray(shape=(self.num_aos,self.num_aos,self.num_aos,self.num_aos), dtype=float, order='C')
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
for k_ao in range(self.num_aos):
for l_ao in range(self.num_aos):
teis[i_ao,j_ao,k_ao,l_ao] = tei_kernel(self.zeta[i_ao], self.zeta[j_ao], self.zeta[k_ao], self.zeta[l_ao])
return teis
def basis_transformation_matrix(self, overlap):
# Get the eigenvalues and eigenvectors of the overlap matrix
overlap_evals, overlap_evecs = eig(overlap)
# Create diagonal matrix with entries given by inverse of eigenvalues of
# overlap matrix
try:
inv_sqrt_evals = diag(divide(1., sqrt(overlap_evals)))
except:
raise Exception("Overlap matrix is not positive definite.")
# Construct the basis transformation matrix and return it
return overlap_evecs @ inv_sqrt_evals @ overlap_evecs.T
def fock_matrix(self, core_hamiltonian, teis, density):
fock = ndarray(shape=density.shape, dtype=float, order='C')
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
fock[i_ao,j_ao] = core_hamiltonian[i_ao,j_ao]
for k_ao in range(self.num_aos):
for l_ao in range(self.num_aos):
coulomb = teis[i_ao,k_ao,j_ao,l_ao]
exchange = teis[i_ao,k_ao,l_ao,j_ao]
fock[i_ao,j_ao] += density[k_ao,l_ao] * (coulomb - 0.5*exchange)
return fock
def density_matrix(self, overlap, basis_transform, fock):
def ordered_eigensystem(matrix):
# Generate the eigenvalues and eigenvectors of the matrix
evals, evecs = eig(matrix)
# Sort the eigenvalues in ascending order and keep a track of what index they
# were originally assigned
ordered_indices = argsort(evals)
ordered_evals = sort(evals)
# Order the eigenvectors in asceding order of their corresponding eigenvalues
ordered_evecs = ndarray(shape=evecs.shape, dtype=float, order='C')
ordered_transform = ndarray(shape=evecs.shape, dtype=float, order='C')
for i_evec in range(len(ordered_evals)):
ordered_evecs[:,i_evec] = evecs[:,ordered_indices[i_evec]]
ordered_transform[i_evec,:] = basis_transform[ordered_indices[i_evec],:]
# Return the ordered eigenvalues and corresponding eigenvectors
return ordered_evals, ordered_evecs, ordered_transform
# Transform Fock matrix to orthogonal basis
fock = basis_transform.T @ fock @ basis_transform
# Get the eigenvalues and eigenvectors of the input Fock matrix
fock_evals, fock_evecs, new_transform = ordered_eigensystem(fock)
# Transform the eigenvectors of the Fock matrix back to the original basis
fock_evecs = new_transform @ fock_evecs
# First of all we make sure the eigenvectors of the Fock matrix are normalised by the
# overlap matrix (these are molecular orbitals, afterall)
for i_mo in range(self.num_aos):
ao_coeffs = fock_evecs[:,i_mo]
norm = ao_coeffs.T @ overlap @ ao_coeffs
fock_evecs[:,i_mo] /= sqrt(norm)
# Initialise the density matrix
density = ndarray(shape=overlap.shape, dtype=float, order='C')
# Loop over all elements in the density matrix and accumulate
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
density[i_ao,j_ao] = 0.0
# We accumulate only over occupied molecular orbitals! Note that we also have
# access to the virtual orbitals at this point, but they're effectively discarded
for i_mo in range(self.num_mos):
density[i_ao,j_ao] += 2 * fock_evecs[i_ao,i_mo] * fock_evecs[j_ao,i_mo]
return fock_evecs, density
def scf_energy(self, density, core_hamiltonian, fock):
energy = 0.0
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
energy += 0.5 * density[i_ao,j_ao] * (core_hamiltonian[i_ao,j_ao] + fock[i_ao,j_ao])
return energy
def check_convergence(self, energy, density):
if abs(energy - self.prev_energy) < self.energy_tolerance:
energy_converged = True
else:
energy_converged = False
self.prev_energy = energy
if norm(density - self.prev_density) < self.density_tolerance:
density_converged = True
else:
density_converged = False
self.prev_density = density
return energy_converged, density_converged
def mulliken(self, overlap, density):
return trace(density @ overlap)
def run(self, num_cycles):
print("Hartree-Fock will run for a maximum of", num_cycles, "SCF iteration(s).")
overlap, kinetic, nucattr = self.one_electron_integrals()
core_hamiltonian = kinetic + nucattr
teis = self.two_electron_integrals()
basis_transform = self.basis_transformation_matrix(overlap)
_, density = self.density_matrix(overlap, basis_transform, core_hamiltonian)
energy = self.scf_energy(density, core_hamiltonian, core_hamiltonian)
for i in range(num_cycles):
fock = self.fock_matrix(core_hamiltonian, teis, density)
fock_evecs, density = self.density_matrix(overlap, basis_transform, fock)
energy = self.scf_energy(density, core_hamiltonian, fock)
print("Iteration", i, "SCF Energy:", energy)
energy_converged, density_converged = self.check_convergence(energy, density)
if energy_converged and density_converged:
print("SCF has converged!")
for i_mo in range(self.num_mos):
print("Molecular Orbital", i_mo, "Coefficients :", fock_evecs[:,i_mo])
print("Mulliken charge:", self.mulliken(overlap, density))
break
if i == num_cycles - 1:
print("SCF failed to converge.")
print("Energy Convergence Check:", energy_converged)
print("Density Convergence Check:", density_converged)
fock_mo_basis = ndarray(shape=(self.num_mos,self.num_mos), dtype=float, order='C')
for i_mo in range(self.num_mos):
for j_mo in range(self.num_mos):
fock_mo_basis[i_mo,j_mo] = 0.0
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
fock_mo_basis[i_mo,j_mo] += fock_evecs[i_ao,j_mo] * fock_evecs[j_ao,i_mo] * fock[i_ao,j_ao]
print(fock_mo_basis)
if __name__ == "__main__":
hf = HartreeFock(4)
hf.run(2000)
```

- avg_line_length: 39.579592 | max_line_length: 131 | alphanum_fraction: 0.607301
- quality signals: num_words 1,313, num_chars 9,697, mean_word_length 4.252856, frac_words_unique 0.156893, frac_chars_top_2grams 0.055158, frac_chars_top_3grams 0.057307, frac_chars_top_4grams 0.05265, frac_chars_dupe_5grams 0.350287, dupe_6grams 0.310888, dupe_7grams 0.250537, dupe_8grams 0.189828, dupe_9grams 0.153653, dupe_10grams 0.121418, frac_chars_digital 0.011183, frac_chars_whitespace 0.299165, size_file_byte 9,697, num_lines 245, num_chars_line_max 132, num_chars_line_mean 39.579592, frac_chars_alphabet 0.810477, frac_chars_comments 0.120553, frac_lines_dupe_lines 0.155405, frac_chars_string_length 0.053734; python: cate_ast 1, frac_lines_func_ratio 0.101351, cate_var_zero false, frac_lines_import 0.02027, frac_lines_simplefunc 0.027027, score_lines_no_logic 0.263514, frac_lines_print 0.081081; all other signals 0
- raw qsc_code_*/qsc_codepython_* columns: all 0 (frac_words_unique and frac_lines_string_concat null) | effective: 1 | hits: 0
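For reference, the double loop in `scf_energy` above implements the standard restricted Hartree-Fock electronic energy in terms of the density matrix $P$, the core Hamiltonian $H^{\text{core}}$, and the Fock matrix $F$:

$$E_{\text{SCF}} = \frac{1}{2} \sum_{\mu\nu} P_{\mu\nu}\,\bigl(H^{\text{core}}_{\mu\nu} + F_{\mu\nu}\bigr)$$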
Row 11
- hexsha: ed4d3cea76d6d1815b54c52a975d47ddfb5f8c99 | size: 7,496 | ext: py | lang: Python
- max_stars / max_issues / max_forks: server/djangoapp/restapis.py @ christiansencq/ibm_capstone, head d445fd40c0267be0948a5d85e9d43828b908641c, licenses ["Apache-2.0"], counts null, events null
- content:
import requests
import json
# import related models here
from .models import CarDealer, DealerReview
from requests.auth import HTTPBasicAuth
import logging
logger = logging.getLogger(__name__)
# Create a `get_request` to make HTTP GET requests
# e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'},
# auth=HTTPBasicAuth('apikey', api_key))
def get_request(url, api_key, **kwargs):
print("GET from {}".format(url))
print(kwargs)
try:
if api_key is not None:
response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs, auth=HTTPBasicAuth('apikey', api_key))
else:
response = requests.get(url, headers={'Content-Type': 'application/json'}, params=kwargs)
    except requests.exceptions.RequestException:
        print("Network Error")
        # Return early: `response` is unbound if the request itself failed.
        return None, None
status_code = response.status_code
print("With status code {}".format(status_code))
json_data = json.loads(response.text)
return json_data, status_code
# Create a `post_request` to make HTTP POST requests
# e.g., response = requests.post(url, params=kwargs, json=payload)
def post_request(url, json_payload, **kwargs):
print("Post to url: {} ".format(url))
print(kwargs)
print(json_payload)
response = requests.post(url, headers={'Content-Type': 'application/json'}, params=kwargs, json=json_payload)
status_code = response.status_code
print("With status code {}".format(status_code))
json_data = json.loads(response.text)
return json_data, status_code
# Create a get_dealers_from_cf method to get dealers from a cloud function
def get_dealers_from_cf(url, **kwargs):
info = []
result = "ok"
# - Call get_request() with specified arguments
logger.info("Get Dealers from CF Called!")
json_result, status_code = get_request(url, None)
if status_code == 200 and json_result:
dealers = json_result['rows']
logger.info(len(dealers))
for dealer in dealers:
dlr_data = dealer['doc']
#print('ADDRESS', dlr_data["address"])
if dlr_data.get('address'):
# Create a CarDealer object with values in `doc` object
dealer_obj = CarDealer(address=dlr_data.get("address"), city=dlr_data.get("city"), full_name=dlr_data.get("full_name"),
id=dlr_data.get("id"), lat=dlr_data.get("lat"), long=dlr_data.get("long"),
short_name=dlr_data.get("short_name"), state=dlr_data.get("state"),
st=dlr_data.get("st"), zip=dlr_data.get("zip"))
info.append(dealer_obj)
elif json_result:
result = json_result["message"]
else:
result = "Unknown error"
return info, result
def get_dealer_by_id(url, dealerId):
# Call get_request with a URL parameter
info = None
result = "ok"
json_result, status_code = get_request(url, None, dealerId=dealerId)
if status_code == 200 and json_result:
# Get the row list in JSON as dealers
dealers = json_result["rows"]
for dealer in dealers:
# Create a CarDealer object with values in `doc` object
info = CarDealer(address=dealer.get("address"), city=dealer.get("city"), full_name=dealer.get("full_name"),
id=dealer.get("id"), lat=dealer.get("lat"), long=dealer.get("long"),
short_name=dealer.get("short_name"),
st=dealer.get("st"), state=dealer.get("state"), zip=dealer.get("zip"))
elif json_result:
result = json_result["message"]
else:
result = "Unknown error"
return info, result
def get_dealers_by_state (url, state):
info = []
result = "ok"
# Call get_request with a URL parameter
json_result, status_code = get_request(url, None, state=state)
if status_code == 200 and json_result:
# Get the row list in JSON as dealers
dealers = json_result["rows"]
# For each dealer object
for dealer in dealers:
# dlr_data = dealer["doc"]
# Create a CarDealer object with values in `doc` object
dealer_obj = CarDealer(address=dealer.get("address"), city=dealer.get("city"), full_name=dealer.get("full_name"),
id=dealer.get("id"), lat=dealer.get("lat"), long=dealer.get("long"),
short_name=dealer.get("short_name"), state=dealer.get("state"),
st=dealer.get("st"), zip=dealer.get("zip"))
info.append(dealer_obj)
elif json_result:
result = json_result["message"]
else:
result = "Unknown error"
return info, result
def get_dealer_reviews_from_cf (url, dealerId):
info = []
result = "ok"
# Call get_request with a URL parameter
json_result, status_code = get_request(url, None, dealerId=dealerId)
if status_code == 200 and json_result:
# Get the row list in JSON as reviews
reviews = json_result["body"]["data"]
# For each review object
for review in reviews:
if (dealerId == review.get("dealership")):
# Create a DealerReview object with values in object
#sentiment = analyze_review_sentiments(review["review"])
review_obj = DealerReview( id=review.get("id"), name=review.get("name"), review=review.get("review"),
purchase=review.get("purchase"), car_make=review.get("car_make", None),
car_model=review.get("car_model", None), car_year=review.get("car_year", None),
purchase_date=review.get("purchase_date", None))
info.append(review_obj)
elif json_result:
result = json_result["message"]
else:
result = "Unknown error"
return info, result
# Create an `analyze_review_sentiments` method to call Watson NLU and analyze text
# def analyze_review_sentiments(text):
# - Call get_request() with specified arguments
# - Get the returned sentiment label such as Positive or Negative
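# --- Added sketch: analyze_review_sentiments ----------------------------------
# A minimal implementation matching the stub comments above. The NLU endpoint
# URL and API key arguments are assumptions (this module never defines them);
# the response shape follows Watson NLU's documented /v1/analyze output.
def analyze_review_sentiments(text, nlu_url, api_key):
    params = {
        "text": text,
        "version": "2021-08-01",
        "features": "sentiment",
        "return_analyzed_text": True,
    }
    json_data, status_code = get_request(nlu_url, api_key, **params)
    if status_code == 200 and json_data:
        # e.g. "positive", "negative" or "neutral"
        return json_data.get("sentiment", {}).get("document", {}).get("label")
    return None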
[per-file quality-signal metric columns omitted]

ed4d8f444cbb7eba10514b86dbcd28fb80cd5824 | 2,035 bytes | Python (py) | examples/python/upload.py | oslokommune/okdata-data-uploader @ fc006ae90440b267613260bba90235799bf0cf6e | MIT
import logging
from configparser import ConfigParser
from sdk.data_uploader import DataUploader
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
config = ConfigParser()
config.read("config.ini")
#####
# Datasets to be added to metadata API
datasetData = {
"title": "Test",
"description": "Test data",
"keywords": ["test"],
"accessRights": "non-public",
"objective": "Formรฅlsbeskrivelse",
"contactPoint": {
"name": "Tim",
"email": "tim@example.org",
"phone": "12345678",
},
"publisher": "Tim",
}
datasetVersionData = {"version": "6", "schema": {}, "transformation": {}}
datasetVersionEditionData = {
"edition": "2019-05-28T15:37:00+02:00",
"description": "Data for one hour",
"startTime": "2018-12-21T08:00:00+01:00",
"endTime": "2018-12-21T09:00:00+01:00",
}
######
# The dataset* variables are optional; if they are set in config.ini, this
# script skips the corresponding DataUploader call
datasetId = config.get("dataUploader", "datasetId", fallback=None)
datasetVersion = config.get("dataUploader", "datasetVersion", fallback=None)
datasetVersionEdition = config.get(
"dataUploader", "datasetVersionEdition", fallback=None
)
upload = DataUploader(config)
try:
log.info("Uploading a file to S3")
upload.login()
if datasetId is None:
upload.createDataset(datasetData)
if datasetVersion is None:
upload.createVersion(datasetVersionData)
if datasetVersionEdition is None:
upload.createEdition(datasetVersionEditionData)
log.info(f"Dataset: {upload.datasetId}")
log.info(f"Version: {upload.datasetVersion}")
log.info(f"Edition: {upload.datasetVersionEdition}")
if upload.upload("README.md"):
log.info("Done... go brew some coffee")
else:
log.error("Could not upload file....")
except Exception as e:
log.exception(f">> Something went horrible wrong:\n{e}")
# To upload with curl: cmd = upload.curl("tmp3.zip")
# Max upload size for now is 5GB
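# --- Added sketch: example config.ini -----------------------------------------
# A minimal config.ini matching the keys this script reads. The [dataUploader]
# section name and key names come from the config.get(...) calls above; any
# extra settings DataUploader itself needs are not shown in this file:
#
#   [dataUploader]
#   datasetId = my-dataset
#   datasetVersion = 6
#   datasetVersionEdition = 2019-05-28T15:37:00+02:00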
[per-file quality-signal metric columns omitted]

ed4f8a612b9b05faf17087ad729a1b5925503103 | 1,358 bytes | Python (py) | setup.py | xmedius/xmedius-mailrelayserver @ 44bb55c4b543e96bb23a45499d281c1bbab18abf | MIT
from setuptools import setup
from setuptools.command.install import install
class PostInstallCommand(install):
user_options = install.user_options + [
('noservice', None, None),
]
def initialize_options(self):
install.initialize_options(self)
self.noservice = None
def finalize_options(self):
install.finalize_options(self)
def run(self):
install.run(self)
if not self.noservice:
from xmediusmailrelayserver import console
console.install_service(['--startup', 'auto', 'install'])
setup(
name='xmediusmailrelayserver',
version='1.0.0',
description='The Python module to be used to relay mail to different servers depending on patterns',
long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information',
url='https://github.com/xmedius/xmedius-mailrelayserver/',
author='XMedius R&D',
license='MIT',
classifiers=[
'Programming Language :: Python :: 3.6',
'Environment :: Win32 (MS Windows)',
'Operating System :: Microsoft :: Windows'
],
cmdclass={
'install': PostInstallCommand
},
packages=['xmediusmailrelayserver'],
package_data={'xmediusmailrelayserver': ['config.yml']},
install_requires=['pyyaml', 'aiosmtpd'],
dependency_links=[]
)
[per-file quality-signal metric columns omitted]

ed502bad0b09f0685ca1ff615fe9d8b7f8ad7287 | 1,554 bytes | Python (py) | 143.py | tsbxmw/leetcode @ e751311b8b5f2769874351717a22c35c19b48a36 | MIT
# 143. Reorder List
# Given a singly linked list L: L0 -> L1 -> ... -> Ln-1 -> Ln,
# reorder it to: L0 -> Ln -> L1 -> Ln-1 -> L2 -> Ln-2 -> ...
# You may not simply change the values inside the nodes;
# the nodes themselves must actually be rearranged.
# Example 1:
# Given list 1->2->3->4, reorder it to 1->4->2->3.
# Example 2:
# Given list 1->2->3->4->5, reorder it to 1->5->2->4->3.
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
# Definition for singly-linked list (made concrete so this file runs standalone):
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None
## Overall idea: not a pairwise swap. Recurse to the last node first, then
## splice the tail node in behind the current front pointer (self.pre):
## 1 -> 2 -> 3 -> 4 -> 5
## temp = 1.next == 2
## 1.next = 4.next == 5
## 4.next = None
## 1.next.next == 5.next = 2
## now = 2
## last = 3.next
class Solution:
def reorderList(self, head: ListNode) -> None:
"""
Do not return anything, modify head in-place instead.
"""
if not head:
return
self.pre = head
self.flag = True
def test(node):
            if not node.next:  # if node.next is None, there is nothing left to splice
return
test(node.next)
if not self.flag:
return
if not self.pre.next:
self.flag = False
return
if self.pre == node:
self.flag = False
return
temp = self.pre.next
self.pre.next = node.next
self.pre.next.next = temp
self.pre = temp
node.next = None
test(self.pre)
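# --- Added sketch: quick check for Solution.reorderList ------------------------
# Minimal helpers and a smoke test (assumptions added for illustration, not
# part of the original submission):
def build_list(values):
    head = ListNode(values[0])
    node = head
    for v in values[1:]:
        node.next = ListNode(v)
        node = node.next
    return head

def to_pylist(head):
    out = []
    while head:
        out.append(head.val)
        head = head.next
    return out

if __name__ == "__main__":
    head = build_list([1, 2, 3, 4, 5])
    Solution().reorderList(head)
    print(to_pylist(head))  # expected: [1, 5, 2, 4, 3]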
[per-file quality-signal metric columns omitted]

ed510f09d28e1fdb65727ee044b934fd67984f9c | 2,555 bytes | Python (py) | onmt/bin/build_vocab.py | comydream/OpenNMT-py @ 2f3c810069ca03b752d9886782648e576b39a06d | MIT | 1 star (2021-10-01) | also urialon/OpenNMT-py @ bdca05a3fac8f864b21c86a8ad03c09895212e70
#!/usr/bin/env python
"""Get vocabulary coutings from transformed corpora samples."""
from onmt.utils.logging import init_logger
from onmt.utils.misc import set_random_seed, check_path
from onmt.utils.parse import ArgumentParser
from onmt.opts import dynamic_prepare_opts
from onmt.inputters.corpus import build_vocab
from onmt.transforms import make_transforms, get_transforms_cls
def build_vocab_main(opts):
"""Apply transforms to samples of specified data and build vocab from it.
Transforms that need vocab will be disabled in this.
    Built vocab is saved in plain-text format as follows and can be passed as
    `-src_vocab` (and `-tgt_vocab`) when training:
```
<tok_0>\t<count_0>
<tok_1>\t<count_1>
```
"""
ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True)
assert opts.n_sample == -1 or opts.n_sample > 1, \
f"Illegal argument n_sample={opts.n_sample}."
logger = init_logger()
set_random_seed(opts.seed, False)
transforms_cls = get_transforms_cls(opts._all_transform)
fields = None
transforms = make_transforms(opts, transforms_cls, fields)
logger.info(f"Counter vocab from {opts.n_sample} samples.")
src_counter, tgt_counter, src_feats_counter = build_vocab(
opts, transforms, n_sample=opts.n_sample)
logger.info(f"Counters src:{len(src_counter)}")
logger.info(f"Counters tgt:{len(tgt_counter)}")
for feat_name, feat_counter in src_feats_counter.items():
logger.info(f"Counters {feat_name}:{len(feat_counter)}")
def save_counter(counter, save_path):
check_path(save_path, exist_ok=opts.overwrite, log=logger.warning)
with open(save_path, "w", encoding="utf8") as fo:
for tok, count in counter.most_common():
fo.write(tok + "\t" + str(count) + "\n")
if opts.share_vocab:
src_counter += tgt_counter
tgt_counter = src_counter
logger.info(f"Counters after share:{len(src_counter)}")
save_counter(src_counter, opts.src_vocab)
else:
save_counter(src_counter, opts.src_vocab)
save_counter(tgt_counter, opts.tgt_vocab)
for k, v in src_feats_counter.items():
save_counter(v, opts.src_feats_vocab[k])
def _get_parser():
parser = ArgumentParser(description='build_vocab.py')
dynamic_prepare_opts(parser, build_vocab_only=True)
return parser
def main():
parser = _get_parser()
opts, unknown = parser.parse_known_args()
build_vocab_main(opts)
if __name__ == '__main__':
main()
[per-file quality-signal metric columns omitted]

ed51b4414d9639f63dd9eb5d177b6130aa8d5108 | 2,756 bytes | Python (py) | schools3/ml/experiments/feat_pruning_experiment.py | dssg/mlpolicylab_fall20_schools3_public @ f8eff4c56e9bada1eb81ddaca03686d7ef53c2c4 | MIT
import numpy as np
import pandas as pd
from schools3.ml.experiments.models_experiment import ModelsExperiment
from schools3.data.base.cohort import Cohort
from schools3.config import main_config
from schools3.config import global_config
from schools3.data.datasets.dataset import Dataset
from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment
from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment
from schools3.ml.models.tfkeras_model import TFKerasModel
from schools3.ml.models.sklearn_model import SklearnModel
import schools3.config.ml.experiments.feat_pruning_experiment_config as config
from schools3.config.data.datasets import dataset_config
# an experiment that trains models with subsets of the features according to their permutation importance rank
# like SingleDatasetExperiment, this works on a specific grade
class FeaturePruningExperiment(ModelsExperiment):
def __init__(
self, name='ignore',
features_list=main_config.features,
labels=main_config.labels,
models=main_config.models,
metrics=main_config.metrics,
use_cache=main_config.use_cache
):
super(FeaturePruningExperiment, self).__init__(
name, features_list, labels, models, metrics, use_cache=use_cache
)
def perform(
self, grade=main_config.single_grade,
train_years=main_config.train_years,
test_years=main_config.test_years,
compute_train_metrics=False, **kwargs
):
train_cohort = Cohort(grade, train_years)
df = pd.DataFrame()
for model in self.models:
if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)):
continue
train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels)
model.train(train_data)
feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics)
feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data)
feats = np.flip(feature_names[sorted_idxs])
for i in config.num_feats:
dataset_config.feat_whitelist.clear()
for feat in feats[:i]:
dataset_config.feat_whitelist.append(feat)
exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics)
cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs)
cur_df['num_feats'] = i
df = pd.concat([df, cur_df], ignore_index=True)
return df
[per-file quality-signal metric columns omitted]

ed51e5aefc8aa4c007f752784c838fb5f4f57c1c | 2,297 bytes | Python (py) | network/dataset/image_loading.py | imsb-uke/podometric_u_net @ a33afcc186d618889df73c7ab2941dfbb63574ac | MIT
import os
import numpy as np
from skimage.io import imread
def get_file_count(paths, image_format='.tif'):
total_count = 0
for path in paths:
try:
path_list = [_ for _ in os.listdir(path) if _.endswith(image_format)]
total_count += len(path_list)
except OSError:
print("Directory does not exist. Returned file count for this path will be 0")
return total_count
# Function to load image
def load_image(img_path):
img = imread(img_path)
if img.shape[2] == 4:
img = img[:, :, :-1]
# img = np.roll(img, shift=1, axis=2) # CHECK IMAGE FORMAT
return img
# Function to load mask
def load_mask(mask_path):
mask = imread(mask_path)
return mask
def load_mask_from_img(cfg, img_path, img_name, suffixes):
a_mask = imread(os.path.join(img_path, img_name + suffixes[0]))
msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS))
i = 0
for suffix in suffixes:
msk_channel = imread(os.path.join(img_path, img_name + suffix))
if len(msk_channel.shape) == 2:
msk_channel = np.expand_dims(msk_channel, axis=-1)
if len(msk_channel.shape) != 3:
raise ValueError("Mask must be 3-dim here. Does your mask have 1 or more than 3 dimensions? "
"Check the masks.")
msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel
i += cfg.NUMBER_MSK_CHANNELS
# print(msk, msk.shape)
return msk
def load_weights(cfg, img_path, img_name, weight_suffixes):
a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0]))
weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS))
i = 0
for suffix in weight_suffixes:
weights_channel = np.load(os.path.join(img_path, img_name + suffix))
if len(weights_channel.shape) == 2:
weights_channel = np.expand_dims(weights_channel, axis=-1)
if len(weights_channel.shape) != 3:
raise ValueError("Weights must be 3-dim here. Has your weights 1 or more than 3 dimensions? Check the weights.")
weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel
i += cfg.NUMBER_MSK_CHANNELS
return weights
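# --- Added sketch: minimal usage of load_mask_from_img --------------------------
# The cfg stub and file names below are assumptions for illustration; any
# object exposing NUMBER_MSK_CHANNELS works as cfg.
from types import SimpleNamespace

cfg = SimpleNamespace(NUMBER_MSK_CHANNELS=1)
# Expects files like 'data/sample_mask.tif' and 'data/sample_border.tif':
# msk = load_mask_from_img(cfg, 'data', 'sample', ['_mask.tif', '_border.tif'])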
[per-file quality-signal metric columns omitted]

ed52fe7003cd3391400a3e6ca8a3b67edfc17d59 | 6,769 bytes | Python (py) | series/simple/numeric_series.py | kefir/snakee @ a17734d4b2d7dfd3e6c7b195baa128fbc84d197b | MIT
from typing import Optional, Callable
try: # Assume we're a sub-module in a package.
from series import series_classes as sc
from utils import numeric as nm
except ImportError: # Apparently no higher-level package has been imported, fall back to a local import.
from .. import series_classes as sc
from ...utils import numeric as nm
Native = sc.AnySeries
DEFAULT_NUMERIC = True
WINDOW_DEFAULT = (-1, 0, 1)
WINDOW_WO_CENTER = (-2, -1, 0, 1, 2)
WINDOW_NEIGHBORS = (-1, 0)
class NumericSeries(sc.AnySeries):
    def __init__(
            self,
            values=None,  # avoid a shared mutable default list
            validate=False,
            name=None,
    ):
        super().__init__(
            values=values if values is not None else [],
            validate=validate,
            name=name,
        )
@staticmethod
def get_distance_func():
return nm.diff
def get_errors(self):
yield from super().get_errors()
if not self.has_valid_items():
yield 'Values of {} must be numeric'.format(self.get_class_name())
def has_valid_items(self):
for v in self.get_values():
if not isinstance(v, (int, float)):
return False
return True
def is_numeric(self, check=False):
if check:
return self.has_valid_items()
else:
return DEFAULT_NUMERIC
def get_sum(self):
return sum(
self.filter_values_defined().get_values(),
)
def get_mean(self):
values_defined = self.filter_values_defined().get_values()
if values_defined:
return sum(values_defined) / len(values_defined)
def norm(self, rate=None, default=None):
if rate is None:
rate = self.get_mean()
return self.map_values(lambda v: v / rate if rate else default)
def divide(self, series, default=None, extend=False):
return self.map_optionally_extend_zip_values(
lambda x, y: x / y if y else default,
extend,
series,
)
def subtract(self, series, default=None, extend=False):
return self.map_optionally_extend_zip_values(
lambda x, y: x - y if x is not None and y is not None else default,
extend,
series,
)
def derivative(self, extend=False, default=0):
if extend:
return self.preface(None).subtract(
self,
extend=True,
default=default,
).crop(0, 1)
else:
return self.slice(0, -1).subtract(
self.shift(-1)
)
def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True):
if extend:
n_min = 0
n_max = self.get_count()
else:
n_min = - min(window)
n_max = self.get_count() - max(window)
for center in range(n_min, n_max):
sliding_window = [center + n for n in window]
if as_series:
yield self.value_series().items_no(sliding_window, extend=extend, default=default)
else:
yield self.value_series().get_items_no(sliding_window, extend=extend, default=default)
def apply_window_func(
self, function: Callable,
window=WINDOW_DEFAULT, extend=True, default=None, as_series=False,
inplace: bool = False,
) -> Optional[Native]:
values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series))
return self.set_values(values, inplace=inplace)
def mark_local_extremums(self, local_min=True, local_max=True):
return self.apply_window_func(
lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max),
window=WINDOW_DEFAULT,
extend=True,
default=False,
)
def mark_local_max(self):
return self.mark_local_extremums(local_min=False, local_max=True)
def mark_local_min(self):
return self.mark_local_extremums(local_min=True, local_max=False)
def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False):
smoothed_series = self.smooth(window=window)
deviation = self.subtract(smoothed_series)
if rel:
deviation = deviation.divide(smoothed_series, default=0)
return deviation
# @deprecated
def smooth_simple_linear(self, window_len=3, exclude_center=False):
center = int((window_len - 1) / 2)
count = self.get_count()
result = self.new()
for n in self.get_range_numbers():
is_edge = n < center or n >= count - center
if is_edge:
result.append(self.get_item_no(n), inplace=True)
else:
sub_series = self.slice(n - center, n + center + 1)
if exclude_center:
sub_series = sub_series.drop_item_no(center)
result.append(sub_series.get_mean(), inplace=True)
return result
def smooth(self, how='linear', *args, **kwargs):
method_name = 'smooth_{}'.format(how)
smooth_method = self.__getattribute__(method_name)
return smooth_method(*args, **kwargs)
    def smooth_multiple(self, list_kwargs=None):  # avoid a shared mutable default list
        series = self
        for kwargs in (list_kwargs or []):
            series = series.smooth(**kwargs)
        return series
def smooth_linear(self, window=WINDOW_DEFAULT):
return self.apply_window_func(
lambda s: s.get_mean(),
window=window, extend=True, default=None,
as_series=True,
)
def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None):
spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max)
if whitelist:
spikes = spikes.map_zip_values(
lambda a, b: a and not b,
whitelist,
)
return self.map_zip_values(
lambda v, t, s: s if t else v,
spikes,
self.smooth(window=window),
)
def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True):
deviation = self.deviation_from_neighbors(window=window, rel=True)
if local_min or local_max:
deviation = deviation.map_zip_values(
lambda x, m: x if m else None,
self.mark_local_extremums(local_min=local_min, local_max=local_max),
)
spikes = deviation.map_values(
lambda x: abs(x or 0) > threshold,
)
return spikes
def plot(self, fmt='-'):
nm.plot(self.get_range_numbers(), self.get_values(), fmt=fmt)
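# --- Added sketch: smoothing and spike marking ----------------------------------
# Minimal usage (a sketch: assumes the surrounding `series` package is on the
# path and that the AnySeries base accepts a plain list of values, as the
# constructor above suggests):
#
#   s = NumericSeries(values=[1.0, 1.2, 9.0, 1.1, 1.3])
#   smoothed = s.smooth(how='linear')      # dispatches to smooth_linear()
#   spikes = s.mark_spikes(threshold=2.0)  # True where relative deviation > threshold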
[per-file quality-signal metric columns omitted]

ed548c718f56038d0a32759b322ccf9c4f9f5e93 | 29,735 bytes | Python (py) | python/OpenGeoTile.py | scoofy/open-geotiling @ 0b1305d4482d6df46104135662ffe4565f92f9f0 | Apache-2.0
from openlocationcode import openlocationcode as olc
from enum import Enum
import math, re
class TileSize(Enum):
    ''' An area of 20° x 20°. The side length of this tile varies with its location on the globe,
        but can be up to approximately 2200km. Tile addresses will be 2 characters long.'''
    GLOBAL = (2, 20.0)
    ''' An area of 1° x 1°. The side length of this tile varies with its location on the globe,
        but can be up to approximately 110km. Tile addresses will be 4 characters long.'''
    REGION = (4, 1.0)
    ''' An area of 0.05° x 0.05°. The side length of this tile varies with its location on the
        globe, but can be up to approximately 5.5km. Tile addresses will be 6 characters long.'''
    DISTRICT = (6, 0.05)
    ''' An area of 0.0025° x 0.0025°. The side length of this tile varies with its location on
        the globe, but can be up to approximately 275m.
        Tile addresses will be 8 characters long.'''
    NEIGHBORHOOD = (8, 0.0025)
    ''' An area of 0.000125° x 0.000125°. The side length of this tile varies with its location
        on the globe, but can be up to approximately 14m.
        Tile addresses will be 10 characters long.'''
    PINPOINT = (10, 0.000125)
def __init__(self, code_length, coordinate_increment):
self.code_length = code_length
self.coordinate_increment = coordinate_increment
def getCodeLength(self):
'''get 0th value'''
return self.code_length
def getCoordinateIncrement(self):
'''get 1th value'''
return self.coordinate_increment
# Copy from OpenLocationCode.java
# A separator used to break the code into two parts to aid memorability.
SEPARATOR = '+'
# Copy from OpenLocationCode.java
# The character used to pad codes.
PADDING_CHARACTER = '0'
PADDING_2 = "00"
PADDING_4 = "0000"
PADDING_6 = "000000"
CODE_ALPHABET = olc.CODE_ALPHABET_
BASE_20_SET = {x+y for x in CODE_ALPHABET for y in CODE_ALPHABET}
BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0] in ['2', 'X'] or x[1] in ['2', 'X']}
NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == 'X'}
EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == 'X'}
SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == '2'}
WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == '2'}
memoized_digit_dict = {
"N1": NORTH_DIGITS,
"E1": EAST_DIGITS,
"S1": SOUTH_DIGITS,
"W1": WEST_DIGITS,
}
def is_padded(plus_code):
return plus_code.find(PADDING_CHARACTER) != -1
def is_tile_address(plus_code):
return plus_code.find(SEPARATOR) == -1
def return_code_of_tile_size(too_precise_plus_code, desired_tile_size):
code = too_precise_plus_code
if not is_tile_address(code):
code = code.replace(SEPARATOR, '')
if is_padded(code):
if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength():
raise Exception("OLC padding larger than allowed by desired_tile_size")
code_address = code[:desired_tile_size.getCodeLength()]
full_length = TileSize.PINPOINT.getCodeLength()
code = code_address + ("0" * (full_length - len(code_address)))
if desired_tile_size == TileSize.PINPOINT:
code = code[:-2] + SEPARATOR + code[-2:]
else:
code = code[:-2] + SEPARATOR
return code
def return_set_of_subaddresses(set_of_addresses):
for address in set_of_addresses:
if len(address) == TileSize.PINPOINT.getCodeLength():
''' address already minimum possible size '''
return None
return {address+base for address in set_of_addresses for base in BASE_20_SET}
class OpenGeoTile():
'''
/**
* A wrapper around an {@code OpenLocationCode} object, focusing on the area identified by a prefix
* of the given OpenLocationCode.
*
* Using this wrapper class allows to determine whether two locations are in the same or adjacent
* "tiles", to determine all neighboring tiles of a given one, to calculate a distance in tiles etc.
*
* Open Location Code is a technology developed by Google and licensed under the Apache License 2.0.
* For more information, see https://github.com/google/open-location-code
*
* @author Andreas Bartels
* @version 0.1.0
*/
Ported by scoofy on 08.31.21
'''
def __init__(self,
code=None,
tile_size=None,
lat=None,
long=None,
):
        if not (code or (lat is not None and long is not None)):
            raise Exception("Invalid OpenGeoTile constructor arguments")
        if lat is not None and long is not None:
            # explicit None checks so lat/long of 0.0 (equator/prime meridian) still work
            self.constructTileFromLatLong(lat, long, tile_size)
elif code and tile_size:
self.constructTileFromCodeAndSize(code, tile_size)
elif code:
if is_tile_address(code):
self.constructTileFromTileAddress(code)
else:
self.constructTileFromCode(code)
self.tile_address = self.code.replace(SEPARATOR, "")[0: self.tile_size.getCodeLength()]
def constructTileFromCode(self, plus_code):
'''/**
* Creates a new OpenGeoTile from an existing
* {@link com.google.openlocationcode.OpenLocationCode}.
* @param olc OpenLocationCode for the current location. This can be a padded code, in which
* case the resulting OpenGeoTile will have a larger TileSize.
* @throws IllegalArgumentException if olc is not a full code
*/'''
if not olc.isFull(plus_code):
raise Exception("Only full OLC supported. Use olc.recoverNearest().")
self.code = plus_code.upper()
if is_padded(plus_code):
code_length = plus_code.find(PADDING_CHARACTER)
else:
code_length = min(len(plus_code)-1, 10)
if code_length == TileSize.GLOBAL.getCodeLength():
self.tile_size = TileSize.GLOBAL
elif code_length == TileSize.REGION.getCodeLength():
self.tile_size = TileSize.REGION
elif code_length == TileSize.DISTRICT.getCodeLength():
self.tile_size = TileSize.DISTRICT
elif code_length == TileSize.NEIGHBORHOOD.getCodeLength():
self.tile_size = TileSize.NEIGHBORHOOD
elif code_length == TileSize.PINPOINT.getCodeLength():
self.tile_size = TileSize.PINPOINT
else:
raise Exception("Too precise, sort this later")
def constructTileFromCodeAndSize(self, plus_code, tile_size):
'''
Creates a new OpenGeoTile from an existing
{@link com.google.openlocationcode.OpenLocationCode}.
@param olc OpenLocationCode for the current location
@param tile_size tile size to use for this OpenGeoTile
@throws IllegalArgumentException when trying to pass a short (non-full) OLC, or if OLC has
too much padding for given tile_size
'''
if not olc.isFull(plus_code):
raise Exception("Only full OLC supported. Use recover().")
modified_plus_code = return_code_of_tile_size(plus_code, tile_size)
self.code = modified_plus_code.upper()
self.tile_size = tile_size
def constructTileFromLatLong(self, lat: float, long: float, tile_size=None):
'''/**
* Creates a new OpenGeoTile from lat/long coordinates.
* @param latitude latitude of the location
* @param longitude longitude of the location
* @param tile_size tile size to use for this OpenGeoTile
* @throws IllegalArgumentException passed through from
* {@link OpenLocationCode#OpenLocationCode(double, double, int)}
*/'''
if not tile_size:
tile_size = TileSize.PINPOINT
self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper()
self.tile_size = tile_size
def constructTileFromTileAddress(self, tileAddress):
'''/**
* Creates a new OpenGeoTile from a tile address.
* @param tileAddress a tile address is a [2/4/6/8/10]-character string that corresponds to a
* valid {@link com.google.openlocationcode.OpenLocationCode} after removing
* '+' and an additional number of trailing characters; tile size is
* determined by the length of this address
* @throws IllegalArgumentException passed through from
* {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of
* invalid length
*/'''
detectedTileSize = None
olcBuilder = ""
if len(tileAddress) == TileSize.GLOBAL.getCodeLength():
detectedTileSize = TileSize.GLOBAL
olcBuilder += tileAddress + PADDING_6 + SEPARATOR
if len(tileAddress) == TileSize.REGION.getCodeLength():
detectedTileSize = TileSize.REGION
olcBuilder += tileAddress + PADDING_4 + SEPARATOR
if len(tileAddress) == TileSize.DISTRICT.getCodeLength():
detectedTileSize = TileSize.DISTRICT
olcBuilder += tileAddress + PADDING_2 + SEPARATOR
if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength():
detectedTileSize = TileSize.NEIGHBORHOOD
olcBuilder += tileAddress + SEPARATOR
if len(tileAddress) == TileSize.PINPOINT.getCodeLength():
detectedTileSize = TileSize.PINPOINT
olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10]
        if detectedTileSize is None:
print(tileAddress)
raise Exception("Invalid tile address")
self.tile_size = detectedTileSize
self.code = olcBuilder.upper()
def getWrappedOpenLocationCode(self):
# this code is effectively redundant as python has no wrapping
'''/**
* The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile.
* For the plus code of the whole tile, see {@link #getTileOpenLocationCode()}.
* @return the exact plus code wrapped by this OpenGeoTile
*/'''
return self.code
def returnCode(self):
return self.code
def getTileSize(self):
'''/**
* Get the {@link TileSize} of this OpenGeoTile.
* @return the {@link TileSize} of this OpenGeoTile
*/'''
return self.tile_size
def getTileAddress(self):
'''/**
* A tile address is a string of length 2, 4, 6, 8, or 10, which corresponds to a valid
* {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate
* number of '0' and '+' characters. Example: Address "CVXW" corresponds to OLC "CVXW0000+"
* @return the tile address of this OpenGeoTile;
*/'''
return self.tile_address
def getTileAddressPrefix(self):
'''/**
* The prefix of a tile address is the address of the next biggest tile at this location.
* @return this tile's address with the final two characters removed. In case of a GLOBAL tile,
* returns the empty string.
*/'''
if self.tile_size == TileSize.GLOBAL:
return ""
else:
return self.getTileAddress()[0: self.tile_size.getCodeLength()-2]
def getParentTileAddress(self):
return self.getTileAddressPrefix()
def getTileOpenLocationCode(self):
# this code is redundant
'''/**
* The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than
* {@link #getWrappedOpenLocationCode()}, this will return a full plus code for the whole tile.
* @return a plus code for the whole tile, probably padded with '0' characters
*/'''
return self.getWrappedOpenLocationCode()
def getNeighbors(self, eight_point_direction=None):
'''/**
* Get an array of the typically 8 neighboring tiles of the same size.
* @return an array of the typically 8 neighboring tiles of the same size;
* may return less than 8 neighbors for tiles near the poles.
*/'''
# deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125]
delta = self.getTileSize().getCoordinateIncrement()
code_area = olc.decode(self.code)
latitude = code_area.latitudeCenter
longitude = code_area.longitudeCenter
'''directions_list included to keep ordered data'''
directions_list = ["NW", "N", "NE", "E", "SE", "S", "SW", "W"]
direction_dict = {
"NW": [+1, -1], "N": [+1, 0], "NE": [+1, +1],
"W": [ 0, -1], "E": [ 0, +1],
"SW": [-1, -1], "S": [-1, 0], "SE": [-1, +1],
}
#lat_diff = [+1, +1, +1, 0, -1, -1, -1, 0]
#long_diff = [-1, 0, +1, +1, +1, 0, -1, -1]
if not type(eight_point_direction) in [type(None), list, str]:
raise Exception("eight_point_direction must be of type list or str")
if eight_point_direction is None:
directions = directions_list
elif isinstance(eight_point_direction, str):
directions = []
if eight_point_direction.upper() in directions_list:
directions.append(eight_point_direction.upper())
else:
''' this list construction keeps directions in the order above '''
uppercase_input_directions = [d.upper() for d in eight_point_direction]
directions = [direction for direction in directions_list if direction in uppercase_input_directions]
neighbors = set()
for direction in directions:
lat_diff, long_diff = direction_dict.get(direction)
''' //OLC constructor clips and normalizes,
//so we don't have to deal with invalid lat/long values directly'''
neighborLatitude = latitude + (delta * lat_diff)
neighborLongitude = longitude + (delta * long_diff)
new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize())
if not self.isSameTile(new_OpenGeoTile):
'''//don't add tiles that are the same as this one due to clipping near the poles'''
neighbors.add(new_OpenGeoTile)
return neighbors
def isSameTile(self, potentialSameTile):
'''/**
* Check if a tile describes the same area as this one.
* @param potentialSameTile the OpenGeoTile to check
* @return true if tile sizes and addresses are the same; false if not
*/'''
if potentialSameTile.getTileSize() != self.getTileSize():
return False
return potentialSameTile.getTileAddress() == self.getTileAddress()
def isNeighbor(self, potentialNeighbor):
'''/**
* Check if a tile is neighboring this one.
* @param potentialNeighbor the OpenGeoTile to check
* @return true if this and potentialNeighbor are adjacent (8-neighborhood);
* false if not
*/'''
if potentialNeighbor.getTileSize() == self.getTileSize():
'''//avoid iterating over neighbors for same tile'''
if self.isSameTile(potentialNeighbor):
return False
neighbors = self.getNeighbors()
for neighbor in neighbors:
if potentialNeighbor.isSameTile(neighbor):
return True
return False
else:
'''//tiles of different size are adjacent if at least one neighbor of the smaller tile,
//but not the smaller tile itself, is contained within the bigger tile'''
if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength():
smallerTile = potentialNeighbor
biggerTile = self
else:
smallerTile = self
biggerTile = potentialNeighbor
if biggerTile.contains(smallerTile):
return False
neighbors = smallerTile.getNeighbors()
for neighbor in neighbors:
if biggerTile.contains(neighbor):
return True
return False
def contains(self, potentialMember):
'''/**
* Check if this tile contains another one.
* @param potentialMember the OpenGeoTile to check
* @return true if the area potentialMember falls within the area of this tile, including cases
* where both are the same; false if not
*/'''
# //if A contains B, then B's address has A's address as a prefix
return potentialMember.getTileAddress().startswith(self.getTileAddress())
def getManhattanTileDistanceTo(self, otherTile):
'''/**
* Calculates the Manhattan (city block) distance between this and another tile of the same size.
* @param otherTile another tile of the same size as this one
* @return an integer value corresponding to the number of tiles of the given size that need to
* be traversed getting from one to the other tile
* @throws IllegalArgumentException thrown if otherTile has different {@link TileSize}
*/'''
if otherTile.getTileSize() != self.getTileSize():
raise Exception("Tile sizes don't match")
return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True)
def getChebyshevTileDistanceTo(self, otherTile):
'''/**
* Calculates the Chebyshev (chessboard) distance between this and another tile of the same size.
* @param otherTile another tile of the same size as this one
* @return an integer value corresponding to the number of tiles of the given size that need to
* be traversed getting from one to the other tile
* @throws IllegalArgumentException thrown if otherTile has different {@link TileSize}
*/'''
if otherTile.getTileSize() != self.getTileSize():
raise Exception("Tile sizes don't match")
return max(self.getLatitudinalTileDistance(otherTile, True),
self.getLongitudinalTileDistance(otherTile, True))
def getDirection(self, otherTile):
'''/**
* Returns the approximate direction of the other tile relative to this. The return value can
* have a large margin of error, especially for big or far away tiles, so this should only be
* interpreted as a very rough approximation and used as such.
* @param otherTile another tile of the same size as this one
* @return an angle in radians, 0 being an eastward direction, +/- PI being westward direction
* @throws IllegalArgumentException thrown if otherTile has different {@link TileSize}
*/'''
if otherTile.getTileSize() != self.getTileSize():
raise Exception("Tile sizes don't match")
xDiff = int(self.getLongitudinalTileDistance(otherTile, False))
yDiff = int(self.getLatitudinalTileDistance(otherTile, False))
return math.atan2(yDiff, xDiff)
def getEightPointDirectionOfNeighbor(self, neighborTile):
''' returns neighbor's direction, to assist in expanding tile areas '''
if not self.isNeighbor(neighborTile):
raise Exception("neighborTile must be neighbor")
if neighborTile.getTileSize() != self.getTileSize():
raise Exception("Tile sizes don't match")
self_tile_x = self.getTileAddress()[-2]
self_tile_y = self.getTileAddress()[-1]
other_tile_x = neighborTile.getTileAddress()[-2]
other_tile_y = neighborTile.getTileAddress()[-1]
direction = ""
north_south = None
if self_tile_x != other_tile_x:
''' one tile is above the other '''
if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]:
                ''' adjacent parent tiles '''
if CODE_ALPHABET.find(other_tile_x) == 0:
''' other tile is above -> neighborTile is north '''
direction = direction + 'N'
else:
direction = direction + 'S'
else:
if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x):
''' other tile is above -> neighborTile is north '''
direction = direction + 'N'
else:
''' other tile is below -> neighborTile is south '''
direction = direction + 'S'
if self_tile_y != other_tile_y:
''' one tile is above the other '''
if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]:
                ''' adjacent parent tiles '''
if CODE_ALPHABET.find(other_tile_y) == 0:
''' other tile is right -> neighborTile is east '''
direction = direction + 'E'
else:
''' other tile is left -> neighborTile is west '''
direction = direction + 'W'
else:
if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y):
''' other tile is right -> neighborTile is east '''
direction = direction + 'E'
else:
''' other tile is left -> neighborTile is west '''
direction = direction + 'W'
return direction
def getCharacterIndex(self, c):
'''//following definitions copied from OpenLocationCode.java'''
index = "23456789CFGHJMPQRVWX".find(c.upper())
if index == -1:
raise Exception("Character does not exist in alphabet")
return index
def characterDistance(self, c1, c2):
return self.getCharacterIndex(c1) - self.getCharacterIndex(c2)
def getLatitudinalTileDistance(self, otherTile, absolute_value_bool):
if otherTile.getTileSize() != self.getTileSize():
raise Exception("Tile sizes don't match")
        numIterations = self.tile_size.getCodeLength()/2  # 1..5
tileDistance = 0
for i in range(int(numIterations)):
tileDistance *= 20
c1 = self.getTileAddress()[i*2]
c2 = otherTile.getTileAddress()[i*2]
tileDistance += self.characterDistance(c1,c2)
if absolute_value_bool:
return abs(tileDistance)
return tileDistance
def getLongitudinalTileDistance(self, otherTile, absolute_value_bool):
if otherTile.getTileSize() != self.getTileSize():
raise Exception("Tile sizes don't match")
        numIterations = self.tile_size.getCodeLength()/2  # 1..5
tileDistance = 0
for i in range(int(numIterations)):
tileDistance *= 20
c1 = self.getTileAddress()[i*2 + 1]
c2 = otherTile.getTileAddress()[i*2 + 1]
if i == 0:
'''//for the first longitudinal value, we need to take care of wrapping - basically,
//if it's shorter to go the other way around, do so'''
firstDiff = self.characterDistance(c1, c2)
                NUM_CHARACTERS_USED = 18  # 360° / 20° = 18
if abs(firstDiff) > NUM_CHARACTERS_USED/2:
if firstDiff > 0:
firstDiff -= NUM_CHARACTERS_USED
else:
firstDiff += NUM_CHARACTERS_USED
tileDistance += firstDiff
else:
tileDistance += self.characterDistance(c1, c2)
if absolute_value_bool:
return abs(tileDistance)
return tileDistance
def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT):
if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength():
''' tile is desired size '''
return self
elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength():
'desired_tile_size is too big'
raise Exception("OLC padding larger than allowed by desired_tile_size")
iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2
address_set = set([self.getTileAddress()])
for i in range(int(iterations_needed)):
address_set = return_set_of_subaddresses(address_set)
tile_set = {OpenGeoTile(address) for address in address_set}
return tile_set
def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None):
address = self.getTileAddress()
if len(address) == TileSize.PINPOINT.getCodeLength():
''' address already minimum possible size '''
return None
elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength():
'desired_tile_size is too big'
raise Exception("OLC padding larger than allowed by desired_tile_size")
iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2)
north_set = set()
east_set = set()
south_set = set()
west_set = set()
if isinstance(eight_point_direction, str):
eight_point_direction = eight_point_direction.upper()
set_of_border_subaddresses = set()
if eight_point_direction is None:
''' all borders '''
''' traveling salesman problem '''
''' let's do it once, and try to reduce by swaping digits '''
all_border_set = memoized_digit_dict.get(f"A{iterations_needed}")
if not all_border_set:
north_base_set = memoized_digit_dict.get(f"N{iterations_needed}")
if not north_base_set:
self.memoizeDigitDict("N", iterations_needed)
north_set = memoized_digit_dict.get(f"N{iterations_needed}")
east_set = memoized_digit_dict.get(f"E{iterations_needed}", set())
south_set = memoized_digit_dict.get(f"S{iterations_needed}", set())
west_set = memoized_digit_dict.get(f"W{iterations_needed}", set())
east_exists = east_set != set()
south_exists = south_set != set()
west_exists = west_set != set()
for base in north_set:
east_base = ""
south_base = ""
west_base = ""
base_tuple_list = re.findall('..', base)
''' north will be Xd
east dX
south 2d
west d2'''
for n_tuple in base_tuple_list:
relevant_digit = n_tuple[1]
if not east_exists:
east_base += relevant_digit + "X"
if not south_exists:
south_base += "2" + relevant_digit
if not west_exists:
west_base += relevant_digit + "2"
if not east_exists:
east_set.add(east_base)
if not south_exists:
south_set.add(south_base)
if not west_exists:
west_set.add(west_base)
memoized_digit_dict[f"E{iterations_needed}"] = east_set
memoized_digit_dict[f"S{iterations_needed}"] = south_set
memoized_digit_dict[f"W{iterations_needed}"] = west_set
all_border_set = north_set | east_set | south_set | west_set
memoized_digit_dict[f"A{iterations_needed}"] = all_border_set
return {OpenGeoTile(address+base) for base in all_border_set}
elif len(eight_point_direction) == 1:
''' North, South, East, or West '''
base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}")
if not base_set:
self.memoizeDigitDict(eight_point_direction, iterations_needed)
base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}')
return {OpenGeoTile(address + base) for base in base_set}
elif len(eight_point_direction) == 2:
''' NW, NE, SW, SE... should return only one tile'''
ordinal_digit_dict = {
'NW': 'X2',
'NE': 'XX',
'SE': '2X',
'SW': '22'
}
base = ''
for i in range(iterations_needed):
base += ordinal_digit_dict.get(eight_point_direction)
return {OpenGeoTile(address + base)}
def memoizeDigitDict(self, eight_point_direction, iterations_needed):
base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}")
if not base_set:
quickest_i = 0
for i in reversed(range(iterations_needed)):
if memoized_digit_dict.get(f"{eight_point_direction}{i + 1}"):
quickest_i = i
break
for i in range(quickest_i, iterations_needed):
existing_bases = memoized_digit_dict.get(f"{eight_point_direction}{i + 1}")
next_set = {existing_base + base for existing_base in existing_bases for base in memoized_digit_dict.get(f"{eight_point_direction}1")}
memoized_digit_dict[f"{eight_point_direction}{i + 2}"] = next_set
[per-file quality-signal metric columns omitted]

ed57f1712b86394159992dc11fd79688181d493e | 13,851 bytes | Python (bzl) | tensorflow_probability/python/build_defs.bzl | jbergmanster/probability @ e15b307066e7485b8fe9faf3d289c739ab8d3806 | Apache-2.0
# Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Build defs for TF/NumPy/JAX-variadic libraries & tests."""
# [internal] load python3.bzl
NO_REWRITE_NEEDED = [
"internal:all_util",
"internal:docstring_util",
"internal:reparameterization",
"layers",
"platform_google",
]
REWRITER_TARGET = "//tensorflow_probability/substrates/meta:rewrite"
RUNFILES_ROOT = "tensorflow_probability/"
def _substrate_src(src, substrate):
"""Rewrite a single src filename for the given substrate."""
return "_{}/_generated_{}".format(substrate, src)
def _substrate_srcs(srcs, substrate):
"""Rewrite src filenames for the given substrate."""
return [_substrate_src(src, substrate) for src in srcs]
def _substrate_dep(dep, substrate):
"""Convert a single dep to one appropriate for the given substrate."""
dep_to_check = dep
if dep.startswith(":"):
dep_to_check = "{}{}".format(native.package_name(), dep)
for no_rewrite in NO_REWRITE_NEEDED:
if no_rewrite in dep_to_check:
return dep
if "tensorflow_probability/" in dep or dep.startswith(":"):
if "internal/backend" in dep:
return dep
if ":" in dep:
return "{}.{}".format(dep, substrate)
return "{}:{}.{}".format(dep, dep.split("/")[-1], substrate)
return dep
def _substrate_deps(deps, substrate):
"""Convert deps to those appropriate for the given substrate."""
new_deps = [_substrate_dep(dep, substrate) for dep in deps]
backend_dep = "//tensorflow_probability/python/internal/backend/{}".format(substrate)
if backend_dep not in new_deps:
new_deps.append(backend_dep)
return new_deps
# This is needed for the transitional period during which we have the internal
# py2and3_test and py_test commingling in BUILD files. Otherwise the OSS export
# rewrite process becomes irreversible.
def py3_test(*args, **kwargs):
"""Internal/external reversibility, denotes py3-only vs py2+3 tests.
Args:
*args: Passed to underlying py_test.
**kwargs: Passed to underlying py_test. srcs_version and python_version
are added (with value `"PY3"`) if not specified.
"""
kwargs = dict(kwargs)
if "srcs_version" not in kwargs:
kwargs["srcs_version"] = "PY3"
if "python_version" not in kwargs:
kwargs["python_version"] = "PY3"
native.py_test(*args, **kwargs)
def _resolve_omit_dep(dep):
"""Resolves a `substrates_omit_deps` item to full target."""
if ":" not in dep:
dep = "{}:{}".format(dep, dep.split("/")[-1])
if dep.startswith(":"):
dep = "{}{}".format(native.package_name(), dep)
return dep
def _substrate_runfiles_symlinks_impl(ctx):
"""A custom BUILD rule to generate python runfiles symlinks.
A custom build rule which adds runfiles symlinks for files matching a
substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`.
This rule will aggregate and pass along deps while adding the given
symlinks to the runfiles structure.
Build rule attributes:
- substrate: One of 'jax' or 'numpy'; which substrate this applies to.
- deps: A list of py_library labels. These are passed along.
Args:
ctx: Rule analysis context.
Returns:
Info objects to propagate deps and add runfiles symlinks.
"""
# Aggregate the depset inputs to resolve transitive dependencies.
transitive_sources = []
uses_shared_libraries = []
imports = []
has_py2_only_sources = []
has_py3_only_sources = []
cc_infos = []
for dep in ctx.attr.deps:
if PyInfo in dep:
transitive_sources.append(dep[PyInfo].transitive_sources)
uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries)
imports.append(dep[PyInfo].imports)
has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources)
has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources)
# if PyCcLinkParamsProvider in dep: # DisableOnExport
# cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport
if CcInfo in dep:
cc_infos.append(dep[CcInfo])
# Determine the set of symlinks to generate.
transitive_sources = depset(transitive = transitive_sources)
runfiles_dict = {}
substrate = ctx.attr.substrate
file_substr = "_{}/_generated_".format(substrate)
for f in transitive_sources.to_list():
if "tensorflow_probability" in f.dirname and file_substr in f.short_path:
pre, post = f.short_path.split("/python/")
out_path = "{}/substrates/{}/{}".format(
pre,
substrate,
post.replace(file_substr, ""),
)
runfiles_dict[RUNFILES_ROOT + out_path] = f
# Construct the output structures to pass along Python srcs/deps/etc.
py_info = PyInfo(
transitive_sources = transitive_sources,
uses_shared_libraries = any(uses_shared_libraries),
imports = depset(transitive = imports),
has_py2_only_sources = any(has_py2_only_sources),
has_py3_only_sources = any(has_py3_only_sources),
)
py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos)
py_runfiles = depset(
transitive = [depset(transitive = [
dep[DefaultInfo].data_runfiles.files,
dep[DefaultInfo].default_runfiles.files,
]) for dep in ctx.attr.deps],
)
runfiles = DefaultInfo(runfiles = ctx.runfiles(
transitive_files = py_runfiles,
root_symlinks = runfiles_dict,
))
return py_info, py_cc_link_info, runfiles
# See documentation at:
# https://docs.bazel.build/versions/3.4.0/skylark/rules.html
substrate_runfiles_symlinks = rule(
implementation = _substrate_runfiles_symlinks_impl,
attrs = {
"substrate": attr.string(),
"deps": attr.label_list(),
},
)
def multi_substrate_py_library(
name,
srcs = [],
deps = [],
substrates_omit_deps = [],
jax_omit_deps = [],
numpy_omit_deps = [],
testonly = 0,
srcs_version = "PY2AND3"):
"""A TFP `py_library` for each of TF, NumPy, and JAX.
Args:
name: The TF `py_library` name. NumPy and JAX libraries have '.numpy' and
'.jax' appended.
srcs: As with `py_library`. A `genrule` is used to rewrite srcs for NumPy
and JAX substrates.
deps: As with `py_library`. The list is rewritten to depend on
substrate-specific libraries for substrate variants.
substrates_omit_deps: List of deps to omit if those libraries are not
rewritten for the substrates.
jax_omit_deps: List of deps to omit for the JAX substrate.
numpy_omit_deps: List of deps to omit for the NumPy substrate.
testonly: As with `py_library`.
srcs_version: As with `py_library`.
"""
native.py_library(
name = name,
srcs = srcs,
deps = deps,
srcs_version = srcs_version,
testonly = testonly,
)
remove_deps = [
"//third_party/py/tensorflow",
"//third_party/py/tensorflow:tensorflow",
]
trimmed_deps = [dep for dep in deps if (dep not in substrates_omit_deps and
dep not in remove_deps)]
resolved_omit_deps_numpy = [
_resolve_omit_dep(dep)
for dep in substrates_omit_deps + numpy_omit_deps
]
for src in srcs:
native.genrule(
name = "rewrite_{}_numpy".format(src.replace(".", "_")),
srcs = [src],
outs = [_substrate_src(src, "numpy")],
cmd = "$(location {}) $(SRCS) --omit_deps={} > $@".format(
REWRITER_TARGET,
",".join(resolved_omit_deps_numpy),
),
tools = [REWRITER_TARGET],
)
native.py_library(
name = "{}.numpy.raw".format(name),
srcs = _substrate_srcs(srcs, "numpy"),
deps = _substrate_deps(trimmed_deps, "numpy"),
srcs_version = srcs_version,
testonly = testonly,
)
# Add symlinks under tfp/substrates/numpy.
substrate_runfiles_symlinks(
name = "{}.numpy".format(name),
substrate = "numpy",
deps = [":{}.numpy.raw".format(name)],
testonly = testonly,
)
resolved_omit_deps_jax = [
_resolve_omit_dep(dep)
for dep in substrates_omit_deps + jax_omit_deps
]
jax_srcs = _substrate_srcs(srcs, "jax")
for src in srcs:
native.genrule(
name = "rewrite_{}_jax".format(src.replace(".", "_")),
srcs = [src],
outs = [_substrate_src(src, "jax")],
cmd = "$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@".format(
REWRITER_TARGET,
",".join(resolved_omit_deps_jax),
),
tools = [REWRITER_TARGET],
)
native.py_library(
name = "{}.jax.raw".format(name),
srcs = jax_srcs,
deps = _substrate_deps(trimmed_deps, "jax"),
srcs_version = srcs_version,
testonly = testonly,
)
# Add symlinks under tfp/substrates/jax.
substrate_runfiles_symlinks(
name = "{}.jax".format(name),
substrate = "jax",
deps = [":{}.jax.raw".format(name)],
testonly = testonly,
)
def multi_substrate_py_test(
name,
size = "small",
jax_size = None,
numpy_size = None,
srcs = [],
deps = [],
tags = [],
numpy_tags = [],
jax_tags = [],
disabled_substrates = [],
srcs_version = "PY2AND3",
timeout = None,
shard_count = None):
"""A TFP `py2and3_test` for each of TF, NumPy, and JAX.
Args:
name: Name of the `test_suite` which covers TF, NumPy and JAX variants
of the test. Each substrate will have a dedicated `py2and3_test`
suffixed with '.tf', '.numpy', or '.jax' as appropriate.
size: As with `py_test`.
jax_size: A size override for the JAX target.
numpy_size: A size override for the numpy target.
srcs: As with `py_test`. These will have a `genrule` emitted to rewrite
NumPy and JAX variants, writing the test file into a subdirectory.
deps: As with `py_test`. The list is rewritten to depend on
substrate-specific libraries for substrate variants.
tags: Tags global to this test target. NumPy also gets a `'tfp_numpy'`
tag, and JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag is used
to produce the `test_suite`.
    numpy_tags: Tags specific to the NumPy test (e.g. `"notap"`).
    jax_tags: Tags specific to the JAX test (e.g. `"notap"`).
disabled_substrates: Iterable of substrates to disable, items from
["numpy", "jax"].
srcs_version: As with `py_test`.
timeout: As with `py_test`.
shard_count: As with `py_test`.
"""
name_tag = "_{}".format(name)
tags = [t for t in tags]
tags.append(name_tag)
tags.append("multi_substrate")
native.py_test(
name = "{}.tf".format(name),
size = size,
srcs = srcs,
main = "{}.py".format(name),
deps = deps,
tags = tags,
srcs_version = srcs_version,
timeout = timeout,
shard_count = shard_count,
)
if "numpy" not in disabled_substrates:
numpy_srcs = _substrate_srcs(srcs, "numpy")
native.genrule(
name = "rewrite_{}_numpy".format(name),
srcs = srcs,
outs = numpy_srcs,
cmd = "$(location {}) $(SRCS) > $@".format(REWRITER_TARGET),
tools = [REWRITER_TARGET],
)
py3_test(
name = "{}.numpy".format(name),
size = numpy_size or size,
srcs = numpy_srcs,
main = _substrate_src("{}.py".format(name), "numpy"),
deps = _substrate_deps(deps, "numpy"),
tags = tags + ["tfp_numpy"] + numpy_tags,
srcs_version = srcs_version,
python_version = "PY3",
timeout = timeout,
shard_count = shard_count,
)
if "jax" not in disabled_substrates:
jax_srcs = _substrate_srcs(srcs, "jax")
native.genrule(
name = "rewrite_{}_jax".format(name),
srcs = srcs,
outs = jax_srcs,
cmd = "$(location {}) $(SRCS) --numpy_to_jax > $@".format(REWRITER_TARGET),
tools = [REWRITER_TARGET],
)
jax_deps = _substrate_deps(deps, "jax")
# [internal] Add JAX build dep
py3_test(
name = "{}.jax".format(name),
size = jax_size or size,
srcs = jax_srcs,
main = _substrate_src("{}.py".format(name), "jax"),
deps = jax_deps,
tags = tags + ["tfp_jax"] + jax_tags,
srcs_version = srcs_version,
python_version = "PY3",
timeout = timeout,
shard_count = shard_count,
)
native.test_suite(
name = name,
tags = [name_tag],
)
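For orientation, a hypothetical BUILD snippet using the two macros above (target and file names are invented for illustration) could look like:

multi_substrate_py_library(
    name = "normal",
    srcs = ["normal.py"],
    deps = [
        ":distribution",
        "//tensorflow_probability/python/internal:dtype_util",
    ],
)

multi_substrate_py_test(
    name = "normal_test",
    srcs = ["normal_test.py"],
    deps = [":normal"],
    jax_tags = ["notap"],
)

This would produce `normal`, `normal.numpy`, and `normal.jax` libraries, plus `normal_test.tf`, `normal_test.numpy`, and `normal_test.jax` tests grouped under a `normal_test` test_suite.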
| 35.698454 | 89 | 0.608043 | [remaining quality-signal columns omitted]

ed583ccefc13cf5fca32a4b108662e62505e92e1 | 5,425 | py | Python
src/wikidated/wikidata/wikidata_dump.py | lschmelzeisen/wikidata-history-analyzer | 8673639b61839d2dca271fbbaf2feb8563b75f2d
["ECL-2.0", "Apache-2.0"] | stars: 6 (2021-06-10T09:26:44.000Z to 2021-07-07T13:49:00.000Z) | issues: null | forks: null
#
# Copyright 2021 Lukas Schmelzeisen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
import json
from datetime import date, datetime
from logging import getLogger
from pathlib import Path
from typing import Mapping, MutableSequence, Sequence, Type, TypeVar
import requests
from pydantic import BaseModel as PydanticModel
from pydantic import validator
from tqdm import tqdm # type: ignore
from typing_extensions import Final
from wikidated._utils import RangeMap
from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile
from wikidated.wikidata.wikidata_dump_pages_meta_history import (
WikidataDumpPagesMetaHistory,
)
from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable
_LOGGER = getLogger(__name__)
_T_WikidataDumpFile = TypeVar("_T_WikidataDumpFile", bound=WikidataDumpFile)
class WikidataDump:
def __init__(
self,
dump_dir: Path,
version: date,
*,
mirror: str = "https://dumps.wikimedia.org",
) -> None:
self._dump_dir = dump_dir
self.version: Final = version
self.mirror: Final = mirror
self._dump_status = _WikidataDumpStatus.load(
self._dump_dir, self.version, self.mirror
)
self.sites_table: Final = self._construct_dumps(
WikidataDumpSitesTable, "sitestable"
)[0]
self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]()
for dump_file in self._construct_dumps(
WikidataDumpPagesMetaHistory, "metahistory7zdump"
):
self.pages_meta_history[dump_file.page_ids] = dump_file
def download(
self, *, sites_table: bool = True, pages_meta_history: bool = True
) -> None:
_LOGGER.info(
f"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'."
)
dump_files: MutableSequence[WikidataDumpFile] = []
if sites_table:
dump_files.append(self.sites_table)
if pages_meta_history:
dump_files.extend(self.pages_meta_history.values())
with tqdm(
desc=f"Wikidata dump {self.version:%4Y%2m%2d} files",
total=len(dump_files),
dynamic_ncols=True,
position=1,
) as progress_bar_files, tqdm(
desc=f"Wikidata dump {self.version:%4Y%2m%2d} bytes",
total=sum(dump_file.size for dump_file in dump_files),
dynamic_ncols=True,
position=2,
unit="B",
unit_scale=True,
unit_divisor=1024,
) as progress_bar_size:
for dump_file in dump_files:
dump_file.download()
progress_bar_files.update(1)
progress_bar_size.update(dump_file.size)
_LOGGER.info(f"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.")
def _construct_dumps(
self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str
) -> Sequence[_T_WikidataDumpFile]:
return [
dump_type(
path=self._dump_dir / path,
url=self.mirror + dump_status_file.url,
sha1=dump_status_file.sha1,
size=dump_status_file.size,
)
for path, dump_status_file in self._dump_status.jobs[
dump_type_id
].files.items()
]
class _WikidataDumpStatusFile(PydanticModel):
size: int
url: str
md5: str
sha1: str
class _WikidataDumpStatusJob(PydanticModel):
status: str
updated: datetime
files: Mapping[str, _WikidataDumpStatusFile]
@validator("updated", pre=True)
def _parse_datetime(cls, value: str) -> datetime: # noqa: N805
return datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
class _WikidataDumpStatus(PydanticModel):
jobs: Mapping[str, _WikidataDumpStatusJob]
version: str
@classmethod
def load(cls, dump_dir: Path, version: date, mirror: str) -> _WikidataDumpStatus:
path = dump_dir / f"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json"
if not path.exists():
url = f"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json"
_LOGGER.debug(f"Downloading Wikidata dump status from '{url}'.")
response = requests.get(url)
response.raise_for_status()
path.parent.mkdir(exist_ok=True, parents=True)
with path.open("w", encoding="UTF-8") as fd:
fd.write(json.dumps(response.json(), indent=2) + "\n")
_LOGGER.debug("Done downloading Wikidata dump status.")
dump_status = _WikidataDumpStatus.parse_file(path)
for job_name, job in dump_status.jobs.items():
if job.status != "done":
path.unlink()
raise Exception(f"Job '{job_name}' is not 'done', but '{job.status}'.")
return dump_status
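A minimal usage sketch for this module (the dump directory and dump version below are hypothetical):

from datetime import date
from pathlib import Path

dump = WikidataDump(Path("dumps"), date(2021, 6, 1))
dump.download(sites_table=True, pages_meta_history=False)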
| 33.487654 | 87 | 0.654194 | [remaining quality-signal columns omitted]

ed58570015c33daeb7f03921904a43571a44e66f | 18,726 | py | Python
tcapygen/layoutgen.py | Ahrvo-Trading-Systems/tcapy | df8439aa5c754fc9a7fde463c44c489b27112f76
["Apache-2.0"] | stars: 189 (2020-03-20T17:03:04.000Z to 2022-03-30T13:33:27.000Z) | issues: 4 (2020-06-06T14:58:21.000Z to 2022-03-10T22:31:15.000Z) | forks: 60 (2020-03-20T17:06:56.000Z to 2022-03-26T02:48:58.000Z)
from __future__ import division, print_function
__author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
#
# Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
## Web server components
import dash_core_components as dcc
import dash_html_components as html
import base64
import os
## Date/time components
import pandas as pd
import datetime
from datetime import timedelta
from collections import OrderedDict
from pandas.tseries.offsets import *
from tcapy.vis.layoutdash import LayoutDash
########################################################################################################################
class LayoutDashImplGen(LayoutDash):
"""This implements the LayoutDash abstract class, to create the web based GUI for the tcapy application. It creates two
web pages
- detailed_page - for doing detailed tcapy analysis for a specific currency pair
- aggregated_page - for more aggregated style analysis across multiple currency pairs and over multiple time periods
"""
def __init__(self, app=None, constants=None, url_prefix=''):
super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix)
available_dates = pd.date_range(
datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window),
datetime.datetime.today().date(), freq=BDay())
times = pd.date_range("0:00", "23:59", freq="15min")
### create the possible values for drop down boxes on both pages
# Reverse date list (for both detailed and aggregated pages)
self.available_dates = [x.date() for x in available_dates[::-1]]
# For detailed page only
self.available_times = [t.strftime("%H:%M") for t in times]
self.available_tickers = self._constants.available_tickers_dictionary['All']
self.available_venues = self._constants.available_venues_dictionary['All']
self.available_brokers = self._constants.available_brokers_dictionary['All']
self.available_algos = self._constants.available_algos_dictionary['All']
self.available_market_data = self._constants.available_market_data
self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap',
'buy trade', 'sell trade']
self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade']
self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask']
# For aggregated page only
self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary)
self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary)
self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary)
self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary)
self.available_event_types = self._constants.available_event_types
self.available_metrics = self._constants.available_metrics
self.available_reload = ['no', 'yes']
self.available_visualization = ['yes', 'no']
self.construct_layout()
def _flatten_dictionary(self, dictionary):
available = dictionary['All']
available_groups = self._util_func.dict_key_list(dictionary.keys())
return self.flatten_list_of_strings([available_groups, available])
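    # Illustrative example (comment added for exposition; assumes
    # dict_key_list returns the dictionary keys as a list and
    # flatten_list_of_strings concatenates its sublists):
    #   _flatten_dictionary({'All': ['EURUSD', 'GBPUSD'], 'G10': ['EURUSD']})
    #   -> ['All', 'G10', 'EURUSD', 'GBPUSD']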
def construct_layout(self):
self.page_content = html.Div([
dcc.Location(id='url', refresh=False),
html.Div(id='page-content')
])
link_bar_dict = {'Detailed' : 'detailed',
'Aggregated' : 'aggregated',
'Compliance' : 'compliance'}
trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not',
'exec not in rep cur', 'slippage']
broker_cols = ['Date', 'by broker notional (rep cur)']
        # Main page for detailed analysis (e.g. over the course of a few days)
self.pages['detailed'] = html.Div([
self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'),
self._sc.link_bar(link_bar_dict),
self._sc.width_row_cell(html.B("Status: ok", id='detailed-status'), margin_left=5),
self._sc.horizontal_bar(),
# Dropdown selection boxes
html.Div([
self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates,
'start-time-val' : self.available_times},
prefix_id='detailed'),
self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates),
('finish-time-val', self.available_times)]),
prefix_id='detailed'),
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed',
drop_down_values=self.available_tickers),
self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed',
drop_down_values=self.available_grouped_brokers),
self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed',
drop_down_values=self.available_grouped_algos),
self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed',
drop_down_values=self.available_grouped_venues),
self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed',
drop_down_values=self.available_market_data),
self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed',
drop_down_values=self.available_metrics)
]),
self._sc.horizontal_bar(),
self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'),
# self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'),
# Orders
self._sc.horizontal_bar(),
self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed',
element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot',
self.available_order_plot_lines),
downloadplot_caption='Download CSV',
downloadplot_tag='order-candle-timeline-download-link',
download_file='download_order_candle_timeline', height=500),
self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500),
self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500),
# Execution trades
self._sc.horizontal_bar(),
self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed',
element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot',
self.available_execution_plot_lines),
downloadplot_caption='Download CSV',
downloadplot_tag='execution-candle-timeline-download-link',
download_file='download_execution_candle_timeline.csv', height=500),
self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500),
self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500),
# Detailed tcapy markout table for executions
html.Div([
html.H3('Executions: Markout Table'),
html.Div(id='detailed-execution-table')
],
style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5,
'marginRight': 5}),
],
style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})
################################################################################################################
        # Secondary page for analysing aggregated statistics over long periods of time, e.g. who is the best broker?
self.pages['aggregated'] = html.Div([
self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'),
self._sc.link_bar(link_bar_dict),
self._sc.width_row_cell(html.B("Status: ok", id='aggregated-status'), margin_left=5),
self._sc.horizontal_bar(),
# dropdown selection boxes
html.Div([
self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_tickers, multiselect=True),
self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_brokers, multiselect=True),
self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_algos, multiselect=True),
self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_venues, multiselect=True),
self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated',
drop_down_values=self.available_reload),
self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated',
drop_down_values=self.available_market_data),
self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated',
drop_down_values=self.available_event_types),
self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated',
drop_down_values=self.available_metrics),
]),
self._sc.horizontal_bar(),
self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'),
# , msg_id='aggregated-status'),
self._sc.horizontal_bar(),
# self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]),
self._sc.plot(caption='Aggregated Trader: Summary',
id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500),
self._sc.horizontal_bar(),
self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot',
prefix_id='aggregated', height=500),
self._sc.horizontal_bar(),
self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot',
'execution-by-venue-dist-plot'],
prefix_id='aggregated', height=500),
self._sc.horizontal_bar()
],
style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})
################################################################################################################
self.pages['compliance'] = html.Div([
self._sc.header_bar('FX: Compliance Analysis', img='logo.png'),
self._sc.link_bar(link_bar_dict),
self._sc.width_row_cell(html.B("Status: ok", id='compliance-status'), margin_left=5),
self._sc.horizontal_bar(),
# Dropdown selection boxes
html.Div([
self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance',
drop_down_values=self.available_grouped_tickers, multiselect=True),
self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance',
drop_down_values=self.available_grouped_brokers, multiselect=True),
self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance',
drop_down_values=self.available_grouped_algos, multiselect=True),
self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance',
drop_down_values=self.available_grouped_venues, multiselect=True),
self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance',
drop_down_values=self.available_reload),
self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance',
drop_down_values=self.available_market_data),
self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance',
drop_down_values=self.available_reload),
self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance',
drop_down_values=self.available_times),
self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance',
drop_down_values=self.available_times),
self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance',
drop_down_values=self.available_slippage_bounds),
self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance',
drop_down_values=self.available_visualization)
]),
self._sc.horizontal_bar(),
html.Div([
self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'),
# self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'),
# self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'),
]),
self._sc.horizontal_bar(),
self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance',
columns=trade_outliers_cols,
downloadplot_caption='Trade outliers CSV',
downloadplot_tag='execution-by-anomalous-download-link',
download_file='download_execution_by_anomalous.csv'),
self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance',
columns=broker_cols,
downloadplot_caption='Download broker CSV',
downloadplot_tag='summary-by-broker-download-link',
download_file='download_broker.csv'
),
self._sc.horizontal_bar()
],
style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})
# ID flags
self.id_flags = {
# Detailed trader page
# 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'},
# 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'},
'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df',
'order': 'sparse_market_order_df'},
'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'},
'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'},
'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'},
'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df',
'order-candle-timeline': 'sparse_market_order_df'},
# Aggregated trader page
'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker',
'execution-by-venue': 'bar_trade_df_by/mean/venue'},
'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker',
'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'},
'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker',
'execution-by-venue': 'dist_trade_df_by/pdf/venue'},
# Compliance page
'compliance_metric_table_trade_order':
{'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all',
'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'},
'compliance_download_link_trade_order':
{'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all',
'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'},
}
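A wiring sketch for this class (hypothetical; `app` and `constants` are assumed to come from tcapy's usual Dash application setup):

layout = LayoutDashImplGen(app=app, constants=constants, url_prefix='')
detailed_page = layout.pages['detailed']  # html.Div for the detailed page
flags = layout.id_flags['detailed_markout_trade_order']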
| 58.51875 | 154 | 0.595215 | [remaining quality-signal columns omitted]

ed587bf56577619d8ec39ef62825f11e9ce7e776 | 3,511 | py | Python
projects/MAE/utils/weight_convert.py | Oneflow-Inc/libai | e473bd3962f07b1e37232d2be39c8257df0ec0f3
["Apache-2.0"] | stars: 55 (2021-12-10T08:47:06.000Z to 2022-03-28T09:02:15.000Z) | issues: 106 (2021-11-03T05:16:45.000Z to 2022-03-31T06:16:23.000Z) | forks: 13 (2021-12-29T08:12:08.000Z to 2022-03-28T06:59:45.000Z)
# coding=utf-8
# Copyright 2021 The OneFlow Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import oneflow as flow
import torch
logger = logging.getLogger(__name__)
def convert_qkv_weight(cfg, value):
"""
Convert qkv.weight to be compatible with LiBai transformer layer
Args:
cfg: config file
value: qkv.weight in the loaded checkpoint
"""
num_heads = cfg.model.num_heads
hidden_size = cfg.model.embed_dim
head_size = int(hidden_size / num_heads)
qkv_weight = (
value.view([3, num_heads, head_size, hidden_size])
.permute(1, 0, 2, 3)
.contiguous()
.view(hidden_size * 3, hidden_size)
)
return qkv_weight
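# Shape walk-through (comment added for exposition): the incoming qkv.weight is
# [3 * hidden_size, hidden_size]; viewing it as
# [3, num_heads, head_size, hidden_size] and permuting to
# [num_heads, 3, head_size, hidden_size] groups each head's q, k and v rows
# together before flattening back to [3 * hidden_size, hidden_size].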
def convert_qkv_bias(cfg, value):
"""
Convert qkv.bias to be compatible with LiBai transformer layer
Args:
cfg: config file
value: qkv.bias in the loaded checkpoint
"""
num_heads = cfg.model.num_heads
hidden_size = cfg.model.embed_dim
head_size = int(hidden_size / num_heads)
qkv_bias = (
value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3)
)
return qkv_bias
def filter_keys(key, value, cfg):
"""
Filtering the state_dict keys and values to match LiBai's MAE model
"""
if "norm1" in key:
key = key.replace("norm1", "input_layernorm")
elif "attn.qkv" in key:
key = key.replace("attn.qkv", "self_attention.query_key_value")
if "weight" in key:
value = convert_qkv_weight(cfg, value)
if "bias" in key:
value = convert_qkv_bias(cfg, value)
elif "attn.proj" in key:
key = key.replace("attn.proj", "self_attention.dense")
elif "norm2" in key:
key = key.replace("norm2", "post_attention_layernorm")
elif "mlp.fc1" in key:
key = key.replace("mlp.fc1", "mlp.dense_h_to_4h")
elif "mlp.fc2" in key:
key = key.replace("mlp.fc2", "mlp.dense_4h_to_h")
elif "fc_norm" in key:
key = key.replace("fc_norm", "norm")
return key, value
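# For illustration (comment added for exposition): a torch key such as
# "blocks.0.attn.qkv.weight" is renamed to
# "blocks.0.self_attention.query_key_value.weight", and its tensor is reshaped
# by convert_qkv_weight on the way through.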
def load_torch_checkpoint(model, cfg, path="./mae_finetuned_vit_base.pth", strict=False):
"""
Load checkpoint from the given torch weights.
Torch weight can be downloaded from the original repo:
https://github.com/facebookresearch/mae
"""
torch_dict = torch.load(path, map_location="cpu")["model"]
parameters = torch_dict
new_parameters = dict()
for key, value in parameters.items():
if "num_batches_tracked" not in key:
# to global tensor
key, val = filter_keys(key, value, cfg)
val = val.detach().cpu().numpy()
val = flow.tensor(val).to_global(
sbp=flow.sbp.broadcast, placement=flow.placement("cuda", ranks=[0])
)
new_parameters[key] = val
model.load_state_dict(new_parameters, strict=strict)
print("Successfully load torch mae checkpoint.")
return model
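A minimal usage sketch (hypothetical: `model` is an already-constructed LiBai MAE model and `cfg` its matching config object):

model = load_torch_checkpoint(model, cfg, path="./mae_finetuned_vit_base.pth", strict=False)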
| 32.509259 | 95 | 0.656508 | [remaining quality-signal columns omitted]

ed5a4c9715fe0f81c7675d25ae101b58391d1929 | 8,462 | py | Python
spot/level1.py | K0gata/SGLI_Python_output_tool (also listed as K0gata/SGLI_Python_Open_Tool) | 1368e0408edd737a5109d0523db6c147faa80b97
["MIT"] | stars: 1 (2020-08-04T04:17:49.000Z) | issues: null | forks: null
import numpy as np
import logging
from decimal import Decimal, ROUND_HALF_UP
from abc import ABC, abstractmethod
from spot.utility import bilin_2d
from spot.config import PROJ_TYPE
# =============================
# Level-1 template class
# =============================
class L1Interface(ABC):
@property
@abstractmethod
def PROJECTION_TYPE(self):
raise NotImplementedError()
@property
@abstractmethod
def ALLOW_PROJECTION_TYPE(self):
        raise NotImplementedError()
def __init__(self, h5_file, product_id):
self.h5_file = h5_file
self.product_id = product_id
geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs
self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0]
self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0]
img_data_grp_attrs = self.h5_file['Image_data'].attrs
self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0]
self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0]
def get_product_data(self, prod_name:str):
dset = self.h5_file['Image_data/' + prod_name]
# Return uint16 type data if the product is QA_flag or Line_tai93
if 'QA_flag' == prod_name or 'Line_tai93' == prod_name:
return dset[:]
# Validate
data = dset[:].astype(np.float32)
if 'Error_DN' in dset.attrs:
data[data == dset.attrs['Error_DN'][0]] = np.NaN
with np.warnings.catch_warnings():
np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)')
if 'Maximum_valid_DN' in dset.attrs:
data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN
if 'Minimum_valid_DN' in dset.attrs:
data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN
# Convert DN to physical value
data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0]
return data
@abstractmethod
def get_geometry_data(self, data_name:str, **kwargs):
raise NotImplementedError()
@abstractmethod
def get_geometry_data_list(self):
raise NotImplementedError()
def get_product_data_list(self):
return list(self.h5_file['/Image_data'].keys())
def get_unit(self, prod_name: str):
if 'Rt_' in prod_name:
return 'NA'
# Get attrs set
unit_name = 'Unit'
attrs = self.h5_file['/Image_data/' + prod_name].attrs
# Get unit
if unit_name not in attrs:
return 'NA'
return attrs[unit_name][0].decode('UTF-8')
# =============================
# Level-1 map-projection class
# =============================
class Scene(L1Interface):
PROJECTION_TYPE = PROJ_TYPE.SCENE.name
ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name]
def __init__(self, h5_file, product_id):
super().__init__(h5_file, product_id)
self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0]
self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0]
img_data_grp_attrs = self.h5_file['Image_data'].attrs
self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0]
def get_geometry_data(self, data_name: str, **kwargs):
interval = kwargs['interval']
dset = self.h5_file['Geometry_data/' + data_name]
data = dset[:]
        if data_name != 'Latitude' and data_name != 'Longitude':
data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0]
# Finish if interval is none
if interval is None or interval == 'none':
return data
# Interpolate raw data
if interval == 'auto':
interp_interval = dset.attrs['Resampling_interval'][0]
else:
interp_interval = interval
lon_mode = False
if 'Longitude' == data_name:
lon_mode = True
if interp_interval > 1:
data = bilin_2d(data, interp_interval, lon_mode)
# Trim away the excess pixel/line
(data_size_lin, data_size_pxl) = data.shape
if (kwargs['fit_img_size'] is True) and (self.img_n_lin <= data_size_lin) and (self.img_n_pix <= data_size_pxl):
data = data[:self.img_n_lin, :self.img_n_pix]
return data
def get_geometry_data_list(self):
return list(self.h5_file['/Geometry_data'].keys())
def get_allow_projection_type(self):
return self.ALLOW_PROJECTION_TYPE
# =============================
# Level-1 sub-processing level class
# =============================
class L1B(Scene):
# -----------------------------
# Public
# -----------------------------
def get_product_data(self, prod_name:str):
if 'Land_water_flag' in prod_name:
return self._get_land_water_flag()
if 'Lt_' in prod_name:
return self._get_Lt(prod_name)
if 'Rt_' in prod_name:
return self._get_Rt(prod_name)
if 'Stray_light_correction_flag_' in prod_name:
return self._get_stray_light_correction_flag(prod_name)
return super().get_product_data(prod_name)
# -----------------------------
# Private
# -----------------------------
def _get_land_water_flag(self):
dset = self.h5_file['Image_data/Land_water_flag']
data = dset[:].astype(np.float32)
if 'Error_DN' in dset.attrs:
data[data == dset.attrs['Error_value'][0]] = np.NaN
with np.warnings.catch_warnings():
np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)')
data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN
data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN
return data
def _get_Lt(self, prod_name):
dset = self.h5_file['Image_data/' + prod_name]
dn_data = dset[:]
mask = dset.attrs['Mask'][0]
data = np.bitwise_and(dn_data, mask).astype(np.float32)
data = data * dset.attrs['Slope'] + dset.attrs['Offset']
data[dn_data == dset.attrs['Error_DN']] = np.NaN
with np.warnings.catch_warnings():
np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)')
data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN
data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN
return data
def _get_Rt(self, prod_name):
prod_name = prod_name.replace('Rt_', 'Lt_')
dset = self.h5_file['Image_data/' + prod_name]
dn_data = dset[:]
mask = dset.attrs['Mask'][0]
data = np.bitwise_and(dn_data, mask).astype(np.float32)
data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance']
data[dn_data == dset.attrs['Error_DN']] = np.NaN
with np.warnings.catch_warnings():
np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)')
data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN
data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN
cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True)))
data = data / cos_theta_0
return data
def _get_stray_light_correction_flag(self, prod_name):
prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_')
dset = self.h5_file['Image_data/' + prod_name]
dn_data = dset[:]
data = np.bitwise_and(dn_data, 0x8000)
data[dn_data == dset.attrs['Error_DN']] = 0
return data > 0
class VNRL1B(L1B):
def get_product_data_list(self):
prod_list = super().get_product_data_list()
        for prod in list(prod_list):  # iterate over a snapshot; items are appended below
if 'Lt_' in prod:
prod_list.append(prod.replace('Lt', 'Rt'))
prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag'))
prod_list = sorted(prod_list)
return prod_list
class IRSL1B(L1B):
def get_product_data_list(self):
prod_list = super().get_product_data_list()
        for prod in list(prod_list):  # iterate over a snapshot; items are appended below
if 'Lt_SW' in prod:
prod_list.append(prod.replace('Lt', 'Rt'))
prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag'))
prod_list = sorted(prod_list)
return prod_list
# EOF
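A usage sketch for these classes (the file name and product id are invented; the file is assumed to be an SGLI VNR Level-1B HDF5 product opened with h5py):

import h5py

with h5py.File('GC1SG1_XXXX_L1B.h5', 'r') as h5:
    scene = VNRL1B(h5, product_id='VNR_L1B')
    lt = scene.get_product_data('Lt_VN08')
    lat = scene.get_geometry_data('Latitude', interval='auto', fit_img_size=True)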
| 34.538776 | 120 | 0.610021 | [remaining quality-signal columns omitted]

ed5a7dc4339280b02e5e632da64cfe3100fda887 | 345 | py | Python
168. Excel Sheet Column Title.py | Alvin1994/leetcode-python3- | ba2bde873c925554cc39f2bd13be81967713477d
["Apache-2.0"] | stars: null | issues: null | forks: null
class Solution:
# @return a string
def convertToTitle(self, n: int) -> str:
capitals = [chr(x) for x in range(ord('A'), ord('Z')+1)]
result = []
while n > 0:
result.insert(0, capitals[(n-1)%len(capitals)])
            n = (n-1) // len(capitals)  # floor division, not modulo, moves to the next letter position
# result.reverse()
return ''.join(result)
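A quick sanity check of the conversion (illustrative values):

assert Solution().convertToTitle(1) == 'A'
assert Solution().convertToTitle(28) == 'AB'
assert Solution().convertToTitle(701) == 'ZY'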
| 34.5 | 64 | 0.510145 | [remaining quality-signal columns omitted]

ed5ab9e7476a3e24312d9ef871509f4e43e86312 | 18,788 | py | Python
devil/devil/utils/cmd_helper.py | Martijnve23/catapult | 5c63b19d221af6a12889e8727acc85d93892cab7
(forks record: infra/services/android_docker/third_party/devil/utils/cmd_helper.py | NDevTK/chromium-infra | d38e088e158d81f7f2065a38aa1ea1894f735ec4)
["BSD-3-Clause"] | stars: 1,894 (2015-04-17T18:29:53.000Z to 2022-03-28T22:41:06.000Z) | issues: 4,640 (2015-07-08T16:19:08.000Z to 2019-12-02T15:01:27.000Z) | forks: 698 (2015-06-02T19:18:35.000Z to 2022-03-29T16:57:15.000Z)
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for subprocess to make calling shell commands easier."""
import codecs
import logging
import os
import pipes
import select
import signal
import string
import subprocess
import sys
import time
CATAPULT_ROOT_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..'))
SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six')
if SIX_PATH not in sys.path:
sys.path.append(SIX_PATH)
import six
from devil import base_error
logger = logging.getLogger(__name__)
_SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')
# Cache the string-escape codec to ensure subprocess can find it
# later. Return value doesn't matter.
if six.PY2:
codecs.lookup('string-escape')
def SingleQuote(s):
"""Return an shell-escaped version of the string using single quotes.
Reliably quote a string which may contain unsafe characters (e.g. space,
quote, or other special characters such as '$').
The returned value can be used in a shell command line as one token that gets
to be interpreted literally.
Args:
s: The string to quote.
Return:
The string quoted using single quotes.
"""
return pipes.quote(s)
def DoubleQuote(s):
"""Return an shell-escaped version of the string using double quotes.
Reliably quote a string which may contain unsafe characters (e.g. space
or quote characters), while retaining some shell features such as variable
interpolation.
The returned value can be used in a shell command line as one token that gets
to be further interpreted by the shell.
The set of characters that retain their special meaning may depend on the
shell implementation. This set usually includes: '$', '`', '\', '!', '*',
and '@'.
Args:
s: The string to quote.
Return:
The string quoted using double quotes.
"""
if not s:
return '""'
elif all(c in _SafeShellChars for c in s):
return s
else:
return '"' + s.replace('"', '\\"') + '"'
def ShrinkToSnippet(cmd_parts, var_name, var_value):
"""Constructs a shell snippet for a command using a variable to shrink it.
Takes into account all quoting that needs to happen.
Args:
cmd_parts: A list of command arguments.
var_name: The variable that holds var_value.
var_value: The string to replace in cmd_parts with $var_name
Returns:
A shell snippet that does not include setting the variable.
"""
def shrink(value):
parts = (x and SingleQuote(x) for x in value.split(var_value))
with_substitutions = ('"$%s"' % var_name).join(parts)
return with_substitutions or "''"
return ' '.join(shrink(part) for part in cmd_parts)
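# Illustrative example (comment added for exposition):
#   ShrinkToSnippet(['adb', '-s', '0123456789', 'push'], 'serial', '0123456789')
#   returns: adb -s "$serial" push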
def Popen(args,
stdin=None,
stdout=None,
stderr=None,
shell=None,
cwd=None,
env=None):
# preexec_fn isn't supported on windows.
# pylint: disable=unexpected-keyword-arg
if sys.platform == 'win32':
close_fds = (stdin is None and stdout is None and stderr is None)
preexec_fn = None
else:
close_fds = True
preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if six.PY2:
return subprocess.Popen(
args=args,
cwd=cwd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
shell=shell,
close_fds=close_fds,
env=env,
preexec_fn=preexec_fn
)
else:
# opens stdout in text mode, so that caller side always get 'str',
# and there will be no type mismatch error.
# Ignore any decoding error, so that caller will not crash due to
# uncaught exception. Decoding errors are unavoidable, as we
# do not know the encoding of the output, and in some output there
# will be multiple encodings (e.g. adb logcat)
return subprocess.Popen(
args=args,
cwd=cwd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
shell=shell,
close_fds=close_fds,
env=env,
preexec_fn=preexec_fn,
universal_newlines=True,
encoding='utf-8',
errors='ignore'
)
def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
pipe = Popen(
args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env)
pipe.communicate()
return pipe.wait()
def RunCmd(args, cwd=None):
"""Opens a subprocess to execute a program and returns its return value.
Args:
args: A string or a sequence of program arguments. The program to execute is
the string or the first item in the args sequence.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
Returns:
Return code from the command execution.
"""
logger.debug(str(args) + ' ' + (cwd or ''))
return Call(args, cwd=cwd)
def GetCmdOutput(args, cwd=None, shell=False, env=None):
"""Open a subprocess to execute a program and returns its output.
Args:
args: A string or a sequence of program arguments. The program to execute is
the string or the first item in the args sequence.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command.
env: If not None, a mapping that defines environment variables for the
subprocess.
Returns:
Captures and returns the command's stdout.
Prints the command's stderr to logger (which defaults to stdout).
"""
(_, output) = GetCmdStatusAndOutput(args, cwd, shell, env)
return output
def _ValidateAndLogCommand(args, cwd, shell):
if isinstance(args, six.string_types):
if not shell:
raise Exception('string args must be run with shell=True')
else:
if shell:
raise Exception('array args must be run with shell=False')
args = ' '.join(SingleQuote(str(c)) for c in args)
if cwd is None:
cwd = ''
else:
cwd = ':' + cwd
logger.debug('[host]%s> %s', cwd, args)
return args
def GetCmdStatusAndOutput(args,
cwd=None,
shell=False,
env=None,
merge_stderr=False):
"""Executes a subprocess and returns its exit code and output.
Args:
args: A string or a sequence of program arguments. The program to execute is
the string or the first item in the args sequence.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
env: If not None, a mapping that defines environment variables for the
subprocess.
merge_stderr: If True, captures stderr as part of stdout.
Returns:
The 2-tuple (exit code, stdout).
"""
status, stdout, stderr = GetCmdStatusOutputAndError(
args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr)
if stderr:
logger.critical('STDERR: %s', stderr)
logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(),
'<truncated>' if len(stdout) > 4096 else '')
return (status, stdout)
def StartCmd(args, cwd=None, shell=False, env=None):
"""Starts a subprocess and returns a handle to the process.
Args:
args: A string or a sequence of program arguments. The program to execute is
the string or the first item in the args sequence.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
env: If not None, a mapping that defines environment variables for the
subprocess.
Returns:
A process handle from subprocess.Popen.
"""
_ValidateAndLogCommand(args, cwd, shell)
return Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=shell,
cwd=cwd,
env=env)
def GetCmdStatusOutputAndError(args,
cwd=None,
shell=False,
env=None,
merge_stderr=False):
"""Executes a subprocess and returns its exit code, output, and errors.
Args:
args: A string or a sequence of program arguments. The program to execute is
the string or the first item in the args sequence.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
env: If not None, a mapping that defines environment variables for the
subprocess.
merge_stderr: If True, captures stderr as part of stdout.
Returns:
The 3-tuple (exit code, stdout, stderr).
"""
_ValidateAndLogCommand(args, cwd, shell)
stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE
pipe = Popen(
args,
stdout=subprocess.PIPE,
stderr=stderr,
shell=shell,
cwd=cwd,
env=env)
stdout, stderr = pipe.communicate()
return (pipe.returncode, stdout, stderr)
class TimeoutError(base_error.BaseError):
"""Module-specific timeout exception."""
def __init__(self, output=None):
super(TimeoutError, self).__init__('Timeout')
self._output = output
@property
def output(self):
return self._output
def _read_and_decode(fd, buffer_size):
data = os.read(fd, buffer_size)
if data and six.PY3:
data = data.decode('utf-8', errors='ignore')
return data
def _IterProcessStdoutFcntl(process,
iter_timeout=None,
timeout=None,
buffer_size=4096,
poll_interval=1):
"""An fcntl-based implementation of _IterProcessStdout."""
# pylint: disable=too-many-nested-blocks
import fcntl
try:
# Enable non-blocking reads from the child's stdout.
child_fd = process.stdout.fileno()
fl = fcntl.fcntl(child_fd, fcntl.F_GETFL)
fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
end_time = (time.time() + timeout) if timeout else None
iter_end_time = (time.time() + iter_timeout) if iter_timeout else None
while True:
if end_time and time.time() > end_time:
raise TimeoutError()
if iter_end_time and time.time() > iter_end_time:
yield None
iter_end_time = time.time() + iter_timeout
if iter_end_time:
iter_aware_poll_interval = min(poll_interval,
max(0, iter_end_time - time.time()))
else:
iter_aware_poll_interval = poll_interval
read_fds, _, _ = select.select([child_fd], [], [],
iter_aware_poll_interval)
if child_fd in read_fds:
data = _read_and_decode(child_fd, buffer_size)
if not data:
break
yield data
if process.poll() is not None:
# If process is closed, keep checking for output data (because of timing
# issues).
while True:
read_fds, _, _ = select.select([child_fd], [], [],
iter_aware_poll_interval)
if child_fd in read_fds:
data = _read_and_decode(child_fd, buffer_size)
if data:
yield data
continue
break
break
finally:
try:
if process.returncode is None:
# Make sure the process doesn't stick around if we fail with an
# exception.
process.kill()
except OSError:
pass
process.wait()
def _IterProcessStdoutQueue(process,
iter_timeout=None,
timeout=None,
buffer_size=4096,
poll_interval=1):
"""A Queue.Queue-based implementation of _IterProcessStdout.
TODO(jbudorick): Evaluate whether this is a suitable replacement for
_IterProcessStdoutFcntl on all platforms.
"""
# pylint: disable=unused-argument
if six.PY3:
import queue
else:
import Queue as queue
import threading
stdout_queue = queue.Queue()
def read_process_stdout():
# TODO(jbudorick): Pick an appropriate read size here.
while True:
try:
output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size)
except IOError:
break
stdout_queue.put(output_chunk, True)
if not output_chunk and process.poll() is not None:
break
reader_thread = threading.Thread(target=read_process_stdout)
reader_thread.start()
end_time = (time.time() + timeout) if timeout else None
try:
while True:
if end_time and time.time() > end_time:
raise TimeoutError()
try:
s = stdout_queue.get(True, iter_timeout)
if not s:
break
yield s
except queue.Empty:
yield None
finally:
try:
if process.returncode is None:
# Make sure the process doesn't stick around if we fail with an
# exception.
process.kill()
except OSError:
pass
process.wait()
reader_thread.join()
_IterProcessStdout = (_IterProcessStdoutQueue
if sys.platform == 'win32' else _IterProcessStdoutFcntl)
"""Iterate over a process's stdout.
This is intentionally not public.
Args:
process: The process in question.
iter_timeout: An optional length of time, in seconds, to wait in
between each iteration. If no output is received in the given
time, this generator will yield None.
timeout: An optional length of time, in seconds, during which
the process must finish. If it fails to do so, a TimeoutError
will be raised.
buffer_size: The maximum number of bytes to read (and thus yield) at once.
poll_interval: The length of time to wait in calls to `select.select`.
If iter_timeout is set, the remaining length of time in the iteration
may take precedence.
Raises:
TimeoutError: if timeout is set and the process does not complete.
Yields:
basestrings of data or None.
"""
def GetCmdStatusAndOutputWithTimeout(args,
timeout,
cwd=None,
shell=False,
logfile=None,
env=None):
"""Executes a subprocess with a timeout.
Args:
args: List of arguments to the program, the program to execute is the first
element.
timeout: the timeout in seconds or None to wait forever.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
logfile: Optional file-like object that will receive output from the
command as it is running.
env: If not None, a mapping that defines environment variables for the
subprocess.
Returns:
The 2-tuple (exit code, output).
Raises:
TimeoutError on timeout.
"""
_ValidateAndLogCommand(args, cwd, shell)
output = six.StringIO()
process = Popen(
args,
cwd=cwd,
shell=shell,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env)
try:
for data in _IterProcessStdout(process, timeout=timeout):
if logfile:
logfile.write(data)
output.write(data)
except TimeoutError:
raise TimeoutError(output.getvalue())
str_output = output.getvalue()
logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(),
'<truncated>' if len(str_output) > 4096 else '')
return process.returncode, str_output
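# Example call (a sketch; the command and the 10-second timeout are placeholders):
#
#   status, output = GetCmdStatusAndOutputWithTimeout(['ls', '-l'], 10)
#   if status != 0:
#       logger.warning('Command failed with output: %s', output)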
def IterCmdOutputLines(args,
iter_timeout=None,
timeout=None,
cwd=None,
shell=False,
env=None,
check_status=True):
"""Executes a subprocess and continuously yields lines from its output.
Args:
args: List of arguments to the program, the program to execute is the first
element.
iter_timeout: Timeout for each iteration, in seconds.
timeout: Timeout for the entire command, in seconds.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command. Must be True if args
is a string and False if args is a sequence.
env: If not None, a mapping that defines environment variables for the
subprocess.
check_status: A boolean indicating whether to check the exit status of the
process after all output has been read.
Yields:
The output of the subprocess, line by line.
Raises:
CalledProcessError if check_status is True and the process exited with a
non-zero exit status.
"""
cmd = _ValidateAndLogCommand(args, cwd, shell)
process = Popen(
args,
cwd=cwd,
shell=shell,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
return _IterCmdOutputLines(
process,
cmd,
iter_timeout=iter_timeout,
timeout=timeout,
check_status=check_status)
def _IterCmdOutputLines(process,
cmd,
iter_timeout=None,
timeout=None,
check_status=True):
buffer_output = ''
iter_end = None
cur_iter_timeout = None
if iter_timeout:
iter_end = time.time() + iter_timeout
cur_iter_timeout = iter_timeout
for data in _IterProcessStdout(
process, iter_timeout=cur_iter_timeout, timeout=timeout):
if iter_timeout:
# Check whether the current iteration has timed out.
cur_iter_timeout = iter_end - time.time()
if data is None or cur_iter_timeout < 0:
yield None
iter_end = time.time() + iter_timeout
continue
else:
assert data is not None, (
'Iteration received no data despite no iter_timeout being set. '
'cmd: %s' % cmd)
# Construct lines to yield from raw data.
buffer_output += data
has_incomplete_line = buffer_output[-1] not in '\r\n'
lines = buffer_output.splitlines()
buffer_output = lines.pop() if has_incomplete_line else ''
for line in lines:
yield line
if iter_timeout:
iter_end = time.time() + iter_timeout
if buffer_output:
yield buffer_output
if check_status and process.returncode:
raise subprocess.CalledProcessError(process.returncode, cmd)
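# Illustrative line-streaming loop (a sketch; the command is a placeholder and
# process_line is a hypothetical consumer):
#
#   for line in IterCmdOutputLines(['tail', '-f', 'log.txt'], iter_timeout=5):
#       if line is None:
#           continue  # no complete line arrived within iter_timeout
#       process_line(line)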
---- file: api/to_astm.py | lang: Python | size: 2,845 bytes | license: MIT | repo: urchinpro/L2-forms @ 37f33386984efbb2d1e92c73d915256247801109 | hexsha: ed5b5860c856a4418e7eeb1cf777cb4c10722142 ----
import itertools
from astm import codec
from collections import defaultdict
from django.utils import timezone
import directions.models as directions
import directory.models as directory
import api.models as api
import simplejson as json
def get_astm_header() -> list:
return ['H|\\^&', None, None, ['1', '2.00'], None, None, None, None, None, None, 'P', '1.00', timezone.now().strftime("%Y%m%d%H%M%S")]
def get_leave() -> list:
return ['L', 1, 'N']
def get_patient() -> list:
return ['P', 1]
def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list:
r = []
n = 0
iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction)
if not full:
iss_list = iss_list.filter(doc_confirmation__isnull=True)
for i in iss_list:
researches = defaultdict(list)
for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False):
rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer)
if not rel.exists():
continue
rel = rel[0]
tube = directions.TubesRegistration.objects.filter(type__fractions=fraction)
if not tube.exists():
continue
tube = tube[0]
researches[tube.pk].append(rel.astm_field)
for tpk in researches:
n += 1
r.append(['O', n, tpk, None, [[None, x, None, None] for x in researches[tpk]]])
return r
def encode(m) -> str:
return codec.iter_encode(m)
def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str:
iss = [get_iss_direction(x, analyzer, full) for x in directions_list]
m = [get_astm_header(), get_patient()]
m = list(itertools.chain(m, *iss))
m.append(get_leave())
if out:
out.write(json.dumps(m))
return encode(m)
def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False):
m = [get_astm_header(), get_patient()]
n = 0
researches = defaultdict(list)
for row in issledovaniya:
k = row["pk"]
i = row["iss"]
for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False):
rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app)
if not rel.exists():
continue
rel = rel[0]
if rel.is_code:
researches[k].append([None, None, None, rel.astm_field])
else:
researches[k].append([None, rel.astm_field, None, None])
for tpk in researches:
n += 1
m.append(['O', n, tpk, None, researches[tpk]])
m.append(get_leave())
return encode(m)
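# Illustrative message assembly (a sketch; tube_pk and the 'GLU' field code are
# hypothetical values, and real order records come from get_iss_direction):
#
#   message = [get_astm_header(), get_patient()]
#   message.append(['O', 1, tube_pk, None, [[None, 'GLU', None, None]]])
#   message.append(get_leave())
#   payload = encode(message)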
---- file: src/compas_plotters/artists/lineartist.py | lang: Python | size: 3,091 bytes | license: MIT | repo: XingxinHE/compas @ d2901dbbacdaf4694e5adae78ba8f093f10532bf | hexsha: ed5c8a4473db3e1f846fdf5ddd27546849b2b2e4 ----
from compas_plotters.artists import Artist
from matplotlib.lines import Line2D
from compas.geometry import intersection_line_box_xy
__all__ = ['LineArtist']
class LineArtist(Artist):
""""""
zorder = 1000
def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)):
super(LineArtist, self).__init__(line)
self._mpl_line = None
self._start_artist = None
self._end_artist = None
self._segment_artist = None
self._draw_points = draw_points
self._draw_as_segment = draw_as_segment
self.line = line
self.linewidth = linewidth
self.linestyle = linestyle
self.color = color
def clip(self):
xlim, ylim = self.plotter.viewbox
xmin, xmax = xlim
ymin, ymax = ylim
box = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]]
return intersection_line_box_xy(self.line, box)
@property
def data(self):
return [self.line.start[:2], self.line.end[:2]]
def draw(self):
if self._draw_as_segment:
x0, y0 = self.line.start[:2]
x1, y1 = self.line.end[:2]
line2d = Line2D([x0, x1], [y0, y1],
linewidth=self.linewidth,
linestyle=self.linestyle,
color=self.color,
zorder=self.zorder)
self._mpl_line = self.plotter.axes.add_line(line2d)
if self._draw_points:
self._start_artist = self.plotter.add(self.line.start)
self._end_artist = self.plotter.add(self.line.end)
else:
points = self.clip()
if points:
p0, p1 = points
x0, y0 = p0[:2]
x1, y1 = p1[:2]
line2d = Line2D([x0, x1], [y0, y1],
linewidth=self.linewidth,
linestyle=self.linestyle,
color=self.color,
zorder=self.zorder)
self._mpl_line = self.plotter.axes.add_line(line2d)
if self._draw_points:
self._start_artist = self.plotter.add(self.line.start)
self._end_artist = self.plotter.add(self.line.end)
def redraw(self):
if self._draw_as_segment:
x0, y0 = self.line.start[:2]
x1, y1 = self.line.end[:2]
self._mpl_line.set_xdata([x0, x1])
self._mpl_line.set_ydata([y0, y1])
self._mpl_line.set_color(self.color)
self._mpl_line.set_linewidth(self.linewidth)
else:
points = self.clip()
if points:
p0, p1 = points
x0, y0 = p0[:2]
x1, y1 = p1[:2]
self._mpl_line.set_xdata([x0, x1])
self._mpl_line.set_ydata([y0, y1])
self._mpl_line.set_color(self.color)
self._mpl_line.set_linewidth(self.linewidth)
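# Illustrative usage (a sketch; assumes the compas_plotters Plotter dispatches
# Line geometry to this artist and accepts the artist kwargs shown):
#
#   from compas.geometry import Line
#   from compas_plotters import Plotter
#   plotter = Plotter()
#   plotter.add(Line([0, 0, 0], [1, 1, 0]), draw_points=True)
#   plotter.show()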
---- file: plot2d_artificial_dataset1_silvq.py | lang: Python | size: 1,018 bytes | license: BSD-3-Clause | repo: manome/python-silvq @ b50d7486e970fbe9a5b66dd3fc5beb8b5de8ca2f | hexsha: ed5d3821ab68704ffac0f126c20afbf8dae239de ----
# -*- encoding: utf8 -*-
import numpy as np
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from lvq import SilvqModel
from lvq.utils import plot2d
def main():
# Load dataset
dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',')
x = dataset[:, :-1].astype('float64')
y = dataset[:, -1].astype('int64')
# Split dataset into training set and test set
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y)
# Generating model
model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls')
# Training the model
model.fit(x_train, y_train, epochs=30)
# Predict the response for test dataset
y_predict = model.predict(x_test)
# Evaluating the model
print('Accuracy: %.3f' %accuracy_score(y_test, y_predict))
# Plot prediction results and prototypes
plot2d(model, x, y, title='Artificial dataset1')
if __name__ == '__main__':
main()
---- file: classification_experiments/Fine-Tuned-ResNet-50/Fine-Tuned-ResNet-50.py | lang: Python | size: 7,727 bytes | license: CC-BY-4.0 | repos: ifr1m/hyper-kvasir (38 stars) @ 21cc366e78c0cb4e180a26a0e441d6c0d5171da9, smaranjitghose/hyper-kvasir (2 issues, 11 forks) @ b4815d151ef90cffa1bbc8fbf97cd091a20ce600 | hexsha: ed5d69e17539392ab832fd82b04ce64e261c7b31 ----
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
#Importing all required libraries
# In[ ]:
from __future__ import absolute_import, division, print_function, unicode_literals
# In[ ]:
#Checking for correct cuda and tf versions
from tensorflow.python.platform import build_info as tf_build_info
print(tf_build_info.cuda_version_number)
# 9.0 in v1.10.0
print(tf_build_info.cudnn_version_number)
# 7 in v1.10.0
# In[ ]:
import tensorflow as tf
import pathlib
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import os
import numpy as np
import matplotlib.pyplot as plt
# In[ ]:
AUTOTUNE = tf.data.experimental.AUTOTUNE
# In[ ]:
import IPython.display as display
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import os
# In[ ]:
tf.__version__
# In[ ]:
#Train and test data folder
train_data_dir = "\\hyper-kvasir\\splits\\all\\1"
test_data_dir = "\\hyper-kvasir\\splits\\all\\0"
# In[ ]:
train_data_dir = pathlib.Path(train_data_dir)
test_data_dir = pathlib.Path(test_data_dir)
# In[ ]:
#count how many images are there
image_count = len(list(train_data_dir.glob('*/*.jpg')))
image_count
# In[ ]:
total_train = len(list(train_data_dir.glob('*/*.jpg')))
total_val = len(list(test_data_dir.glob('*/*.jpg')))
# In[ ]:
#get the class names
CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name != "LICENSE.txt"])
CLASS_NAMES
# In[ ]:
#Define parameter for training
batch_size = 32
IMG_HEIGHT = 224
IMG_WIDTH = 224
STEPS_PER_EPOCH = np.ceil(image_count/batch_size)
epochs = 8
num_classes = len(CLASS_NAMES) #23
# In[ ]:
#We use image data generators to load the images and prepare them for the training
train_image_generator = ImageDataGenerator() # Generator for our training data
validation_image_generator = ImageDataGenerator() # Generator for our validation data
train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir),
batch_size=batch_size,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
classes = list(CLASS_NAMES),
class_mode='categorical'
)
val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir),
batch_size=batch_size,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
class_mode='categorical',
classes = list(CLASS_NAMES)
)
#get class order from directories
print(train_data_gen.class_indices.keys())
print(val_data_gen.class_indices.keys())
# In[ ]:
IMG_SIZE = 224
IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3)
# base model from the pre-trained model. Resnet 50 in this case
base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,
include_top=False,
weights='imagenet')
base_model.trainable = False
# In[ ]:
#add new classification layer
x = base_model.output
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = tf.keras.layers.Dense(num_classes,activation='softmax')(x)
model = tf.keras.models.Model(inputs=base_model.input, outputs=x)
base_learning_rate = 0.001
model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate),
loss='categorical_crossentropy',
metrics=['accuracy'])
# In[ ]:
#fit the model
history = model.fit_generator(
train_data_gen,
steps_per_epoch=total_train // batch_size,
epochs=epochs,
validation_data=val_data_gen,
validation_steps=total_val // batch_size
)
# In[ ]:
#create training plots
history
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
# In[ ]:
base_model.trainable = True #now we want to train the base model
# In[ ]:
# How many layers are in the base model
print("Layers base model: ", len(base_model.layers))
# Fine tune from layer x
fine_tune_at = 100
# Freeze all the layers before the fine tune starting layer
for layer in base_model.layers[:fine_tune_at]:
layer.trainable = False
# In[ ]:
model.compile(loss='categorical_crossentropy',
optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10),
metrics=['accuracy'])
# In[ ]:
model.summary()
# In[ ]:
#Fine tune step
initial_epochs = 7
fine_tune_epochs = 3
total_epochs = initial_epochs + fine_tune_epochs
train_batches = total_train // batch_size
print(total_val // batch_size)
validation_batches = total_val // batch_size
history_fine = model.fit_generator(
train_data_gen,
steps_per_epoch=total_train // batch_size,
epochs=total_epochs,
initial_epoch = history.epoch[-1],
validation_data=val_data_gen,
validation_steps=total_val // batch_size
)
# In[ ]:
acc += history_fine.history['accuracy']
val_acc += history_fine.history['val_accuracy']
loss += history_fine.history['loss']
val_loss += history_fine.history['val_loss']
# In[ ]:
#Plot fine tuning
plt.figure(figsize=(8, 8))
plt.subplot(2, 1, 1)
plt.plot(acc, label='Training Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
plt.ylim([0.8, 1])
plt.plot([initial_epochs-1,initial_epochs-1],
plt.ylim(), label='Start Fine Tuning')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(2, 1, 2)
plt.plot(loss, label='Training Loss')
plt.plot(val_loss, label='Validation Loss')
plt.ylim([0, 1.0])
plt.plot([initial_epochs-1,initial_epochs-1],
plt.ylim(), label='Start Fine Tuning')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.xlabel('epoch')
plt.show()
# In[ ]:
#model save and load
import os
# In[ ]:
#some time stamp
from datetime import datetime
# current date and time.
now = datetime.now()
timestamp = datetime.timestamp(now)
print("timestamp =", timestamp)
# In[ ]:
model_filename = str(timestamp)+'mymodel.h5'
model.save(model_filename)
# In[ ]:
#To apply the model on new data
new_model = tf.keras.models.load_model(model_filename)
# Show the model architecture
new_model.summary()
# In[ ]:
from tensorflow.keras.preprocessing import image
#image directory containing images to test
img_dir="\\polyps"
for i,img in enumerate(os.listdir(img_dir)):
tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE))
tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32')
result_class=new_model.predict(tmpimage)
print(img,";",CLASS_NAMES[result_class.argmax(axis=-1)])
---- file: test/functional/fantasygold_opcall.py | lang: Python | size: 12,380 bytes | license: MIT | repos: FantasyGold/FantasyGold-Core (13 stars), donoel2/FantasyGold-Core (4 issues, 13 forks) @ afff8871e770045e468e2f536ede9db0dff889d5 | hexsha: ed5f32dd2cd9143c086d6a609f05220bf9f92fde ----
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.fantasygold import *
from test_framework.fantasygoldconfig import *
import sys
class OpCallTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [['-txindex=1']]*2
def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False):
# 61bc221a counter()
old_out = int(self.node.callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16)
inpt = make_vin(self.node, input_value)
tx = make_transaction(self.node, [inpt], outputs)
if should_throw:
try:
self.node.sendrawtransaction(tx)
assert(False)
except JSONRPCException as e:
print(e)
pass
else:
self.node.sendrawtransaction(tx)
self.node.generate(1)
sync_blocks(self.nodes)
for i in range(2):
# 61bc221a counter()
out = int(self.nodes[i].callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16)
assert(out-old_out == counter_should_increase_by)
def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by):
# 61bc221a counter()
old_out = int(self.node.callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16)
i = 0
unspents = self.node.listunspent()
while i < num_txs and len(unspents) > 0:
# Select as input a tx which has at least 5 fantasygold spendable
for tx_i in range(len(unspents)):
if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']:
break
else:
assert(False)
inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0)
tx = make_transaction(self.node, [inpt], outputs)
txid = self.node.sendrawtransaction(tx)
unspents = self.node.listunspent()
i += 1
self.node.generate(1)
sync_blocks(self.nodes)
for i in range(2):
# 61bc221a counter()
out = int(self.nodes[i].callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16)
assert(out-old_out == counter_should_increase_by)
# Deploy the testing contract
def create_contract_test(self):
"""
pragma solidity ^0.4.10;
contract Example {
uint public counter;
function inc() public {
counter += 1;
}
function getBalance() public constant returns (uint) {
return this.balance;
}
function deposit() public payable {
}
}
"""
contract_data = self.node.createcontract("6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029", 1000000)
self.contract_address = contract_data['address']
block_height = self.node.getblockcount()
self.node.generate(1)
sync_blocks(self.nodes)
for i in range(2):
assert(self.nodes[i].getblockcount() == block_height+1)
assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS)
# Sends a tx containing 2 op_call outputs calling inc()
def many_calls_in_same_tx_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE)
# Sends a normal raw op_call tx with a single output.
def normal_op_call_output_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", b"\xff\x7f", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE)
# Sends a tx containing 1 op_call output where txfee == gas_price*gas_limit.
def gas_equal_to_tx_fee_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE)
# Sends a tx containing 1 op_call output where txfee < gas_price*gas_limit.
def gas_exceeding_tx_fee_100001_1_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True)
# Sends a tx containing 1 op_call output where txfee < gas_price*gas_limit.
def gas_exceeding_tx_fee_100001_2_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True)
# Sends a tx containing 2 op_call outputs that has a combined gas_price*gas_limit exceeding the tx fee.
# This tx should be rejected since executing such a tx would be unable to pay for its potential execution costs in the same way as a tx with one output where txfee < gas_price*gas_limit.
def two_calls_in_same_tx_exceeding_tx_fee_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True)
# sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas limit calling inc()
def gas_limit_signedness_test(self):
outputs = []
gas_limit = b"\xff"
while len(gas_limit) < 20:
outputs.append(make_op_call_output(0, b"\x04", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000))
gas_limit += b"\xff"
# sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas limit calling inc()
def gas_limit_signedness_one_valid_test(self):
outputs = []
gas_limit = b"\xff"
outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff\x00", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE)
# sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas price calling inc()
def gas_price_signedness_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", b"\x01\x00", b"\xff\xff", bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000)
# sends a tx containing 1 op_call output with a possible negative gas limit and price calling inc()
def gas_limit_and_price_signedness_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff", b"\xff", bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff)
# Sends 100 valid op_call txs
def send_100_txs_test(self):
outputs = []
outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address)))
self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100)
def send_tx_with_value_test(self):
outputs = []
# d0e30db0 deposit()
outputs.append(make_op_call_output(100000000, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("d0e30db0"), bytes.fromhex(self.contract_address)))
self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE)
# 12065fe0 getBalance()
balance = int(self.node.callcontract(self.contract_address, "12065fe0")['executionResult']['output'], 16)
assert(balance == 100000000)
def run_test(self):
self.node = self.nodes[0]
connect_nodes(self.nodes[0], 1)
self.nodes[0].generate(200+COINBASE_MATURITY)
self.node.sendmany("", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / Decimal('100000000') for i in range(200)})
print("Creating contract")
self.create_contract_test()
print("Calling inc() in two outputs")
self.many_calls_in_same_tx_test()
print("Calling inc() in one output")
self.normal_op_call_output_test()
print("Calling inc() in one output with txfee equal to gas_limit*gas_price")
self.gas_equal_to_tx_fee_test()
print("Calling inc() in one output with txfee < gas_limit*gas_price")
self.gas_exceeding_tx_fee_100001_1_test()
print("Second test of inc() in one outputs with txfee < gas_limit*gas_price")
self.gas_exceeding_tx_fee_100001_2_test()
print("Second test of inc() in one output with txfee < gas_limit*gas_price")
self.two_calls_in_same_tx_exceeding_tx_fee_test()
print("Mining a block with 100 txs each with an output calling inc()")
self.send_100_txs_test()
print("Checking that the value of txs are correctly updated")
self.send_tx_with_value_test()
print("Checking gas limit signedness where one tx is valid")
self.gas_limit_signedness_one_valid_test()
print("Checking gas limit signedness")
self.gas_limit_signedness_test()
print("Checking gas price signedness")
self.gas_price_signedness_test()
print("Checking gas limit and gas price signedness")
self.gas_limit_and_price_signedness_test()
if __name__ == '__main__':
OpCallTest().main()
---- file: intake_sklearn/source.py | lang: Python | size: 1,883 bytes | license: BSD-3-Clause | repo: AlbertDeFusco/intake-sklearn (1 star, 1 issue, 1 fork) @ 6cd0e11b26703712eb338032518e5c55b725c48f | hexsha: ed5fdee808e9a889711f8e8007e05b2a81263072 ----
from intake.source.base import DataSource, Schema
import joblib
import fsspec
import sklearn
import re
from . import __version__
class SklearnModelSource(DataSource):
container = 'python'
name = 'sklearn'
version = __version__
partition_access = False
def __init__(self, urlpath, storage_options=None, metadata=None):
"""
Parameters
----------
urlpath: str, location of model pkl file
Either the absolute or relative path to the file or URL to be
opened. Some examples:
- ``{{ CATALOG_DIR }}/models/model.pkl``
- ``s3://some-bucket/models/model.pkl``
"""
self._urlpath = urlpath
self._storage_options = storage_options or {}
super().__init__(metadata=metadata)
def _load(self):
with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f:
return f.read()
def _get_schema(self):
as_binary = self._load()
s = re.search(rb'_sklearn_versionq(.*\x00)((\d+\.)?(\d+\.)?(\*|\d+))q', as_binary)
if s:
sklearn_version = s.group(2).decode()
else:
sklearn_version = None
self._schema = Schema(
npartitions=1,
extra_metadata={
'sklearn_version':sklearn_version
}
)
return self._schema
def read(self):
self._load_metadata()
if not self.metadata['sklearn_version'] == sklearn.__version__:
msg = ('The model was created with Scikit-Learn version {} '
'but version {} has been installed in your current environment.'
).format(self.metadata['sklearn_version'], sklearn.__version__)
raise RuntimeError(msg)
with fsspec.open(self._urlpath, **self._storage_options) as f:
return joblib.load(f)
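# Illustrative usage (a sketch; the URL is a placeholder for a joblib-saved
# scikit-learn estimator):
#
#   src = SklearnModelSource('s3://some-bucket/models/model.pkl')
#   src.discover()      # populates metadata['sklearn_version'] via _get_schema
#   model = src.read()  # raises RuntimeError on a scikit-learn version mismatch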
---- file: jedi/evaluate/dynamic.py | lang: Python | size: 4,915 bytes | license: MIT | repo: hatamov/jedi @ 10df0f933f931a8e0e70304d823f6df0dc3000bd | hexsha: ed607bad1d48fdf5da41de44d6ec206f2716afe4 ----
"""
One of the really important features of |jedi| is to have an option to
understand code like this::
def foo(bar):
bar. # completion here
foo(1)
There's no doubt that bar is an ``int``, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both. Because
that's what a human would expect.
It works as follows:
- |Jedi| sees a param
- search for function calls named ``foo``
- execute these calls and check the input. This works with a ``ParamListener``.
"""
from itertools import chain
from jedi._compatibility import unicode
from jedi.parser import tree as pr
from jedi import settings
from jedi import debug
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import imports
class ParamListener(object):
"""
This listener is used to get the params for a function.
"""
def __init__(self):
self.param_possibilities = []
def execute(self, params):
self.param_possibilities += params
@debug.increase_indent
def search_params(evaluator, param):
"""
A dynamic search for param values. If you try to complete a type:
>>> def func(foo):
... foo
>>> func(1)
>>> func("")
It is not known what type ``foo`` is without analysing the whole code. You
have to look for all calls to ``func`` to find out what ``foo`` possibly
is.
"""
if not settings.dynamic_params:
return []
debug.dbg('Dynamic param search for %s', param)
func = param.get_parent_until(pr.Function)
# Compare the param names.
names = [n for n in search_function_call(evaluator, func)
if n.value == param.name.value]
# Evaluate the ExecutedParams to types.
result = list(chain.from_iterable(n.parent.eval(evaluator) for n in names))
debug.dbg('Dynamic param result %s', result)
return result
@memoize_default([], evaluator_is_first_arg=True)
def search_function_call(evaluator, func):
"""
Returns a list of param names.
"""
from jedi.evaluate import representation as er
def get_params_for_module(module):
"""
Returns the values of a param, or an empty array.
"""
@memoize_default([], evaluator_is_first_arg=True)
def get_posibilities(evaluator, module, func_name):
try:
names = module.used_names[func_name]
except KeyError:
return []
for name in names:
parent = name.parent
if pr.is_node(parent, 'trailer'):
parent = parent.parent
trailer = None
if pr.is_node(parent, 'power'):
for t in parent.children[1:]:
if t == '**':
break
if t.start_pos > name.start_pos and t.children[0] == '(':
trailer = t
break
if trailer is not None:
types = evaluator.goto_definition(name)
# We have to remove decorators, because they are not the
# "original" functions, this way we can easily compare.
# At the same time we also have to remove InstanceElements.
undec = []
for escope in types:
if escope.isinstance(er.Function, er.Instance) \
and escope.decorates is not None:
undec.append(escope.decorates)
elif isinstance(escope, er.InstanceElement):
undec.append(escope.var)
else:
undec.append(escope)
if er.wrap(evaluator, compare) in undec:
# Only if we have the correct function we execute
# it, otherwise just ignore it.
evaluator.eval_trailer(types, trailer)
return listener.param_possibilities
return get_posibilities(evaluator, module, func_name)
current_module = func.get_parent_until()
func_name = unicode(func.name)
compare = func
if func_name == '__init__':
cls = func.get_parent_scope()
if isinstance(cls, pr.Class):
func_name = unicode(cls.name)
compare = cls
# add the listener
listener = ParamListener()
func.listeners.add(listener)
try:
result = []
# This is like backtracking: Get the first possible result.
for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name):
result = get_params_for_module(mod)
if result:
break
finally:
# cleanup: remove the listener; important: should not stick.
func.listeners.remove(listener)
return result
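# Illustrative entry point (a sketch; `evaluator` and `param` are produced by
# jedi's evaluation machinery rather than constructed by hand):
#
#   types = search_params(evaluator, param)  # possible types for the param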
---- file: steamcheck/views.py | lang: Python | size: 2,245 bytes | license: MIT | repo: moird/linux-game-report @ 8c3204d857134b0685bc3c213cd9d9e9f9a5f2fd | hexsha: ed614dd2f553e42b3e9876c261fcf0d4bfb4705a ----
from steamcheck import app
from flask import jsonify, render_template
import os
import steamapi
import json
@app.route('/')
def index():
return render_template("index.html")
@app.route('/report/<name>')
def report(name=None):
"""
This will generate the report based on the user's Steam ID. Returns JSON.
:param name: Steam ID (either numerical ID or vanity url: steamcommunity.com/id/moird)
:return: JSON object that contains a listing of all Linux games and general information about them:
{
"steamuser": "real steam name",
"image": "steam user image url",
"games": [{'gametitle', {"linux":true}}]
"error": ""
}
"""
process_report = {}
try:
# See if we are running on heroku or not. Could probably set an environment variable for this as well.
if os.path.exists('/app/assets/GAMES.json'):
linux_game_list = '/app/assets/GAMES.json'
winehq_list = '/app/assets/winehq.json'
else:
linux_game_list = './assets/GAMES.json'
winehq_list = './assets/winehq.json'
with open(linux_game_list) as linux_game_list_raw:
linux_games = json.load(linux_game_list_raw)
with open(winehq_list) as winehq_raw:
winehq_apps = json.load(winehq_raw)
steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key'])
try:
user = steamapi.user.SteamUser(userid=int(name))
except ValueError:
# Once this is further along, this fallback will be removed; we really don't want to do this.
user = steamapi.user.SteamUser(userurl=name)
process_report['steamuser'] = user.name
process_report['image'] = user.avatar
process_report['games'] = {}
for game in user.games:
linux = False
winehq = False
if str(game.id) in linux_games:
linux = True
if game.name in winehq_apps:
winehq = winehq_apps[game.name]
process_report['games'][game.id] = {"name": game.name, "linux": linux, "winehq":winehq}
except Exception as e:
process_report['error'] = str(e)  # jsonify cannot serialize exception objects directly
return jsonify(**process_report)
---- file: validator/delphi_validator/run.py | lang: Python | size: 516 bytes | license: MIT | repo: benjaminysmith/covidcast-indicators @ b1474cd68a1497166fefe4beffd4d5ff867b9a61 | hexsha: ed6236b34ab65a1e059ca45441d455cec6bd4e90 ----
# -*- coding: utf-8 -*-
"""Functions to call when running the tool.
This module should contain a function called `run_module`, that is executed
when the module is run with `python -m delphi_validator`.
"""
from delphi_utils import read_params
from .validate import Validator
def run_module():
"""Run the validator as a module."""
parent_params = read_params()
params = parent_params['validation']
validator = Validator(params)
validator.validate(parent_params["export_dir"]).print_and_exit()
---- file: datasets/validation_folders.py | lang: Python | size: 1,876 bytes | license: MIT | repo: zenithfang/supervised_dispnet (39 stars, 7 issues, 10 forks) @ f81dfccfdc944e015d8fae17e24b3e664bec14d6 | hexsha: ed62a0f3bd61d82280e96fe9d14711d5df97f622 ----
import torch.utils.data as data
import numpy as np
from imageio import imread
from path import Path
import pdb
def crawl_folders(folders_list):
imgs = []
depth = []
for folder in folders_list:
current_imgs = sorted(folder.files('*.jpg'))
current_depth = []
for img in current_imgs:
d = img.dirname()/(img.name[:-4] + '.npy')
assert(d.isfile()), "depth file {} not found".format(str(d))
current_depth.append(d)
imgs.extend(current_imgs)
depth.extend(current_depth)
return imgs, depth
def load_as_float(path):
return imread(path).astype(np.float32)
class ValidationSet(data.Dataset):
"""A sequence data loader where the files are arranged in this way:
root/scene_1/0000000.jpg
root/scene_1/0000000.npy
root/scene_1/0000001.jpg
root/scene_1/0000001.npy
..
root/scene_2/0000000.jpg
root/scene_2/0000000.npy
.
transform functions must take in a list of images and a numpy array (which can be None)
"""
def __init__(self, root, transform=None):
self.root = Path(root)
scene_list_path = self.root/'val.txt'
self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)]
self.imgs, self.depth = crawl_folders(self.scenes)
self.transform = transform
def __getitem__(self, index):
img = load_as_float(self.imgs[index])
depth = np.load(self.depth[index]).astype(np.float32)
if self.transform is not None:
img, _, _ = self.transform([img], depth, None)  # depth only fills the shared compose transform's signature; its transformed value is unused here
img = img[0]
return img, depth
def __len__(self):
return len(self.imgs)
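# Illustrative usage (a sketch; the root path is a placeholder and must contain
# val.txt plus the scene folders described in the class docstring):
#
#   val_set = ValidationSet('/data/kitti_val', transform=None)
#   img, depth = val_set[0]
#   loader = data.DataLoader(val_set, batch_size=4, shuffle=False)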
---- file: pysaurus/database/special_properties.py | lang: Python | size: 1,302 bytes | license: MIT | repo: notoraptor/pysaurus (4 issues) @ 3bf5fe8c15e0e0e580e5edaea05b4a1298641367 | hexsha: ed6310e1d8d83cf871e0d32a527ca7f1529b58ca ----
from abc import abstractmethod
from pysaurus.database.properties import PropType
from pysaurus.database.video import Video
class SpecialPropType(PropType):
__slots__ = ()
@abstractmethod
def get(self, video: Video):
raise NotImplementedError()
class PropError(SpecialPropType):
__slots__ = ()
def __init__(self):
super().__init__("<error>", "", True)
def get(self, video: Video):
return sorted(set(video.errors) | set(video.properties.get(self.name, ())))
class SpecialProperties:
properties = [PropError()]
@classmethod
def install(cls, database):
to_save = False
for expected in cls.properties:
if (
not database.has_prop_type(expected.name)
or database.get_prop_type(expected.name) != expected
):
database.remove_prop_type(expected.name)
database.add_prop_type(expected)
to_save = True
if to_save:
database.save()
@classmethod
def all_in(cls, video: Video):
return all(prop.name in video.properties for prop in cls.properties)
@classmethod
def set(cls, video: Video):
for prop in cls.properties:
video.properties[prop.name] = prop.get(video)
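# Illustrative lifecycle (a sketch; `database` and `video` are pysaurus objects
# assumed to exist elsewhere):
#
#   SpecialProperties.install(database)  # ensure the special prop types exist
#   SpecialProperties.set(video)         # fill them for a given video
#   assert SpecialProperties.all_in(video)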
---- file: patrole_tempest_plugin/rbac_utils.py | lang: Python | size: 19,613 bytes | license: Apache-2.0 | repo: openstack/patrole (14 stars, 12 forks) @ fa0ee135121a5e86301ad5ee1854b3a0bd70b69b | hexsha: ed65a740d0a6c0e521ed5a04db6b899535f0bcde ----
# Copyright 2017 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import sys
import time
from oslo_log import log as logging
from oslo_utils import excutils
from tempest import config
from tempest.lib import exceptions as lib_exc
from patrole_tempest_plugin import rbac_exceptions
CONF = config.CONF
LOG = logging.getLogger(__name__)
class _ValidateListContext(object):
"""Context class responsible for validation of the list functions.
This class is used in ``override_role_and_validate_list`` function and
the result of a list function must be assigned to the ``ctx.resources``
variable.
Example::
with self.override_role_and_validate_list(...) as ctx:
ctx.resources = list_function()
"""
def __init__(self, admin_resources=None, admin_resource_id=None):
"""Constructor for ``ValidateListContext``.
Either ``admin_resources`` or ``admin_resource_id`` should be used,
not both.
:param list admin_resources: The list of resources received before
calling the ``override_role_and_validate_list`` function. To
validate will be used the ``_validate_len`` function.
:param UUID admin_resource_id: An ID of a resource created before
calling the ``override_role_and_validate_list`` function. To
validate will be used the ``_validate_resource`` function.
:raises RbacValidateListException: if both ``admin_resources`` and
``admin_resource_id`` are set or unset.
"""
self.resources = None
if admin_resources is not None and not admin_resource_id:
self._admin_len = len(admin_resources)
if not self._admin_len:
raise rbac_exceptions.RbacValidateListException(
reason="the list of admin resources cannot be empty")
self._validate_func = self._validate_len
elif admin_resource_id and admin_resources is None:
self._admin_resource_id = admin_resource_id
self._validate_func = self._validate_resource
else:
raise rbac_exceptions.RbacValidateListException(
reason="admin_resources and admin_resource_id are mutually "
"exclusive")
def _validate_len(self):
"""Validates that the number of resources is less than admin resources.
"""
if not len(self.resources):
raise rbac_exceptions.RbacEmptyResponseBody()
elif self._admin_len > len(self.resources):
raise rbac_exceptions.RbacPartialResponseBody(body=self.resources)
def _validate_resource(self):
"""Validates that the admin resource is present in the resources.
"""
for resource in self.resources:
if resource['id'] == self._admin_resource_id:
return
raise rbac_exceptions.RbacPartialResponseBody(body=self.resources)
def _validate(self):
"""Calls the proper validation function.
:raises RbacValidateListException: if the ``ctx.resources`` variable is
not assigned.
"""
if self.resources is None:
raise rbac_exceptions.RbacValidateListException(
reason="ctx.resources is not assigned")
self._validate_func()
class RbacUtilsMixin(object):
"""Utility mixin responsible for switching ``os_primary`` role.
Should be used as a mixin class alongside an instance of
:py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a
base RBAC class. Child classes should not use this mixin.
Example::
class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest):
@classmethod
def setup_clients(cls):
super(BaseRbacTest, cls).setup_clients()
cls.hosts_client = cls.os_primary.hosts_client
...
This class is responsible for overriding the value of the primary Tempest
credential's role (i.e. ``os_primary`` role). By doing so, it is possible
to seamlessly swap between admin credentials, needed for setup and clean
up, and primary credentials, needed to perform the API call which does
policy enforcement. The primary credentials always cycle between roles
defined by ``CONF.identity.admin_role`` and
``CONF.patrole.rbac_test_roles``.
"""
credentials = ['primary', 'admin']
def __init__(self, *args, **kwargs):
super(RbacUtilsMixin, self).__init__(*args, **kwargs)
# Shows if override_role was called.
self.__override_role_called = False
# Shows if exception raised during override_role.
self.__override_role_caught_exc = False
_admin_role_id = None
_rbac_role_ids = None
_project_id = None
_user_id = None
_role_map = None
_role_inferences_mapping = None
_orig_roles = []
admin_roles_client = None
@classmethod
def restore_roles(cls):
if cls._orig_roles:
LOG.info("Restoring original roles %s", cls._orig_roles)
roles_already_present = cls._list_and_clear_user_roles_on_project(
cls._orig_roles)
if not roles_already_present:
cls._create_user_role_on_project(cls._orig_roles)
@classmethod
def setup_clients(cls):
if CONF.identity_feature_enabled.api_v3:
admin_roles_client = cls.os_admin.roles_v3_client
else:
raise lib_exc.InvalidConfiguration(
"Patrole role overriding only supports v3 identity API.")
cls.admin_roles_client = admin_roles_client
cls._project_id = cls.os_primary.credentials.tenant_id
cls._user_id = cls.os_primary.credentials.user_id
cls._role_inferences_mapping = cls._prepare_role_inferences_mapping()
cls._init_roles()
# Store the user's original roles and rollback after testing.
roles = cls.admin_roles_client.list_user_roles_on_project(
cls._project_id, cls._user_id)['roles']
cls._orig_roles = [role['id'] for role in roles]
cls.addClassResourceCleanup(cls.restore_roles)
# Change default role to admin
cls._override_role(False)
super(RbacUtilsMixin, cls).setup_clients()
@classmethod
def _prepare_role_inferences_mapping(cls):
"""Preparing roles mapping to support role inferences
Making query to `list-all-role-inference-rules`_ keystone API
returns all inference rules, which makes it possible to prepare
roles mapping.
It walks recursively through the raw data::
{"role_inferences": [
{
"implies": [{"id": "3", "name": "reader"}],
"prior_role": {"id": "2", "name": "member"}
},
{
"implies": [{"id": "2", "name": "member"}],
"prior_role": {"id": "1", "name": "admin"}
}
]
}
and converts it to the mapping::
{
"2": ["3"], # "member": ["reader"],
"1": ["2", "3"] # "admin": ["member", "reader"]
}
.. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules
""" # noqa: E501
def process_roles(role_id, data):
roles = data.get(role_id, set())
for rid in roles.copy():
roles.update(process_roles(rid, data))
return roles
def convert_data(data):
res = {}
for rule in data:
prior_role = rule['prior_role']['id']
implies = {r['id'] for r in rule['implies']}
res[prior_role] = implies
return res
raw_data = cls.admin_roles_client.list_all_role_inference_rules()
data = convert_data(raw_data['role_inferences'])
res = {}
for role_id in data:
res[role_id] = process_roles(role_id, data)
return res
def get_all_needed_roles(self, roles):
"""Extending given roles with roles from mapping
Examples::
["admin"] >> ["admin", "member", "reader"]
["member"] >> ["member", "reader"]
["reader"] >> ["reader"]
["custom_role"] >> ["custom_role"]
:param roles: list of roles
:return: extended list of roles
"""
res = set(r for r in roles)
for role in res.copy():
role_id = self.__class__._role_map.get(role)
implied_roles = self.__class__._role_inferences_mapping.get(
role_id, set())
role_names = {self.__class__._role_map[rid]
for rid in implied_roles}
res.update(role_names)
LOG.debug('All needed roles: %s; Base roles: %s', res, roles)
return list(res)
@contextlib.contextmanager
def override_role(self):
"""Override the role used by ``os_primary`` Tempest credentials.
Temporarily change the role used by ``os_primary`` credentials to:
* ``[patrole] rbac_test_roles`` before test execution
* ``[identity] admin_role`` after test execution
Automatically switches to admin role after test execution.
:returns: None
.. warning::
This function can alter user roles for pre-provisioned credentials.
Work is underway to safely clean up after this function.
Example::
@rbac_rule_validation.action(service='test',
rules=['a:test:rule'])
def test_foo(self):
# Allocate test-level resources here.
with self.override_role():
# The role for `os_primary` has now been overridden. Within
# this block, call the API endpoint that enforces the
# expected policy specified by "rule" in the decorator.
self.foo_service.bar_api_call()
# The role is switched back to admin automatically. Note that
# if the API call above threw an exception, any code below this
# point in the test is not executed.
"""
self._set_override_role_called()
self._override_role(True)
try:
# Execute the test.
yield
finally:
# Check whether an exception was raised. If so, remember that
# for future validation.
exc = sys.exc_info()[0]
if exc is not None:
self._set_override_role_caught_exc()
# This code block is always executed, no matter the result of the
# test. Automatically switch back to the admin role for test clean
# up.
self._override_role(False)
@classmethod
def _override_role(cls, toggle_rbac_role=False):
"""Private helper for overriding ``os_primary`` Tempest credentials.
:param toggle_rbac_role: Boolean value that controls the role that
overrides default role of ``os_primary`` credentials.
* If True: role is set to ``[patrole] rbac_test_roles``
* If False: role is set to ``[identity] admin_role``
"""
LOG.debug('Overriding role to: %s.', toggle_rbac_role)
roles_already_present = False
try:
target_roles = (cls._rbac_role_ids
if toggle_rbac_role else [cls._admin_role_id])
roles_already_present = cls._list_and_clear_user_roles_on_project(
target_roles)
# Do not override roles if `target_roles` are already present.
if not roles_already_present:
cls._create_user_role_on_project(target_roles)
except Exception as exp:
with excutils.save_and_reraise_exception():
LOG.exception(exp)
finally:
auth_providers = cls.get_auth_providers()
for provider in auth_providers:
provider.clear_auth()
# Fernet tokens are not subsecond aware so sleep to ensure we are
# passing the second boundary before attempting to authenticate.
# Only sleep if a token revocation occurred as a result of role
# overriding. This will optimize test runtime in the case where
# ``[identity] admin_role`` == ``[patrole] rbac_test_roles``.
if not roles_already_present:
time.sleep(1)
for provider in auth_providers:
provider.set_auth()
@classmethod
def _init_roles(cls):
available_roles = cls.admin_roles_client.list_roles()['roles']
cls._role_map = {r['name']: r['id'] for r in available_roles}
LOG.debug('Available roles: %s', cls._role_map.keys())
rbac_role_ids = []
roles = CONF.patrole.rbac_test_roles
# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
if CONF.patrole.rbac_test_role:
if not roles:
roles.append(CONF.patrole.rbac_test_role)
for role_name in roles:
rbac_role_ids.append(cls._role_map.get(role_name))
admin_role_id = cls._role_map.get(CONF.identity.admin_role)
if not all([admin_role_id, all(rbac_role_ids)]):
missing_roles = []
msg = ("Could not find `[patrole] rbac_test_roles` or "
"`[identity] admin_role`, both of which are required for "
"RBAC testing.")
if not admin_role_id:
missing_roles.append(CONF.identity.admin_role)
if not all(rbac_role_ids):
missing_roles += [role_name for role_name in roles
if role_name not in cls._role_map]
msg += " Following roles were not found: %s." % (
", ".join(missing_roles))
msg += " Available roles: %s." % ", ".join(cls._role_map)
raise rbac_exceptions.RbacResourceSetupFailed(msg)
cls._admin_role_id = admin_role_id
cls._rbac_role_ids = rbac_role_ids
# Adding backward mapping
cls._role_map.update({v: k for k, v in cls._role_map.items()})
@classmethod
def _create_user_role_on_project(cls, role_ids):
for role_id in role_ids:
cls.admin_roles_client.create_user_role_on_project(
cls._project_id, cls._user_id, role_id)
@classmethod
def _list_and_clear_user_roles_on_project(cls, role_ids):
roles = cls.admin_roles_client.list_user_roles_on_project(
cls._project_id, cls._user_id)['roles']
all_role_ids = [role['id'] for role in roles]
# NOTE(felipemonteiro): We do not use ``role_id in all_role_ids`` here
# to avoid over-permission errors: if the current list of roles on the
# project includes "admin" and "Member", and we are switching to the
# "Member" role, then we must delete the "admin" role. Thus, we only
# return early if the user's roles on the project are an exact match.
if set(role_ids) == set(all_role_ids):
return True
for role in roles:
cls.admin_roles_client.delete_role_from_user_on_project(
cls._project_id, cls._user_id, role['id'])
return False
@contextlib.contextmanager
def override_role_and_validate_list(self,
admin_resources=None,
admin_resource_id=None):
"""Call ``override_role`` and validate RBAC for a list API action.
List actions usually do soft authorization: partial or empty response
bodies are returned instead of exceptions. This helper validates
that unauthorized roles only return a subset of the available
resources.
Should only be used for validating list API actions.
:param list admin_resources: The list of resources received before
calling the ``override_role_and_validate_list`` function.
:param UUID admin_resource_id: An ID of a resource created before
calling the ``override_role_and_validate_list`` function.
:return: py:class:`_ValidateListContext` object.
Example::
# the resource created by admin
admin_resource_id = (
self.ntp_client.create_dscp_marking_rule()
["dscp_marking_rule"]["id'])
with self.override_role_and_validate_list(
admin_resource_id=admin_resource_id) as ctx:
# the list of resources available for member role
ctx.resources = self.ntp_client.list_dscp_marking_rules(
policy_id=self.policy_id)["dscp_marking_rules"]
"""
ctx = _ValidateListContext(admin_resources, admin_resource_id)
with self.override_role():
yield ctx
ctx._validate()
@classmethod
def get_auth_providers(cls):
"""Returns list of auth_providers used within test.
Tests may redefine this method to include their own or third party
client auth_providers.
"""
return [cls.os_primary.auth_provider]
def _set_override_role_called(self):
"""Helper for tracking whether ``override_role`` was called."""
self.__override_role_called = True
def _set_override_role_caught_exc(self):
"""Helper for tracking whether exception was thrown inside
``override_role``.
"""
self.__override_role_caught_exc = True
def _validate_override_role_called(self):
"""Idempotently validate that ``override_role`` is called and reset
its value to False for sequential tests.
"""
was_called = self.__override_role_called
self.__override_role_called = False
return was_called
def _validate_override_role_caught_exc(self):
"""Idempotently validate that exception was caught inside
``override_role``, so that, by process of elimination, it can be
determined whether one was thrown outside (which is invalid).
"""
caught_exception = self.__override_role_caught_exc
self.__override_role_caught_exc = False
return caught_exception
def is_admin():
"""Verifies whether the current test role equals the admin role.
:returns: True if ``rbac_test_roles`` contain the admin role.
"""
roles = CONF.patrole.rbac_test_roles
# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
if CONF.patrole.rbac_test_role:
roles.append(CONF.patrole.rbac_test_role)
roles = list(set(roles))
# TODO(felipemonteiro): Make this more robust via a context is admin
# lookup.
return CONF.identity.admin_role in roles
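# Minimal, dependency-free sketch of the membership check performed by
# is_admin() above (hypothetical role lists; the real values come from
# tempest's CONF object):
def _is_admin_sketch(rbac_test_roles, admin_role="admin"):
    # De-duplicate, then test membership, mirroring is_admin().
    return admin_role in set(rbac_test_roles)

assert _is_admin_sketch(["admin", "member"]) is True
assert _is_admin_sketch(["member", "reader"]) is False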
| 39.069721
| 120
| 0.629175
| 2,396
| 19,613
| 4.896077
| 0.18823
| 0.036826
| 0.020459
| 0.017646
| 0.29128
| 0.201603
| 0.151394
| 0.122411
| 0.105021
| 0.093087
| 0
| 0.001942
| 0.291133
| 19,613
| 501
| 121
| 39.147705
| 0.841833
| 0.431652
| 0
| 0.209091
| 0
| 0
| 0.054
| 0
| 0
| 0
| 0
| 0.003992
| 0
| 1
| 0.104545
| false
| 0
| 0.036364
| 0
| 0.240909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed66f473c8ee9e1a4cbf088bc3dc94834ee24ff9
| 6,029
|
py
|
Python
|
core/my_widgets/drug_picker.py
|
kimera1999/pmpktn
|
5307b6684a08bac4b88617f097017b5ea4192ab2
|
[
"MIT"
] | null | null | null |
core/my_widgets/drug_picker.py
|
kimera1999/pmpktn
|
5307b6684a08bac4b88617f097017b5ea4192ab2
|
[
"MIT"
] | null | null | null |
core/my_widgets/drug_picker.py
|
kimera1999/pmpktn
|
5307b6684a08bac4b88617f097017b5ea4192ab2
|
[
"MIT"
] | 1
|
2020-05-16T14:28:59.000Z
|
2020-05-16T14:28:59.000Z
|
from initialize import *
from core.db.db_func import query_linedrug_list
import os
import wx
class DrugPopup(wx.ComboPopup):
def __init__(self, parent):
super().__init__()
self.lc = None
self.mv = parent.mv
self.init_d_l = query_linedrug_list(self.mv.sess).all()
self.d_l = []
def Create(self, parent):
self.lc = wx.ListCtrl(
parent,
style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)
self.lc.AppendColumn('Thuốc', width=200)  # "Drug"
self.lc.AppendColumn('Thành phần', width=150)  # "Ingredient"
self.lc.AppendColumn('Số lượng')  # "Quantity"
self.lc.AppendColumn('Đơn giá')  # "Unit price"
self.lc.AppendColumn('Cách dùng', width=100)  # "Directions"
self.lc.Bind(wx.EVT_MOTION, self.OnMotion)
self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Update()
return True
def Init(self):
self.value = -1
self.curitem = -1
def GetControl(self):
return self.lc
def SetStringValue(self, val):
idx = self.lc.FindItem(-1, val)
if idx != wx.NOT_FOUND:
self.lc.Select(idx)
def GetStringValue(self):
if self.value >= 0:
return self.lc.GetItemText(self.value, col=0)
return ""
def GetAdjustedSize(self, minWidth, prefHeight, maxHeight):
return super().GetAdjustedSize(*popup_size)
def Update(self, s=''):
self.lc.DeleteAllItems()
self.d_l = list(filter(
lambda x: s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(),
self.init_d_l))
for index, item in enumerate(self.d_l):
self.lc.Append(
[item.name, item.element, item.quantity, item.sale_price, item.usage])
if item.quantity <= user_setting["so_luong_thuoc_toi_thieu_de_bao_dong_do"]:
self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255))
def OnMotion(self, e):
item, flags = self.lc.HitTest(e.GetPosition())
if item >= 0:
self.lc.Select(item)
self.curitem = item
def OnLeftDown(self, e):
try:
self.value = self.curitem
self.ComboCtrl.drugWH = self.d_l[self.value]
self.Dismiss()
self.ComboCtrl.SelectAll()
self.ComboCtrl.SetInsertionPointEnd()
except IndexError:
self.Dismiss()
def OnPopup(self):
self.Init()
self.Update(self.ComboCtrl.Value)
if self.lc.ItemCount > 0:
if self.curitem < (self.lc.ItemCount - 1):
self.curitem += 1
self.lc.Select(self.curitem)
self.lc.EnsureVisible(self.curitem)
def KeyDown(self):
if self.lc.ItemCount > 0:
if self.curitem < (self.lc.ItemCount - 1):
self.curitem += 1
self.lc.Select(self.curitem)
self.lc.EnsureVisible(self.curitem)
def KeyUp(self):
if self.lc.ItemCount > 0:
if self.curitem > 0:
self.curitem -= 1
self.lc.Select(self.curitem)
self.lc.EnsureVisible(self.curitem)
else:
self.KeyESC()
def KeyESC(self):
a = self.ComboCtrl.Value
self.Dismiss()
self.ComboCtrl.ChangeValue(a)
self.ComboCtrl.SetInsertionPointEnd()
def KeyReturn(self):
self.OnLeftDown(None)
def onKeyPress(self, e):
c = e.GetKeyCode()
if c == wx.WXK_DOWN:
self.KeyDown()
elif c == wx.WXK_UP:
self.KeyUp()
elif c == wx.WXK_ESCAPE:
self.KeyESC()
elif c == wx.WXK_RETURN:
self.KeyReturn()
class DrugPicker(wx.ComboCtrl):
def __init__(self, parent):
super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER)
self.mv = parent.mv
self.drug_popup = DrugPopup(self)
self.SetPopupControl(self.drug_popup)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_TEXT, self.onTextChange)
self.SetHint("Nhแบฅn Enter ฤแป search thuแปc")
self._drugWH = None
self.EnablePopupAnimation(enable=False)
@property
def drugWH(self):
return self._drugWH
@drugWH.setter
def drugWH(self, dwh):
self._drugWH = dwh
pg = self.Parent
if dwh:
pg.usage_unit.Label = dwh.usage_unit + " "
pg.sale_unit.Label = dwh.sale_unit + " "
else:
self.ChangeValue('')
pg.dosage_per.ChangeValue('')
pg.usage_unit.Label = '{đơn vị} '  # "{unit}"
pg.times.ChangeValue("")
pg.quantity.ChangeValue("")
pg.sale_unit.Label = '{đơn vị} '  # "{unit}"
pg.usage.ChangeValue("")
def onKeyPress(self, e):
if os.name == "posix":
if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]:
if not self.IsPopupShown():
self.Popup()
else:
e.Skip()
else:
if e.GetKeyCode() not in [wx.WXK_RETURN,
wx.WXK_UP,
wx.WXK_DOWN,
wx.WXK_ESCAPE]:
if self.IsPopupShown():
a = self.Value
self.Dismiss()
self.ChangeValue(a)
self.SetInsertionPointEnd()
e.Skip()
def onTextChange(self, e):
if os.name == "nt":
if e.String == "":
self.Clear()
elif len(e.String) >= 1:
if not self.IsPopupShown():
self.Popup()
self.SetInsertionPointEnd()
if os.name == "posix":
if e.String == "":
self.Clear()
def Clear(self):
self.drugWH = None
def refreshPopup(self):
self.drug_popup.init_d_l = query_linedrug_list(self.mv.sess).all()
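# Illustrative, standalone sketch of the case-insensitive filter used in
# DrugPopup.Update above. "Drug" is a hypothetical stand-in for the ORM rows
# in init_d_l, which expose .name and .element:
from collections import namedtuple

Drug = namedtuple("Drug", ["name", "element"])
drugs = [Drug("Paracetamol", "acetaminophen"), Drug("Aspirin", "ASA")]
s = "ACETA"
matches = [d for d in drugs
           if s.casefold() in d.name.casefold()
           or s.casefold() in d.element.casefold()]
# matches -> [Drug(name='Paracetamol', element='acetaminophen')]
# (the term matches both the name and the ingredient, case-insensitively)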
| 31.238342
| 96
| 0.543208
| 705
| 6,029
| 4.52766
| 0.255319
| 0.056391
| 0.028195
| 0.026629
| 0.280388
| 0.204887
| 0.148496
| 0.148496
| 0.127193
| 0.114975
| 0
| 0.008551
| 0.340521
| 6,029
| 192
| 97
| 31.401042
| 0.794266
| 0
| 0
| 0.272727
| 0
| 0
| 0.022558
| 0.006469
| 0
| 0
| 0
| 0
| 0
| 1
| 0.139394
| false
| 0
| 0.024242
| 0.018182
| 0.212121
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6710b9dafd0dadb8b0c6608676f1c2e79ad2c8
| 615
|
py
|
Python
|
em Python/Roteiro4/Roteiro4__grafos.py
|
GuilhermeEsdras/Grafos
|
b6556c3d679496d576f65b798a1a584cd73e40f4
|
[
"MIT"
] | null | null | null |
em Python/Roteiro4/Roteiro4__grafos.py
|
GuilhermeEsdras/Grafos
|
b6556c3d679496d576f65b798a1a584cd73e40f4
|
[
"MIT"
] | null | null | null |
em Python/Roteiro4/Roteiro4__grafos.py
|
GuilhermeEsdras/Grafos
|
b6556c3d679496d576f65b798a1a584cd73e40f4
|
[
"MIT"
] | null | null | null |
from Roteiro4.Roteiro4__funcoes import Grafo
class Grafos:
# Paraíba graph
paraiba = Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z'])
for aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']:
paraiba.adicionaAresta(aresta)
# --- #
# Complete graph
grafo_completo = Grafo(['J', 'C', 'E', 'P'])
for aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']:
grafo_completo.adicionaAresta(aresta)
# --- #
# K3
k3 = Grafo(['A', 'B', 'C'])
for aresta in ['A-B', 'B-C', 'C-A']:
k3.adicionaAresta(aresta)
# --- #
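# Illustrative extra example (not in the original file), using only the Grafo
# API exercised above — the vertex-list constructor plus adicionaAresta with
# "X-Y" edge strings:
k4 = Grafo(['A', 'B', 'C', 'D'])
for aresta in ['A-B', 'A-C', 'A-D', 'B-C', 'B-D', 'C-D']:
    k4.adicionaAresta(aresta)
# --- #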
| 23.653846
| 82
| 0.461789
| 90
| 615
| 3.111111
| 0.277778
| 0.035714
| 0.117857
| 0.057143
| 0.192857
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.276423
| 615
| 25
| 83
| 24.6
| 0.617978
| 0.078049
| 0
| 0
| 0
| 0
| 0.122302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed67b3786dc5aa973280b427220b99a230def591
| 464
|
py
|
Python
|
flask/app.py
|
yatsu/react-flask-graphql-example
|
18a38b7602c81a85a3cc38c74440ce34d63fc32a
|
[
"MIT"
] | 21
|
2017-06-24T15:29:30.000Z
|
2021-03-03T06:58:41.000Z
|
flask/app.py
|
yatsu/react-flask-graphql-example
|
18a38b7602c81a85a3cc38c74440ce34d63fc32a
|
[
"MIT"
] | null | null | null |
flask/app.py
|
yatsu/react-flask-graphql-example
|
18a38b7602c81a85a3cc38c74440ce34d63fc32a
|
[
"MIT"
] | 6
|
2018-01-15T06:36:11.000Z
|
2022-03-18T07:57:39.000Z
|
from flask import Flask
from flask_cors import CORS
from flask_graphql import GraphQLView
from schema import Schema
def create_app(**kwargs):
app = Flask(__name__)
app.debug = True
app.add_url_rule(
'/graphql',
view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs)
)
return app
if __name__ == '__main__':
app = create_app(graphiql=True)
CORS(app, resources={r'/graphql': {'origins': '*'}})
app.run()
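# Illustrative client-side check (not part of the original app, and assuming
# the requests package is available): with the server above running on
# Flask's default port, a GraphQL query can be POSTed as JSON. The
# introspection query below is valid against any schema, so no assumptions
# about schema.Schema are needed:
import requests

resp = requests.post(
    "http://localhost:5000/graphql",
    json={"query": "{ __schema { queryType { name } } }"},
)
print(resp.json())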
| 22.095238
| 73
| 0.668103
| 60
| 464
| 4.833333
| 0.466667
| 0.093103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 464
| 20
| 74
| 23.2
| 0.788043
| 0
| 0
| 0
| 0
| 0
| 0.084052
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.25
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed69c7e1252a3ec3f75d6d65d353de14affd6d0c
| 1,628
|
py
|
Python
|
bluesky/tests/utils.py
|
AbbyGi/bluesky
|
759f9c55dce97dc47513cca749a69dd861bdf58d
|
[
"BSD-3-Clause"
] | 43
|
2015-08-04T20:13:41.000Z
|
2019-04-12T17:21:36.000Z
|
bluesky/tests/utils.py
|
AbbyGi/bluesky
|
759f9c55dce97dc47513cca749a69dd861bdf58d
|
[
"BSD-3-Clause"
] | 966
|
2015-07-29T16:43:21.000Z
|
2019-05-09T21:02:28.000Z
|
bluesky/tests/utils.py
|
AbbyGi/bluesky
|
759f9c55dce97dc47513cca749a69dd861bdf58d
|
[
"BSD-3-Clause"
] | 48
|
2019-05-15T18:01:06.000Z
|
2022-03-03T18:53:43.000Z
|
from collections import defaultdict
import contextlib
import tempfile
import sys
import threading
import asyncio
@contextlib.contextmanager
def _print_redirect():
old_stdout = sys.stdout
try:
fout = tempfile.TemporaryFile(mode="w+", encoding="utf-8")
sys.stdout = fout
yield fout
finally:
sys.stdout = old_stdout
class MsgCollector:
def __init__(self, msg_hook=None):
self.msgs = []
self.msg_hook = msg_hook
def __call__(self, msg):
self.msgs.append(msg)
if self.msg_hook:
self.msg_hook(msg)
class DocCollector:
def __init__(self):
self.start = []
self.stop = {}
self.descriptor = defaultdict(list)
self.event = {}
def insert(self, name, doc):
if name == "start":
self.start.append(doc)
elif name == "stop":
self.stop[doc["run_start"]] = doc
elif name == "descriptor":
self.descriptor[doc["run_start"]].append(doc)
self.event[doc["uid"]] = []
elif name == 'bulk_events':
for k, v in doc.items():
self.event[k].extend(v)
else:
self.event[doc["descriptor"]].append(doc)
def _fabricate_asycio_event(loop):
th_ev = threading.Event()
aio_event = None
def really_make_the_event():
nonlocal aio_event
aio_event = asyncio.Event()
th_ev.set()
h = loop.call_soon_threadsafe(really_make_the_event)
if not th_ev.wait(0.1):
h.cancel()
raise Exception("failed to make asyncio event")
return aio_event
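# Illustrative sketch (not part of the original module) of how DocCollector
# groups documents by run; the dicts are hypothetical minimal documents:
dc = DocCollector()
dc.insert("start", {"uid": "run1"})
dc.insert("descriptor", {"uid": "desc1", "run_start": "run1"})
dc.insert("event", {"uid": "ev1", "descriptor": "desc1"})
dc.insert("stop", {"uid": "stop1", "run_start": "run1"})
# dc.start          -> [{"uid": "run1"}]
# dc.descriptor     -> {"run1": [{"uid": "desc1", "run_start": "run1"}]}
# dc.event["desc1"] -> [{"uid": "ev1", "descriptor": "desc1"}]
# dc.stop["run1"]   -> {"uid": "stop1", "run_start": "run1"}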
| 23.941176
| 66
| 0.594595
| 200
| 1,628
| 4.63
| 0.405
| 0.037797
| 0.047516
| 0.030238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002604
| 0.292383
| 1,628
| 67
| 67
| 24.298507
| 0.801215
| 0
| 0
| 0
| 0
| 0
| 0.058968
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12963
| false
| 0
| 0.111111
| 0
| 0.296296
| 0.018519
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed69da856e9dae34d6443933a8a9df258e7f8e95
| 1,116
|
py
|
Python
|
cli/check_json.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | null | null | null |
cli/check_json.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | null | null | null |
cli/check_json.py
|
MJJojo97/openslides-backend
|
af0d1edb0070e352d46f285a1ba0bbe3702d49ae
|
[
"MIT"
] | null | null | null |
import json
import sys
from openslides_backend.models.checker import Checker, CheckException
def main() -> int:
files = sys.argv[1:]
if not files:
print("No files specified.")
return 1
possible_modes = tuple(f"--{mode}" for mode in Checker.modes)
modes = tuple(mode[2:] for mode in possible_modes if mode in files)
if len(modes) == 0:
mode = "all"
elif len(modes) > 1:
print(f"You can only choose one mode of {', '.join(possible_modes)}.")
exit(1)
else:
mode = modes[0]
if len(modes):
files = [x for x in files if x not in possible_modes]
failed = False
for f in files:
with open(f) as data:
try:
Checker(
json.load(data),
mode=mode,
).run_check()
except CheckException as e:
print(f"Check for {f} failed:\n", e)
failed = True
else:
print(f"Check for {f} successful.")
return 1 if failed else 0
if __name__ == "__main__":
sys.exit(main())
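# Illustrative invocations (hypothetical file names; the available --<mode>
# flags are built at runtime from Checker.modes, so they are not listed here):
#
#   python cli/check_json.py export.json
#   python cli/check_json.py --<mode> export.json other_export.json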
| 24.8
| 78
| 0.53405
| 147
| 1,116
| 3.959184
| 0.401361
| 0.089347
| 0.030928
| 0.04811
| 0.051546
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012605
| 0.360215
| 1,116
| 44
| 79
| 25.363636
| 0.802521
| 0
| 0
| 0.057143
| 0
| 0
| 0.130824
| 0.021505
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.085714
| 0
| 0.171429
| 0.114286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6b5de7ad69456fafac8a04559f11ef56300d5e
| 24,607
|
bzl
|
Python
|
web/repositories.bzl
|
Ubehebe/rules_webtesting
|
c231866a3bccc0f27b31050a57dc2b4a700ad64e
|
[
"Apache-2.0"
] | null | null | null |
web/repositories.bzl
|
Ubehebe/rules_webtesting
|
c231866a3bccc0f27b31050a57dc2b4a700ad64e
|
[
"Apache-2.0"
] | null | null | null |
web/repositories.bzl
|
Ubehebe/rules_webtesting
|
c231866a3bccc0f27b31050a57dc2b4a700ad64e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines external repositories needed by rules_webtesting."""
load("//web/internal:platform_http_file.bzl", "platform_http_file")
load("@bazel_gazelle//:deps.bzl", "go_repository")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:java.bzl", "java_import_external")
# NOTE: URLs are mirrored by an asynchronous review process. They must
# be greppable for that to happen. It's OK to submit broken mirror
# URLs, so long as they're correctly formatted. Bazel's downloader
# has fast failover.
def web_test_repositories(**kwargs):
"""Defines external repositories required by Webtesting Rules.
This function exists for other Bazel projects to call from their WORKSPACE
file when depending on rules_webtesting using http_archive. This function
makes it easy to import these transitive dependencies into the parent
workspace. This will check to see if a repository has been previously defined
before defining a new repository.
Alternatively, individual dependencies may be excluded with an
"omit_" + name parameter. This is useful for users who want to be rigorous
about declaring their own direct dependencies, or when another Bazel project
is depended upon (e.g. rules_closure) that defines the same dependencies as
this one (e.g. com_google_guava). Alternatively, a whitelist model may be
used by calling the individual functions this method references.
Please note that while these dependencies are defined, they are not actually
downloaded, unless a target is built that depends on them.
Args:
**kwargs: omit_... parameters used to prevent importing specific
dependencies.
"""
if should_create_repository("bazel_skylib", kwargs):
bazel_skylib()
if should_create_repository("com_github_blang_semver", kwargs):
com_github_blang_semver()
if should_create_repository("com_github_gorilla_context", kwargs):
com_github_gorilla_context()
if should_create_repository("com_github_gorilla_mux", kwargs):
com_github_gorilla_mux()
if should_create_repository("com_github_tebeka_selenium", kwargs):
com_github_tebeka_selenium()
if should_create_repository("com_github_urllib3", kwargs):
com_github_urllib3()
if should_create_repository("com_google_code_findbugs_jsr305", kwargs):
com_google_code_findbugs_jsr305()
if should_create_repository("com_google_code_gson", kwargs):
com_google_code_gson()
if should_create_repository(
"com_google_errorprone_error_prone_annotations",
kwargs,
):
com_google_errorprone_error_prone_annotations()
if should_create_repository("com_google_guava", kwargs):
com_google_guava()
if should_create_repository("com_squareup_okhttp3_okhttp", kwargs):
com_squareup_okhttp3_okhttp()
if should_create_repository("com_squareup_okio", kwargs):
com_squareup_okio()
if should_create_repository("commons_codec", kwargs):
commons_codec()
if should_create_repository("commons_logging", kwargs):
commons_logging()
if should_create_repository("junit", kwargs):
junit()
if should_create_repository("net_bytebuddy", kwargs):
net_bytebuddy()
if should_create_repository("org_apache_commons_exec", kwargs):
org_apache_commons_exec()
if should_create_repository("org_apache_httpcomponents_httpclient", kwargs):
org_apache_httpcomponents_httpclient()
if should_create_repository("org_apache_httpcomponents_httpcore", kwargs):
org_apache_httpcomponents_httpcore()
if should_create_repository("org_hamcrest_core", kwargs):
org_hamcrest_core()
if should_create_repository("org_jetbrains_kotlin_stdlib", kwargs):
org_jetbrains_kotlin_stdlib()
if should_create_repository("org_json", kwargs):
org_json()
if should_create_repository("org_seleniumhq_py", kwargs):
org_seleniumhq_py()
if should_create_repository("org_seleniumhq_selenium_api", kwargs):
org_seleniumhq_selenium_api()
if should_create_repository("org_seleniumhq_selenium_remote_driver", kwargs):
org_seleniumhq_selenium_remote_driver()
if kwargs.keys():
print("The following parameters are unknown: " + str(kwargs.keys()))
def should_create_repository(name, args):
"""Returns whether the name repository should be created.
This allows creation of a repository to be disabled by either an
"omit_" _+ name parameter or by previously defining a rule for the repository.
The args dict will be mutated to remove "omit_" + name.
Args:
name: The name of the repository that should be checked.
args: A dictionary that contains "omit_...": bool pairs.
Returns:
boolean indicating whether the repository should be created.
"""
key = "omit_" + name
if key in args:
val = args.pop(key)
if val:
return False
if native.existing_rule(name):
return False
return True
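# Illustrative WORKSPACE usage (hypothetical; the workspace name is assumed):
# import every transitive dependency except Guava, e.g. because the parent
# workspace already defines @com_google_guava itself.
#
#   load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories")
#   web_test_repositories(omit_com_google_guava = True)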
def browser_repositories(firefox = False, chromium = False, sauce = False):
"""Sets up repositories for browsers defined in //browsers/....
This should only be used on an experimental basis; projects should define
their own browsers.
Args:
firefox: Configure repositories for //browsers:firefox-native.
chromium: Configure repositories for //browsers:chromium-native.
sauce: Configure repositories for //browser/sauce:chrome-win10.
"""
if chromium:
org_chromium_chromedriver()
org_chromium_chromium()
if firefox:
org_mozilla_firefox()
org_mozilla_geckodriver()
if sauce:
com_saucelabs_sauce_connect()
def bazel_skylib():
http_archive(
name = "bazel_skylib",
sha256 = "",
strip_prefix = "bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz",
"https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz",
],
)
def com_github_blang_semver():
go_repository(
name = "com_github_blang_semver",
importpath = "github.com/blang/semver",
sha256 = "3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57",
strip_prefix = "semver-3.5.1",
urls = [
"https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz",
"https://github.com/blang/semver/archive/v3.5.1.tar.gz",
],
)
def com_github_gorilla_context():
go_repository(
name = "com_github_gorilla_context",
importpath = "github.com/gorilla/context",
sha256 = "2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03",
strip_prefix = "context-1.1.1",
urls = [
"https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz",
"https://github.com/gorilla/context/archive/v1.1.1.tar.gz",
],
)
def com_github_gorilla_mux():
go_repository(
name = "com_github_gorilla_mux",
importpath = "github.com/gorilla/mux",
sha256 = "0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3",
strip_prefix = "mux-1.6.2",
urls = [
"https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz",
"https://github.com/gorilla/mux/archive/v1.6.2.tar.gz",
],
)
def com_github_tebeka_selenium():
go_repository(
name = "com_github_tebeka_selenium",
importpath = "github.com/tebeka/selenium",
sha256 = "c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd",
strip_prefix = "selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a",
urls = [
"https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz",
"https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz",
],
)
def com_github_urllib3():
http_archive(
name = "com_github_urllib3",
build_file = str(Label("//build_files:com_github_urllib3.BUILD")),
sha256 = "a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf",
strip_prefix = "urllib3-1.23",
urls = [
"https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz",
],
)
def com_google_code_findbugs_jsr305():
java_import_external(
name = "com_google_code_findbugs_jsr305",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
"https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar",
],
jar_sha256 =
"766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7",
licenses = ["notice"], # BSD 3-clause
)
def com_google_code_gson():
java_import_external(
name = "com_google_code_gson",
jar_sha256 =
"233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar",
"https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar",
],
licenses = ["notice"], # The Apache Software License, Version 2.0
)
def com_google_errorprone_error_prone_annotations():
java_import_external(
name = "com_google_errorprone_error_prone_annotations",
jar_sha256 =
"10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar",
"https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar",
],
licenses = ["notice"], # Apache 2.0
)
def com_google_guava():
java_import_external(
name = "com_google_guava",
jar_sha256 = "a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar",
"https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar",
],
licenses = ["notice"], # Apache 2.0
exports = [
"@com_google_code_findbugs_jsr305",
"@com_google_errorprone_error_prone_annotations",
],
)
def com_saucelabs_sauce_connect():
platform_http_file(
name = "com_saucelabs_sauce_connect",
licenses = ["by_exception_only"], # SauceLabs EULA
amd64_sha256 = "dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52",
amd64_urls = [
"https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz",
],
macos_sha256 = "920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66",
macos_urls = [
"https://saucelabs.com/downloads/sc-4.5.1-osx.zip",
],
windows_sha256 =
"ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5",
windows_urls = [
"https://saucelabs.com/downloads/sc-4.4.12-win32.zip",
],
)
def com_squareup_okhttp3_okhttp():
java_import_external(
name = "com_squareup_okhttp3_okhttp",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar",
"https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar",
],
jar_sha256 =
"a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e",
licenses = ["notice"], # Apache 2.0
deps = [
"@com_squareup_okio",
"@com_google_code_findbugs_jsr305",
],
)
def com_squareup_okio():
java_import_external(
name = "com_squareup_okio",
jar_sha256 = "79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar",
"https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar",
],
licenses = ["notice"], # Apache 2.0
deps = [
"@com_google_code_findbugs_jsr305",
"@org_jetbrains_kotlin_stdlib",
],
)
def commons_codec():
java_import_external(
name = "commons_codec",
jar_sha256 =
"e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar",
"https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar",
],
licenses = ["notice"], # Apache License, Version 2.0
)
def commons_logging():
java_import_external(
name = "commons_logging",
jar_sha256 =
"daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar",
"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar",
],
licenses = ["notice"], # The Apache Software License, Version 2.0
)
def junit():
java_import_external(
name = "junit",
jar_sha256 =
"59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar",
"https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar",
],
licenses = ["reciprocal"], # Eclipse Public License 1.0
testonly_ = 1,
deps = ["@org_hamcrest_core"],
)
def net_bytebuddy():
java_import_external(
name = "net_bytebuddy",
jar_sha256 = "4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar",
"https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar",
],
licenses = ["notice"], # Apache 2.0
deps = ["@com_google_code_findbugs_jsr305"],
)
def org_apache_commons_exec():
java_import_external(
name = "org_apache_commons_exec",
jar_sha256 =
"cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar",
"https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar",
],
licenses = ["notice"], # Apache License, Version 2.0
)
def org_apache_httpcomponents_httpclient():
java_import_external(
name = "org_apache_httpcomponents_httpclient",
jar_sha256 =
"c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar",
"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar",
],
licenses = ["notice"], # Apache License, Version 2.0
deps = [
"@org_apache_httpcomponents_httpcore",
"@commons_logging",
"@commons_codec",
],
)
def org_apache_httpcomponents_httpcore():
java_import_external(
name = "org_apache_httpcomponents_httpcore",
jar_sha256 =
"1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar",
"https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar",
],
licenses = ["notice"], # Apache License, Version 2.0
)
def org_chromium_chromedriver():
platform_http_file(
name = "org_chromium_chromedriver",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
amd64_sha256 =
"71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7",
amd64_urls = [
"https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip",
],
macos_sha256 =
"fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae",
macos_urls = [
"https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip",
],
windows_sha256 =
"a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e",
windows_urls = [
"https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip",
],
)
def org_chromium_chromium():
platform_http_file(
name = "org_chromium_chromium",
licenses = ["notice"], # BSD 3-clause (maybe more?)
amd64_sha256 =
"6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8",
amd64_urls = [
"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip",
],
macos_sha256 =
"084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32",
macos_urls = [
"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip",
],
windows_sha256 =
"d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c",
windows_urls = [
"https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip",
],
)
def org_hamcrest_core():
java_import_external(
name = "org_hamcrest_core",
jar_sha256 =
"66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar",
"https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar",
],
licenses = ["notice"], # New BSD License
testonly_ = 1,
)
def org_jetbrains_kotlin_stdlib():
java_import_external(
name = "org_jetbrains_kotlin_stdlib",
jar_sha256 = "62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar",
"https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar",
],
licenses = ["notice"], # The Apache Software License, Version 2.0
)
def org_json():
java_import_external(
name = "org_json",
jar_sha256 = "518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar",
"https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar",
],
licenses = ["notice"], # MIT-style license
)
def org_mozilla_firefox():
platform_http_file(
name = "org_mozilla_firefox",
licenses = ["reciprocal"], # MPL 2.0
amd64_sha256 =
"3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de",
amd64_urls = [
"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2",
"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2",
],
macos_sha256 =
"bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5",
macos_urls = [
"https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg",
"https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg",
],
)
def org_mozilla_geckodriver():
platform_http_file(
name = "org_mozilla_geckodriver",
licenses = ["reciprocal"], # MPL 2.0
amd64_sha256 =
"c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6",
amd64_urls = [
"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz",
"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz",
],
macos_sha256 =
"ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce",
macos_urls = [
"https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz",
"https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz",
],
)
def org_seleniumhq_py():
http_archive(
name = "org_seleniumhq_py",
build_file = str(Label("//build_files:org_seleniumhq_py.BUILD")),
sha256 = "f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40",
strip_prefix = "selenium-3.14.0",
urls = [
"https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz",
],
)
def org_seleniumhq_selenium_api():
java_import_external(
name = "org_seleniumhq_selenium_api",
jar_sha256 = "1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar",
"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar",
],
licenses = ["notice"], # The Apache Software License, Version 2.0
testonly_ = 1,
)
def org_seleniumhq_selenium_remote_driver():
java_import_external(
name = "org_seleniumhq_selenium_remote_driver",
jar_sha256 =
"284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2",
jar_urls = [
"https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar",
"https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar",
],
licenses = ["notice"], # The Apache Software License, Version 2.0
testonly_ = 1,
deps = [
"@com_google_code_gson",
"@com_google_guava",
"@net_bytebuddy",
"@com_squareup_okhttp3_okhttp",
"@com_squareup_okio",
"@commons_codec",
"@commons_logging",
"@org_apache_commons_exec",
"@org_apache_httpcomponents_httpclient",
"@org_apache_httpcomponents_httpcore",
"@org_seleniumhq_selenium_api",
],
)
| 43.019231
| 152
| 0.675011
| 2,760
| 24,607
| 5.812319
| 0.152174
| 0.021319
| 0.029173
| 0.042638
| 0.48398
| 0.440095
| 0.372273
| 0.314425
| 0.284815
| 0.258945
| 0
| 0.124626
| 0.211525
| 24,607
| 571
| 153
| 43.094571
| 0.702196
| 0.135368
| 0
| 0.375
| 0
| 0.122881
| 0.518002
| 0.192875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069915
| false
| 0
| 0.048729
| 0
| 0.125
| 0.002119
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6b5f33a003c3ef902a30bdc2ac23b77d488f11
| 8,045
|
py
|
Python
|
code/tools/run_viz_single_task.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 789
|
2018-03-21T05:28:38.000Z
|
2022-03-29T19:32:47.000Z
|
code/tools/run_viz_single_task.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 46
|
2018-05-03T07:11:10.000Z
|
2022-03-11T23:26:03.000Z
|
code/tools/run_viz_single_task.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 152
|
2018-03-24T10:20:44.000Z
|
2022-02-09T02:38:10.000Z
|
from __future__ import absolute_import, division, print_function
import argparse
import importlib
import itertools
import time
from multiprocessing import Pool
import numpy as np
import os
import pdb
import pickle
import subprocess
import sys
import tensorflow as tf
import tensorflow.contrib.slim as slim
import threading
import init_paths
from models.sample_models import *
target_tasks = "autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000"
list_of_tasks = target_tasks.split(" ")
ON_TEST_SET = True
IN_TRAIN_MODE = False
parser = argparse.ArgumentParser(description='Viz Single Task')
parser.add_argument('--idx', dest='idx',
help='Task to run', type=int)
parser.add_argument('--hs', dest='hs',
help='Hidden size to use', type=int)
parser.add_argument('--n-parallel', dest='n_parallel',
help='Number of models to run in parallel', type=int)
parser.set_defaults(n_parallel=1)
tf.logging.set_verbosity(tf.logging.ERROR)
ipython_std_out = sys.stdout
# Disable
def blockPrint():
sys.stdout = open(os.devnull, 'w')
# Restore
def enablePrint():
sys.stdout = ipython_std_out
# Force Print
def forcePrint(str):
enablePrint()
print(str)
sys.stdout.flush()
blockPrint()
def remove_dups(seq):
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]
pairs = list(itertools.product(list_of_tasks, list_of_tasks))
args = parser.parse_args()
idx_to_run = args.idx
if idx_to_run == -1:
pairs_to_run = pairs
else:
pairs_to_run = pairs[idx_to_run:idx_to_run+1]
def run_to_task(task_to):
import general_utils
from general_utils import RuntimeDeterminedEnviromentVars
import models.architectures as architectures
from data.load_ops import resize_rescale_image
import utils
from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction
import lib.data.load_ops as load_ops
tf.logging.set_verbosity(tf.logging.ERROR)
all_outputs = {}
pickle_dir = 'viz_output_single_task.pkl'
import os
if os.path.isfile(pickle_dir):
with open( pickle_dir, 'rb') as fp:
all_outputs = pickle.load(fp)
for task in list_of_tasks:
if task in all_outputs:
print("{} already exists....\n\n\n".format(task))
continue
print("Doing {task}".format(task=task))
general_utils = importlib.reload(general_utils)
tf.reset_default_graph()
training_runners = { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() }
# task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs)
CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task)
############## Load Configs ##############
cfg = utils.load_config( CONFIG_DIR, nopause=True )
RuntimeDeterminedEnviromentVars.register_dict( cfg )
split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames']
cfg['train_filenames'] = split_file
cfg['val_filenames'] = split_file
cfg['test_filenames'] = split_file
cfg['num_epochs'] = 1
cfg['randomize'] = False
root_dir = cfg['root_dir']
cfg['num_read_threads'] = 1
print(cfg['log_root'])
if task == 'jigsaw':
continue
cfg['model_path'] = os.path.join(
cfg['log_root'],
task,
'model.permanent-ckpt'
)
print( cfg['model_path'])
if cfg['model_path'] is None:
continue
############## Set Up Inputs ##############
# tf.logging.set_verbosity( tf.logging.INFO )
inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, use_filename_queue=False ) # is_training determines whether to use train/validation
RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg )
RuntimeDeterminedEnviromentVars.populate_registered_variables()
start_time = time.time()
# utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False )
############## Set Up Model ##############
model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE )
m = model[ 'model' ]
model[ 'saver_op' ].restore( training_runners[ 'sess' ], cfg[ 'model_path' ] )
############## Start dataloading workers ##############
data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn(
inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False )
prefetch_threads = threading.Thread(
target=data_prefetch_init_fn,
args=( training_runners[ 'sess' ], training_runners[ 'coord' ] ))
prefetch_threads.start()
############## Run First Batch ##############
if not hasattr(m, 'masks'):
(
input_batch, target_batch,
data_idx,
predicted, loss,
) = training_runners['sess'].run( [
m.input_images, m.targets,
model[ 'data_idxs' ],
m.decoder_output, m.total_loss] )
mask_batch = 1.
else:
(
input_batch, target_batch, mask_batch,
data_idx,
predicted, loss,
) = training_runners['sess'].run( [
m.input_images, m.targets, m.masks,
model[ 'data_idxs' ],
m.decoder_output, m.total_loss] )
if task == 'segment2d' or task == 'segment25d':
from sklearn.decomposition import PCA
x = np.zeros((32,256,256,3), dtype='float')
for i in range(predicted.shape[0]):
embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64))
pca = PCA(n_components=3)
pca.fit(embedding_flattened)
lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1))
lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min())
x[i] = lower_dim
predicted = x
############## Clean Up ##############
training_runners[ 'coord' ].request_stop()
training_runners[ 'coord' ].join()
# if os.path.isfile(pickle_dir):
# with open(pickle_dir, 'rb') as fp:
# all_outputs = pickle.load(fp)
############## Store to dict ##############
to_store = {
'input': input_batch,
'target': target_batch,
'mask': mask_batch,
'data_idx':data_idx,
'output':predicted}
all_outputs[task] = to_store
print("Done: {}".format(task))
# os.system("sudo cp {d} /home/ubuntu/s3/model_log".format(d=pickle_dir))
############## Reset graph and paths ##############
tf.reset_default_graph()
training_runners['sess'].close()
try:
del sys.modules[ 'config' ]
except:
pass
sys.path = remove_dups(sys.path)
print("FINISHED: {}\n\n\n\n\n\n".format(task))
pickle_dir = 'viz_output_single_task.pkl'
with open( pickle_dir, 'wb') as fp:
pickle.dump(all_outputs, fp)
try:
subprocess.call("aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/".format(pickle_dir), shell=True)
except:
subprocess.call("sudo cp {} /home/ubuntu/s3/visualizations/".format(pickle_dir), shell=True)
return
if __name__ == '__main__':
run_to_task(None)
# with Pool(args.n_parallel) as p:
# p.map(run_to_task, list_of_tasks)
| 35.131004
| 313
| 0.605842
| 969
| 8,045
| 4.780186
| 0.308566
| 0.01943
| 0.024611
| 0.013601
| 0.18804
| 0.160622
| 0.137737
| 0.092832
| 0.092832
| 0.076857
| 0
| 0.009131
| 0.264885
| 8,045
| 228
| 314
| 35.285088
| 0.774095
| 0.082411
| 0
| 0.161677
| 0
| 0.005988
| 0.145898
| 0.030384
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02994
| false
| 0.005988
| 0.161677
| 0
| 0.203593
| 0.047904
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6bae7a17f418cda8c2e6d4ee817869bb64ec62
| 35,884
|
bzl
|
Python
|
stratum/portage/build_defs.bzl
|
cholve/stratum
|
09ddb5acb604f7e694a6b7d2fe93fea79f801794
|
[
"Apache-2.0"
] | 267
|
2019-09-11T15:01:37.000Z
|
2022-03-28T11:14:29.000Z
|
stratum/portage/build_defs.bzl
|
cholve/stratum
|
09ddb5acb604f7e694a6b7d2fe93fea79f801794
|
[
"Apache-2.0"
] | 906
|
2019-09-18T03:37:08.000Z
|
2022-03-30T00:59:53.000Z
|
stratum/portage/build_defs.bzl
|
cholve/stratum
|
09ddb5acb604f7e694a6b7d2fe93fea79f801794
|
[
"Apache-2.0"
] | 107
|
2019-09-16T07:30:53.000Z
|
2022-03-18T09:53:03.000Z
|
# Copyright 2018 Google LLC
# Copyright 2018-present Open Networking Foundation
# SPDX-License-Identifier: Apache-2.0
"""A portable build system for Stratum P4 switch stack.
To use this, load() this file in a BUILD file, specifying the symbols needed.
The public symbols are the macros:
decorate(path)
sc_cc_lib Declare a portable Library.
sc_proto_lib Declare a portable .proto Library.
sc_cc_bin Declare a portable Binary.
sc_package Declare a portable tarball package.
and the variables/lists:
ALL_ARCHES All known arches.
EMBEDDED_ARCHES All embedded arches.
EMBEDDED_PPC Name of PowerPC arch - "ppc".
EMBEDDED_X86 Name of "x86" arch.
HOST_ARCH Name of default "host" arch.
HOST_ARCHES All host arches.
STRATUM_INTERNAL For declaring Stratum internal visibility.
The macros are like cc_library(), proto_library(), and cc_binary(), but with
different options and some restrictions. The key difference: you can
supply lists of architectures for which they should be compiled - defaults
to all if left unstated. Internally, libraries and binaries are generated
for every listed architecture. The names are decorated to keep them different
and allow all to be generated and addressed independently.
This aspect of the system is suboptimal - something along the lines of
augmenting context with a user defined configuration fragment would be a
much cleaner solution.
Currently supported architectures:
ppc
x86
"""
load("//tools/build_defs/label:def.bzl", "parse_label")
load(
"//devtools/build_cleaner/skylark:build_defs.bzl",
"register_extension_info",
)
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
# Generic path & label helpers. ============================================
def _normpath(path):
"""Normalize a path.
Normalizes a path by removing unnecessary path-up segments and its
corresponding directories. Providing own implementation because import os
is not allowed in build defs.
For example
../../dir/to/deeply/nested/path/../../../other/path
will become
../../dir/to/other/path
Args:
path: A valid absolute or relative path to normalize.
Returns:
A path equivalent to the input path with minimal use of path-up segments.
Invalid input paths will stay invalid.
"""
sep = "/"
level = 0
result = []
for d in path.split(sep):
if d in ("", "."):
if result:
continue
elif d == "..":
if level > 0:
result.pop()
level += -1
continue
else:
level += 1
result.append(d)
return sep.join(result)
# Adds a suffix to a label, expanding implicit targets if needed.
def decorate(label, suffix):
if label.endswith(":"): # .../bar: -> .../bar
label = label[:-1]
if ":" in label: # .../bar:bat -> .../bar:bat_suffix
return "%s_%s" % (label, suffix)
elif label.startswith("//"): # //foo/bar -> //foo/bar:bar_suffix
return "%s:%s_%s" % (label, label.split("/")[-1], suffix)
else: # bar -> bar_suffix
return "%s_%s" % (label, suffix)
# Creates a relative filename from a label, replacing "//" and ":".
def _make_filename(label):
if label.startswith("//"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz
return label.replace("//", "google3/").replace(":", "/")
elif label.startswith(":"): # :bat/baz -> bat/baz
return label[1:]
else: # bat/baz -> bat/baz
return label
# Adds dquotes around a string.
def dquote(s):
return '"' + s + '"'
# Adds squotes around a string.
def squote(s):
return "'" + s + "'"
# Emulate Python 2.5+ str.startswith((prefix, ...)).
def starts_with(s, prefix_list):
for prefix in prefix_list:
if s.startswith(prefix):
return prefix
return None
def sc_platform_select(host = None, ppc = None, x86 = None, default = None):
"""Public macro to alter blaze rules based on the platform architecture.
Generates a blaze select(...) statement that can be used in most contexts to
alter a blaze rule based on the target platform architecture. If no selection
is provided for a given platform, {default} is used instead. A specific value
or default must be provided for every target platform.
Args:
host: The value to use for host builds.
ppc: The value to use for ppc builds.
x86: The value to use for x86 builds.
default: The value to use for any of {host,ppc,x86} that isn't specified.
Returns:
The requested selector.
"""
if default == None and (host == None or ppc == None or x86 == None):
fail("Missing a select value for at least one platform in " +
"sc_platform_select. Please add.")
config_label_prefix = "//stratum:stratum_"
return select({
"//conditions:default": (host or default),
config_label_prefix + "ppc": (ppc or default),
config_label_prefix + "x86": (x86 or default),
})
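# Illustrative use (hypothetical flag values): per-arch copts with an empty
# fallback for the platforms not singled out.
#
#   copts = sc_platform_select(
#       ppc = ["-mcpu=8540"],
#       default = [],
#   )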
# Generates an sc_platform_select based on a textual list of arches.
def sc_platform_filter(value, default, arches):
return sc_platform_select(
host = value if "host" in arches else default,
ppc = value if "ppc" in arches else default,
x86 = value if "x86" in arches else default,
)
def sc_platform_alias(
name,
host = None,
ppc = None,
x86 = None,
default = None,
visibility = None):
"""Public macro to create an alias that changes based on target arch.
Generates a blaze alias that will select the appropriate target. If no
selection is provided for a given platform and no default is set, a
dummy default target is used instead.
Args:
name: The name of the alias target.
host: The result of the alias for host builds.
ppc: The result of the alias for ppc builds.
x86: The result of the alias for x86 builds.
default: The result of the alias for any of {host,ppc,x86} that isn't
specified.
visibility: The visibility of the alias target.
"""
native.alias(
name = name,
actual = sc_platform_select(
default = default or "//stratum/portage:dummy",
host = host,
ppc = ppc,
x86 = x86,
),
visibility = visibility,
)
# Embedded build definitions. ==============================================
EMBEDDED_PPC = "ppc"
EMBEDDED_X86 = "x86"
EMBEDDED_ARCHES = [
EMBEDDED_PPC,
EMBEDDED_X86,
]
HOST_ARCH = "host"
HOST_ARCHES = [HOST_ARCH]
ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES
# Identify Stratum platform arch for .pb.h shims and other portability hacks.
_ARCH_DEFINES = sc_platform_select(
default = ["STRATUM_ARCH_HOST"],
ppc = ["STRATUM_ARCH_PPC"],
x86 = ["STRATUM_ARCH_X86"],
)
STRATUM_INTERNAL = [
"//stratum:__subpackages__",
]
#
# Build options for all embedded architectures
#
# Set _TRACE_SRCS to show sources in embedded sc_cc_lib compile steps.
# This is more general than it may seem: genrule doesn't have hdrs or deps
# attributes, so all embedded dependencies appear as a `src'.
# TODO(unknown): if useful again then inject from cmdline else kill feature.
_TRACE_SRCS = False
# Used for all gcc invocations.
_EMBEDDED_FLAGS = [
"-O0", # Don't use this for program-sizing build
#-- "-Os", # Use this for program-sizing build
"-g", # Don't use this for program-sizing build
"-Wall",
"-Werror", # Warn lots, and force fixing warnings.
"-no-canonical-prefixes", # Don't mangle paths and confuse blaze.
"-fno-builtin-malloc", # We'll use tcmalloc
"-fno-builtin-calloc",
"-fno-builtin-realloc",
"-fno-builtin-free",
"-D__STDC_FORMAT_MACROS=1",
# TODO(unknown): Figure out how we can use $(CC_FLAGS) instead of this.
"-D__GOOGLE_STL_LEGACY_COMPATIBILITY",
]
# Used for C and C++ compiler invocations.
_EMBEDDED_CFLAGS = [
"-I$(GENDIR)",
]
# Used for C++ compiler invocations.
_EMBEDDED_CXXFLAGS = [
"-std=gnu++11", # Allow C++11 features _and_ GNU extensions.
]
# Used for linking binaries.
_EMBEDDED_LDFLAGS = [
# "-static", # Use this for program-sizing build
# "-Wl,--gc-sections,--no-wchar-size-warning", # Use this for program-sizing build
]
# PPC ======================================================================
_PPC_GRTE = "//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot"
# X86 ======================================================================
_X86_GRTE = "//grte/v4_x86/release/usr/grte/v4"
# Portability definitions ===================================================
def sc_cc_test(
name,
size = None,
srcs = None,
deps = None,
data = None,
defines = None,
copts = None,
linkopts = None,
visibility = None):
"""Creates a cc_test rule that interacts safely with Stratum builds.
Generates a cc_test rule that doesn't break the build when an embedded arch
is selected. During embedded builds this target will generate a dummy binary
and will not attempt to build any dependencies.
Args:
name: Analogous to cc_test name argument.
size: Analogous to cc_test size argument.
srcs: Analogous to cc_test srcs argument.
deps: Analogous to cc_test deps argument.
data: Analogous to cc_test data argument.
defines: Analogous to cc_test defines argument.
copts: Analogous to cc_test copts argument.
linkopts: Analogous to cc_test linkopts argument.
visibility: Analogous to cc_test visibility argument.
"""
cc_test(
name = name,
size = size or "small",
srcs = sc_platform_select(host = srcs or [], default = []),
deps = sc_platform_select(
host = deps or [],
default = ["//stratum/portage:dummy_with_main"],
),
data = data or [],
defines = defines,
copts = copts,
linkopts = linkopts,
visibility = visibility,
)
register_extension_info(
extension_name = "sc_cc_test",
label_regex_for_dep = "{extension_name}",
)
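# Usage sketch (hypothetical files/targets): behaves like cc_test on host
# builds; on embedded arches it links the dummy main and skips the real deps.
#   sc_cc_test(
#       name = "foo_test",
#       srcs = ["foo_test.cc"],
#       deps = [":foo", "@com_google_googletest//:gtest_main"],
#   )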
def sc_cc_lib(
name,
deps = None,
srcs = None,
hdrs = None,
arches = None,
copts = None,
defines = None,
includes = None,
include_prefix = None,
strip_include_prefix = None,
data = None,
testonly = None,
textual_hdrs = None,
visibility = None,
xdeps = None):
"""Creates rules for the given portable library and arches.
Args:
name: Analogous to cc_library name argument.
deps: Analogous to cc_library deps argument.
srcs: Analogous to cc_library srcs argument.
hdrs: Analogous to cc_library hdrs argument.
arches: List of architectures to generate this way.
copts: Analogous to cc_library copts argument.
defines: Symbols added as "-D" compilation options.
includes: Paths to add as "-I" compilation options.
include_prefix: Analogous to cc_library include_prefix argument.
strip_include_prefix: Analogous to cc_library strip_include_prefix argument.
data: Files to provide as data at runtime (host builds only).
testonly: Standard blaze testonly parameter.
textual_hdrs: Analogous to cc_library.
visibility: Standard blaze visibility parameter.
xdeps: External (file) dependencies of this library - no decorations
assumed, used and exported as header, not for flags, libs, etc.
"""
alwayslink = 0
deps = depset(deps or [])
srcs = depset(srcs or [])
hdrs = depset(hdrs or [])
xdeps = depset(xdeps or [])
copts = depset(copts or [])
includes = depset(includes or [])
data = depset(data or [])
textual_hdrs = depset(textual_hdrs or [])
if srcs:
if [s for s in srcs.to_list() if not s.endswith(".h")]:
alwayslink = 1
if not arches:
arches = ALL_ARCHES
defs_plus = (defines or []) + _ARCH_DEFINES
textual_plus = textual_hdrs | depset(deps.to_list())
cc_library(
name = name,
deps = sc_platform_filter(deps, [], arches),
srcs = sc_platform_filter(srcs, [], arches),
hdrs = sc_platform_filter(hdrs, [], arches),
alwayslink = alwayslink,
copts = sc_platform_filter(copts, [], arches),
defines = defs_plus,
includes = sc_platform_filter(includes, [], arches),
include_prefix = include_prefix,
strip_include_prefix = strip_include_prefix,
testonly = testonly,
textual_hdrs = sc_platform_filter(
textual_plus | xdeps,
[],
arches,
),
data = sc_platform_filter(data, [], arches),
visibility = visibility,
)
register_extension_info(
extension_name = "sc_cc_lib",
label_regex_for_dep = "{extension_name}",
)
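# Usage sketch (hypothetical files/targets): builds for every arch in
# ALL_ARCHES unless `arches` narrows the set.
#   sc_cc_lib(
#       name = "foo",
#       srcs = ["foo.cc"],
#       hdrs = ["foo.h"],
#       arches = ["host", "x86"],  # skip ppc
#   )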
def sc_cc_bin(
name,
deps = None,
srcs = None,
arches = None,
copts = None,
defines = None,
includes = None,
testonly = None,
visibility = None):
"""Creates rules for the given portable binary and arches.
Args:
name: Analogous to cc_binary name argument.
deps: Analogous to cc_binary deps argument.
srcs: Analogous to cc_binary srcs argument.
arches: List of architectures to generate this way.
copts: Analogous to cc_binary copts argument.
defines: Symbols added as "-D" compilation options.
includes: Paths to add as "-I" compilation options.
testonly: Standard blaze testonly parameter.
visibility: Standard blaze visibility parameter.
"""
deps = depset(deps or [])
srcs = depset(srcs or [])
if not arches:
arches = ALL_ARCHES
defs_plus = (defines or []) + _ARCH_DEFINES
cc_binary(
name = name,
deps = sc_platform_filter(
deps,
["//stratum/portage:dummy_with_main"],
arches,
),
srcs = sc_platform_filter(srcs, [], arches),
copts = copts,
defines = defs_plus,
includes = includes,
linkopts = ["-ldl", "-lutil"],
testonly = testonly,
visibility = visibility,
)
register_extension_info(
extension_name = "sc_cc_bin",
label_regex_for_dep = "{extension_name}",
)
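# Usage sketch (hypothetical files/targets):
#   sc_cc_bin(
#       name = "foo_main",
#       srcs = ["foo_main.cc"],
#       deps = [":foo"],
#       arches = EMBEDDED_ARCHES,  # host builds link the dummy main instead
#   )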
# Protobuf =================================================================
_SC_GRPC_DEPS = [
"//sandblaze/prebuilt/grpc",
"//sandblaze/prebuilt/grpc:grpc++_codegen_base",
"//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib",
]
_PROTOC = "@com_google_protobuf//:protobuf:protoc"
_PROTOBUF = "@com_google_protobuf//:protobuf"
_SC_GRPC_PLUGIN = "//sandblaze/prebuilt/protobuf:grpc_cpp_plugin"
_GRPC_PLUGIN = "//grpc:grpc_cpp_plugin"
def _loc(target):
"""Return target location for constructing commands.
Args:
target: Blaze target name available to this build.
Returns:
$(location target)
"""
return "$(location %s)" % target
def _gen_proto_lib(
name,
srcs,
hdrs,
deps,
arch,
visibility,
testonly,
proto_include,
grpc_shim_rule):
"""Creates rules and filegroups for embedded protobuf library.
For every given ${src}.proto, generate:
:${src}_${arch}.pb rule to run protoc
${src}.proto => ${src}.${arch}.pb.{h,cc}
:${src}_${arch}.grpc.pb rule to run protoc w/ the grpc plugin:
${src}.proto => ${src}.${arch}.grpc.pb.{h,cc}
:${src}_${arch}_proto_rollup collects include options for protoc:
${src}_${arch}_proto_rollup.flags
Feed each set into sc_cc_lib to wrap them up into a usable library;
note that ${src}_${arch}_grpc_proto depends on ${src}_${arch}_proto.
Args:
name: Base name for this library.
srcs: List of proto files.
hdrs: More files to build into this library, but also exported for
dependent rules to utilize.
deps: List of deps for this library.
arch: Which architecture to build this library for.
visibility: Standard blaze visibility parameter, passed through to
subsequent rules.
testonly: Standard blaze testonly parameter.
proto_include: Include path for generated sc_cc_libs.
grpc_shim_rule: If needed, the name of the grpc shim for this proto lib.
"""
bash_vars = ["g3=$${PWD}"]
# TODO(unknown): Switch protobuf to using the proto_include mechanism
protoc_label = _PROTOC
protobuf_label = _PROTOBUF
protobuf_hdrs = "%s:well_known_types_srcs" % protobuf_label
protobuf_srcs = [protobuf_hdrs]
protobuf_include = "$${g3}/protobuf/src"
if arch in EMBEDDED_ARCHES:
grpc_plugin = _SC_GRPC_PLUGIN
else:
grpc_plugin = _GRPC_PLUGIN
protoc_deps = []
for dep in deps:
if dep.endswith("_proto"):
protoc_deps.append("%s_%s_headers" % (dep, arch))
name_arch = decorate(name, arch)
# We use this filegroup to accumulate the set of .proto files needed to
# compile this proto.
native.filegroup(
name = decorate(name_arch, "headers"),
srcs = hdrs + protoc_deps,
visibility = visibility,
)
my_proto_rollup = decorate(name_arch, "proto_rollup.flags")
protoc_srcs_set = (srcs + hdrs + protoc_deps +
protobuf_srcs + [my_proto_rollup])
gen_srcs = []
gen_hdrs = []
grpc_gen_hdrs = []
grpc_gen_srcs = []
tools = [protoc_label]
grpc_tools = [protoc_label, grpc_plugin]
protoc = "$${g3}/%s" % _loc(protoc_label)
grpc_plugin = "$${g3}/%s" % _loc(grpc_plugin)
cpp_out = "$${g3}/$(GENDIR)/%s/%s" % (native.package_name(), arch)
accum_flags = []
full_proto_include = None
if proto_include == ".":
full_proto_include = native.package_name()
elif proto_include:
full_proto_include = "%s/%s" % (native.package_name(), proto_include)
if full_proto_include:
temp_prefix = "%s/%s" % (cpp_out, native.package_name()[len(full_proto_include):])
# We do a bit of extra work with these include flags to avoid generating
# warnings.
accum_flags.append(
"$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)" %
(full_proto_include, full_proto_include),
)
accum_flags.append(
"$$(if [[ -e %s ]]; then echo -IG3LOC/%s; fi)" %
(full_proto_include, full_proto_include),
)
else:
temp_prefix = "%s/%s" % (cpp_out, native.package_name())
proto_rollups = [
decorate(decorate(dep, arch), "proto_rollup.flags")
for dep in deps
if dep.endswith("_proto")
]
proto_rollup_cmds = ["printf '%%s\n' %s" % flag for flag in accum_flags]
proto_rollup_cmds.append("cat $(SRCS)")
proto_rollup_cmd = "{ %s; } | sort -u -o $(@)" % "; ".join(proto_rollup_cmds)
native.genrule(
name = decorate(name_arch, "proto_rollup"),
srcs = proto_rollups,
outs = [my_proto_rollup],
cmd = proto_rollup_cmd,
visibility = visibility,
testonly = testonly,
)
for src in srcs + hdrs:
if src.endswith(".proto"):
src_stem = src[0:-6]
src_arch = "%s_%s" % (src_stem, arch)
temp_stem = "%s/%s" % (temp_prefix, src_stem)
gen_stem = "%s.%s" % (src_stem, arch)
# We can't use $${PWD} until this step, because our rollup command
# might be generated on another forge server.
proto_path_cmds = ["rollup=$$(sed \"s,G3LOC,$${PWD},g\" %s)" %
_loc(my_proto_rollup)]
proto_rollup_flags = ["$${rollup}"]
if proto_include:
# We'll be cd-ing to another directory before protoc, so
# adjust our .proto path accordingly.
proto_src_loc = "%s/%s" % (native.package_name(), src)
if proto_src_loc.startswith(full_proto_include + "/"):
proto_src_loc = proto_src_loc[len(full_proto_include) + 1:]
else:
print("Invalid proto include '%s' doesn't match src %s" %
(full_proto_include, proto_src_loc))
# By cd-ing to another directory, we force protoc to produce
# different symbols. Careful, our proto might be in GENDIR!
proto_path_cmds.append("; ".join([
"if [[ -e %s ]]" % ("%s/%s" % (full_proto_include, proto_src_loc)),
"then cd %s" % full_proto_include,
"else cd $(GENDIR)/%s" % full_proto_include,
"fi",
]))
gendir_include = ["-I$${g3}/$(GENDIR)", "-I$${g3}", "-I."]
else:
proto_src_loc = "%s/%s" % (native.package_name(), src)
proto_path_cmds.append("[[ -e %s ]] || cd $(GENDIR)" % proto_src_loc)
gendir_include = ["-I$(GENDIR)", "-I."]
# Generate messages
gen_pb_h = gen_stem + ".pb.h"
gen_pb_cc = gen_stem + ".pb.cc"
gen_hdrs.append(gen_pb_h)
gen_srcs.append(gen_pb_cc)
cmds = bash_vars + [
"mkdir -p %s" % temp_prefix,
] + proto_path_cmds + [
" ".join([protoc] +
gendir_include +
proto_rollup_flags +
[
"-I%s" % protobuf_include,
"--cpp_out=%s" % cpp_out,
proto_src_loc,
]),
"cd $${g3}",
"cp %s.pb.h %s" % (temp_stem, _loc(gen_pb_h)),
"cp %s.pb.cc %s" % (temp_stem, _loc(gen_pb_cc)),
]
pb_outs = [gen_pb_h, gen_pb_cc]
native.genrule(
name = src_arch + ".pb",
srcs = protoc_srcs_set,
outs = pb_outs,
tools = tools,
cmd = " && ".join(cmds),
heuristic_label_expansion = 0,
visibility = visibility,
)
# Generate GRPC
if grpc_shim_rule:
gen_grpc_pb_h = gen_stem + ".grpc.pb.h"
gen_grpc_pb_cc = gen_stem + ".grpc.pb.cc"
grpc_gen_hdrs.append(gen_grpc_pb_h)
grpc_gen_srcs.append(gen_grpc_pb_cc)
cmds = bash_vars + [
"mkdir -p %s" % temp_prefix,
] + proto_path_cmds + [
" ".join([
protoc,
"--plugin=protoc-gen-grpc-cpp=%s" % grpc_plugin,
] +
gendir_include +
proto_rollup_flags +
[
"-I%s" % protobuf_include,
"--grpc-cpp_out=%s" % cpp_out,
proto_src_loc,
]),
"cd $${g3}",
"cp %s.grpc.pb.h %s" % (temp_stem, _loc(gen_grpc_pb_h)),
"cp %s.grpc.pb.cc %s" % (temp_stem, _loc(gen_grpc_pb_cc)),
]
grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc]
native.genrule(
name = src_arch + ".grpc.pb",
srcs = protoc_srcs_set,
outs = grpc_pb_outs,
tools = grpc_tools,
cmd = " && ".join(cmds),
heuristic_label_expansion = 0,
visibility = visibility,
)
dep_set = depset(deps) | [protobuf_label]
includes = []
if proto_include:
includes = [proto_include]
# Note: Public sc_proto_lib invokes this once per (listed) arch;
# which then calls sc_cc_lib with same name for each arch;
# multiple such calls are OK as long as the arches are disjoint.
sc_cc_lib(
name = decorate(name, arch),
deps = dep_set,
srcs = gen_srcs,
hdrs = hdrs + gen_hdrs,
arches = [arch],
copts = [],
includes = includes,
testonly = testonly,
textual_hdrs = gen_hdrs,
visibility = visibility,
)
if grpc_shim_rule:
grpc_name = name[:-6] + "_grpc_proto"
grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS
grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs
sc_cc_lib(
name = decorate(grpc_name, arch),
deps = grpc_dep_set,
srcs = grpc_gen_srcs,
hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule],
arches = [arch],
copts = [],
includes = includes,
testonly = testonly,
textual_hdrs = grpc_gen_hdrs_plus,
visibility = visibility,
)
def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility):
"""Macro to build .pb.h multi-arch master switch for sc_proto_lib.
For each src path.proto, generates path.pb.h consisting of:
#ifdef logic to select path.${arch}.pb.h
Also generates an alias that will select the appropriate proto target
based on the currently selected platform architecture.
Args:
name: Base name for this library.
pb_modifier: protoc plugin-dependent file extension (e.g.: .pb)
srcs: List of proto files.
arches: List of arches this shim should support.
visibility: The blaze visibility of the generated alias.
Returns:
Name of shim rule for use in follow-on hdrs and/or src lists.
"""
outs = []
cmds = []
hdr_ext = pb_modifier + ".h"
for src in srcs:
pkg, filename = parse_label(src)
if not filename.endswith(".proto"):
continue
hdr_stem = filename[0:-6]
new_hdr_name = hdr_stem + hdr_ext
outs.append(new_hdr_name)
# Generate lines for shim switch file.
# Lines expand inside squotes, so quote accordingly.
include_fmt = "#include " + dquote(pkg + "/" + hdr_stem + ".%s" + hdr_ext)
lines = [
"#if defined(STRATUM_ARCH_%s)" % "PPC",
include_fmt % "ppc",
"#elif defined(STRATUM_ARCH_%s)" % "X86",
include_fmt % "x86",
"#elif defined(STRATUM_ARCH_%s)" % "HOST",
include_fmt % "host",
"#else",
"#error Unknown STRATUM_ARCH",
"#endif",
]
gen_cmds = [("printf '%%s\\n' '%s'" % line) for line in lines]
new_hdr_loc = "$(location %s)" % new_hdr_name
cmds.append("{ %s; } > %s" % (" && ".join(gen_cmds), new_hdr_loc))
shim_rule = decorate(name, "shims")
native.genrule(
name = shim_rule,
srcs = srcs,
outs = outs,
cmd = " && ".join(cmds) or "true",
)
sc_platform_alias(
name = name,
host = decorate(name, "host") if "host" in arches else None,
ppc = decorate(name, "ppc") if "ppc" in arches else None,
x86 = decorate(name, "x86") if "x86" in arches else None,
visibility = visibility,
)
return shim_rule
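# For a hypothetical stratum/hal/foo.proto with pb_modifier = ".pb", the
# foo.pb.h shim generated by _gen_proto_shims reads:
#   #if defined(STRATUM_ARCH_PPC)
#   #include "stratum/hal/foo.ppc.pb.h"
#   #elif defined(STRATUM_ARCH_X86)
#   #include "stratum/hal/foo.x86.pb.h"
#   #elif defined(STRATUM_ARCH_HOST)
#   #include "stratum/hal/foo.host.pb.h"
#   #else
#   #error Unknown STRATUM_ARCH
#   #endif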
def _gen_py_proto_lib(name, srcs, deps, visibility, testonly):
"""Creates a py_proto_library from the given srcs.
There's no clean way to make python protos work with sc_proto_lib's
proto_include field, so we keep this simple.
For library "name", generates:
* ${name}_default_pb, a regular proto library.
* ${name}_py, a py_proto_library based on ${name}_default_pb.
Args:
name: Standard blaze name argument.
srcs: Standard blaze srcs argument.
deps: Standard blaze deps argument.
visibility: Standard blaze visibility argument.
testonly: Standard blaze testonly argument.
"""
regular_proto_name = decorate(name, "default_pb")
py_name = decorate(name, "py")
proto_library(
name = regular_proto_name,
srcs = srcs,
deps = [decorate(dep, "default_pb") for dep in deps],
visibility = visibility,
testonly = testonly,
)
native.py_proto_library(
name = py_name,
api_version = 2,
deps = [regular_proto_name],
visibility = visibility,
testonly = testonly,
)
# TODO(unknown): Add support for depending on normal proto_library rules.
def sc_proto_lib(
name = None,
srcs = [],
hdrs = [],
deps = [],
arches = [],
visibility = None,
testonly = None,
proto_include = None,
python_support = False,
services = []):
"""Public macro to build multi-arch library from Message protobuf(s).
For library "name", generates:
* ${name}_shim aka .pb.h master switch - see _gen_proto_shims, above.
* ${name}_${arch}_pb protobuf compile rules - one for each arch.
* sc_cc_lib(name) with those as input.
* ${name}_py a py_proto_library version of this library. Only generated
if python_support == True.
Args:
name: Base name for this library.
srcs: List of .proto files - private to this library.
hdrs: As above, but also exported for dependent rules to utilize.
deps: List of deps for this library.
arches: Which architectures to build this library for, None => ALL.
visibility: Standard blaze visibility parameter, passed through to
subsequent rules.
testonly: Standard blaze testonly parameter.
proto_include: Path to add to include path. This will affect the
symbols generated by protoc, as well as the include
paths used for both sc_cc_lib and sc_proto_lib rules
that depend on this rule. Typically "."
python_support: Defaults to False. If True, generate a python proto library
from this rule. Any sc_proto_lib with python support may
only depend on sc_proto_libs that also have python support,
and may not use the proto_include field in this rule.
services: List of services to enable {"grpc", "rpc"};
Only "grpc" is supported. So "rpc" and "grpc" are equivalent.
"""
if not arches:
if testonly:
arches = HOST_ARCHES
else:
arches = ALL_ARCHES
service_enable = {
"grpc": 0,
}
for service in services or []:
if service == "grpc":
service_enable["grpc"] = 1
elif service == "rpc":
service_enable["grpc"] = 1
else:
fail("service='%s' not in (grpc, rpc)" % service)
deps = depset(deps or [])
shim_rule = _gen_proto_shims(
name = name,
pb_modifier = ".pb",
srcs = srcs + hdrs,
arches = arches,
visibility = visibility,
)
grpc_shim_rule = None
if (service_enable["grpc"]):
grpc_shim_rule = _gen_proto_shims(
name = decorate(name[:-6], "grpc_proto"),
pb_modifier = ".grpc.pb",
srcs = srcs + hdrs,
arches = arches,
visibility = visibility,
)
for arch in arches:
_gen_proto_lib(
name = name,
srcs = srcs,
hdrs = [shim_rule] + hdrs,
deps = deps,
arch = arch,
visibility = visibility,
testonly = testonly,
proto_include = proto_include,
grpc_shim_rule = grpc_shim_rule,
)
if python_support:
if proto_include:
fail("Cannot use proto_include on an sc_proto_lib with python support.")
_gen_py_proto_lib(
name = name,
srcs = depset(srcs + hdrs),
deps = deps,
visibility = visibility,
testonly = testonly,
)
register_extension_info(
extension_name = "sc_proto_lib",
label_regex_for_dep = "{extension_name}",
)
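# Usage sketch (hypothetical files/targets):
#   sc_proto_lib(
#       name = "foo_proto",
#       hdrs = ["foo.proto"],
#       deps = [":bar_proto"],
#       services = ["grpc"],
#   )
# yields :foo_proto (per-arch message libs behind the .pb.h shim) plus
# :foo_grpc_proto for the generated service stubs.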
def sc_package(
name = None,
bins = None,
data = None,
deps = None,
arches = None,
visibility = None):
"""Public macro to package binaries and data for deployment.
For package "name", generates:
* ${name}_${arch}_bin and ${name}_${arch}_data filesets containing
respectively all of the binaries and all of the data needed for this
package and all dependency packages.
* ${name}_${arch} fileset containing the corresponding bin and data
filesets, mapped to bin/ and share/ respectively.
* ${name}_${arch}_tarball rule builds that .tar.gz package.
Args:
name: Base name for this package.
bins: List of sc_cc_bin rules to be packaged.
data: List of files (and file producing rules) to be packaged.
deps: List of other sc_packages to add to this package.
arches: Which architectures to build this library for,
None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported).
visibility: Standard blaze visibility parameter, passed through to
all filesets.
"""
bins = depset(bins or [])
data = depset(data or [])
deps = depset(deps or [])
if not arches:
arches = EMBEDDED_ARCHES
fileset_name = decorate(name, "fs")
for extension, inputs in [
("bin", ["%s.stripped" % b for b in bins.to_list()]),
("data", data),
]:
native.Fileset(
name = decorate(fileset_name, extension),
out = decorate(name, extension),
entries = [
native.FilesetEntry(
files = inputs,
),
] + [
native.FilesetEntry(srcdir = decorate(dep, extension))
for dep in deps.to_list()
],
visibility = visibility,
)
# Add any platform specific files to the final tarball.
platform_entries = sc_platform_select(
# We use a different ppc toolchain for Stratum.
# This means that we must provide portable shared libs for our ppc
# executables.
ppc = [native.FilesetEntry(
srcdir = "%s:BUILD" % _PPC_GRTE,
files = [":libs"],
destdir = "lib/stratum",
symlinks = "dereference",
)],
default = [],
)
native.Fileset(
name = fileset_name,
out = name,
entries = [
native.FilesetEntry(
srcdir = decorate(name, "bin"),
destdir = "bin",
),
native.FilesetEntry(
srcdir = decorate(name, "data"),
destdir = "share",
),
] + platform_entries,
visibility = visibility,
)
outs = ["%s.tar.gz" % name]
# Copy our files into a temporary directory and make any necessary changes
# before tarballing.
cmds = [
"TEMP_DIR=$(@D)/stratum_packaging_temp",
"mkdir $${TEMP_DIR}",
"cp -r %s $${TEMP_DIR}/tarball" % _loc(fileset_name),
"if [[ -e $${TEMP_DIR}/tarball/bin ]]",
"then for f in $${TEMP_DIR}/tarball/bin/*.stripped",
" do mv $${f} $${f%.stripped}", # rename not available.
"done",
"fi",
"tar czf %s -h -C $${TEMP_DIR}/tarball ." % _loc(name + ".tar.gz"),
"rm -rf $${TEMP_DIR}",
]
native.genrule(
name = decorate(name, "tarball"),
srcs = [":%s" % fileset_name],
outs = outs,
cmd = "; ".join(cmds),
visibility = visibility,
)
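# Usage sketch (hypothetical targets):
#   sc_package(
#       name = "stratum_pkg",
#       bins = [":stratum_main"],
#       data = ["config.pb.txt"],
#   )
# builds :stratum_pkg_tarball, which produces stratum_pkg.tar.gz containing
# bin/ and share/ trees (plus lib/stratum shared libs on ppc).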
hexsha: ed6c12390ca654e898450e0424a1c59a124edd59 | size: 96578 | ext: py | lang: Python
path: src/genie/libs/parser/ios/tests/test_show_platform.py
repo: miuvlad/genieparser @ 60b1151e3c67c6b55d75e30359d0bf52825efad8
licenses: ["Apache-2.0"]
#!/usr/bin/env python
import unittest
from unittest.mock import Mock
from pyats.topology import Device
from genie.metaparser.util.exceptions import SchemaEmptyParserError,\
SchemaMissingKeyError
from genie.libs.parser.ios.show_platform import ShowVersion,\
Dir,\
ShowRedundancy,\
ShowInventory,\
ShowBootvar, \
ShowProcessesCpuSorted,\
ShowProcessesCpu,\
ShowVersionRp,\
ShowPlatform,\
ShowPlatformPower,\
ShowProcessesCpuHistory,\
ShowProcessesCpuPlatform,\
ShowPlatformSoftwareStatusControl,\
ShowPlatformSoftwareSlotActiveMonitorMem,\
ShowPlatformHardware,\
ShowPlatformHardwarePlim,\
ShowPlatformHardwareQfpBqsOpmMapping,\
ShowPlatformHardwareQfpBqsIpmMapping,\
ShowPlatformHardwareSerdes,\
ShowPlatformHardwareSerdesInternal,\
ShowPlatformHardwareQfpBqsStatisticsChannelAll,\
ShowPlatformHardwareQfpInterfaceIfnameStatistics,\
ShowPlatformHardwareQfpStatisticsDrop,\
ShowEnvironment,\
ShowModule,\
ShowSwitch, ShowSwitchDetail
from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\
TestShowPlatformPower as test_show_platform_power_iosxe,\
TestShowVersionRp as test_show_version_rp_iosxe,\
TestShowProcessesCpu as test_show_processes_cpu_iosxe,\
TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\
TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\
TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\
TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\
TestShowPlatformHardware as test_show_platform_hardware_iosxe,\
TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\
TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\
TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\
TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\
TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\
ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\
ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\
TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\
TestShowEnv as test_show_env_iosxe,\
TestShowModule as test_show_module_iosxe,\
TestShowSwitch as test_show_switch_iosxe,\
TestShowSwitchDetail as test_show_switch_detail_iosxe
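# The aliased iosxe Test* classes imported above let these IOS tests reuse the
# IOSXE golden outputs wherever the CLI output format is identical.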
class TestShowVersion(unittest.TestCase):
dev1 = Device(name='empty')
dev_iosv = Device(name='iosv')
empty_output = {'execute.return_value': ''}
semi_empty_output = {'execute.return_value': '''\
ROM: Bootstrap program is IOSv
'''}
golden_parsed_output_iosv = {
"version": {
"last_reload_reason": "Unknown reason",
"hostname": "N95_1",
"os": "IOS",
"version_short": "15.6",
"number_of_intfs": {
"Gigabit Ethernet": "6"
},
"version": "15.6(3)M2",
"rtr_type": "IOSv",
"chassis_sn": "9K66Z7TOKAACDEQA24N7S",
"chassis": "IOSv",
"image_id": "VIOS-ADVENTERPRISEK9-M",
'compiled_by': 'prod_rel_team',
'compiled_date': 'Wed 29-Mar-17 14:05',
"processor_type": "revision 1.0",
"platform": "IOSv",
"image_type": "production image",
'processor_board_flash': '10080K',
'returned_to_rom_by': 'reload',
"main_mem": "435457",
"mem_size": {
"non-volatile configuration": "256"
},
"system_image": "flash0:/vios-adventerprisek9-m",
"curr_config_register": "0x0",
"rom": "Bootstrap program is IOSv",
"uptime": "1 day, 16 hours, 42 minutes"
}
}
golden_output_iosv = {'execute.return_value': '''\
Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2017 by Cisco Systems, Inc.
Compiled Wed 29-Mar-17 14:05 by prod_rel_team
ROM: Bootstrap program is IOSv
N95_1 uptime is 1 day, 16 hours, 42 minutes
System returned to ROM by reload
System image file is "flash0:/vios-adventerprisek9-m"
Last reload reason: Unknown reason
This product contains cryptographic features and is subject to United
States and local country laws governing import, export, transfer and
use. Delivery of Cisco cryptographic products does not imply
third-party authority to import, export, distribute or use encryption.
Importers, exporters, distributors and users are responsible for
compliance with U.S. and local country laws. By using this product you
agree to comply with applicable laws and regulations. If you are unable
to comply with U.S. and local laws, return this product immediately.
A summary of U.S. laws governing Cisco cryptographic products may be found at:
http://www.cisco.com/wwl/export/crypto/tool/stqrg.html
If you require further assistance please contact us by sending email to
export@cisco.com.
Cisco IOSv (revision 1.0) with with 435457K/87040K bytes of memory.
Processor board ID 9K66Z7TOKAACDEQA24N7S
6 Gigabit Ethernet interfaces
DRAM configuration is 72 bits wide with parity disabled.
256K bytes of non-volatile configuration memory.
2097152K bytes of ATA System CompactFlash 0 (Read/Write)
0K bytes of ATA CompactFlash 1 (Read/Write)
0K bytes of ATA CompactFlash 2 (Read/Write)
10080K bytes of ATA CompactFlash 3 (Read/Write)
Configuration register is 0x0'''}
golden_parsed_output_ios = {
'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version '
'15.2(3r)E, RELEASE SOFTWARE (fc1)',
'chassis': 'WS-C3750X-24P',
'chassis_sn': 'FDO2028F1WK',
'curr_config_register': '0xF',
'compiled_by': 'prod_rel_team',
'compiled_date': 'Wed 26-Jun-13 09:56',
'hostname': 'R5',
'image_id': 'C3750E-UNIVERSALK9-M',
'image_type': 'production image',
'last_reload_reason': 'power-on',
'license_level': 'ipservices',
'license_type': 'Permanent',
'main_mem': '262144',
'mem_size': {'flash-simulated non-volatile configuration': '512'},
'next_reload_license_level': 'ipservices',
'number_of_intfs': {'Gigabit Ethernet': '28',
'Ten Gigabit Ethernet': '2',
'Virtual Ethernet': '2',
'FastEthernet': '1'
},
'os': 'IOS',
'platform': 'C3750E',
'processor_type': 'PowerPC405',
'returned_to_rom_by': 'power-on',
'rom': 'Bootstrap program is C3750E boot loader',
'rtr_type': 'WS-C3750X-24P',
'system_image': 'flash:c3750e-universalk9-mz',
'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018',
'uptime': '9 weeks, 4 days, 2 hours, 3 minutes',
'version': '12.2(55)SE8',
'version_short': '12.2'
}
}
golden_output_ios = {'execute.return_value': '''\
Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2013 by Cisco Systems, Inc.
Compiled Wed 26-Jun-13 09:56 by prod_rel_team
Image text-base: 0x00003000, data-base: 0x02800000
ROM: Bootstrap program is C3750E boot loader
BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1)
R5 uptime is 9 weeks, 4 days, 2 hours, 3 minutes
System returned to ROM by power-on
System restarted at 12:22:21 PDT Mon Sep 10 2018
System image file is "flash:c3750e-universalk9-mz"
This product contains cryptographic features and is subject to United
States and local country laws governing import, export, transfer and
use. Delivery of Cisco cryptographic products does not imply
third-party authority to import, export, distribute or use encryption.
Importers, exporters, distributors and users are responsible for
compliance with U.S. and local country laws. By using this product you
agree to comply with applicable laws and regulations. If you are unable
to comply with U.S. and local laws, return this product immediately.
A summary of U.S. laws governing Cisco cryptographic products may be found at:
http://www.cisco.com/wwl/export/crypto/tool/stqrg.html
If you require further assistance please contact us by sending email to
export@cisco.com.
License Level: ipservices
License Type: Permanent
Next reload license Level: ipservices
cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K bytes of memory.
Processor board ID FDO2028F1WK
Last reset from power-on
2 Virtual Ethernet interfaces
1 FastEthernet interface
28 Gigabit Ethernet interfaces
2 Ten Gigabit Ethernet interfaces
The password-recovery mechanism is enabled.
512K bytes of flash-simulated non-volatile configuration memory.
Base ethernet MAC Address : 84:3D:C6:FF:F1:B8
Motherboard assembly number : 73-15476-04
Motherboard serial number : FDO202907UH
Model revision number : W0
Motherboard revision number : B0
Model number : WS-C3750X-24P-L
Daughterboard assembly number : 800-32727-03
Daughterboard serial number : FDO202823P8
System serial number : FDO2028F1WK
Top Assembly Part Number : 800-38990-01
Top Assembly Revision Number : F0
Version ID : V07
CLEI Code Number : CMMPP00DRB
Hardware Board Revision Number : 0x05
Switch Ports Model SW Version SW Image
------ ----- ----- ---------- ----------
* 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M
Configuration register is 0xF
'''}
golden_parsed_output_ios_cat6k = {
"version": {
"os": "IOS",
"version_short": "12.2",
"platform": "s72033_rp",
"version": "12.2(18)SXF7",
"image_id": "s72033_rp-ADVENTERPRISEK9_WAN-M",
'compiled_by': 'kellythw',
'compiled_date': 'Thu 23-Nov-06 06:26',
"image_type": "production image",
"rom": "System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)",
"bootldr": "s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)",
"hostname": "cat6k_tb1",
"uptime": "10 weeks, 5 days, 5 hours, 16 minutes",
"system_image": "disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7",
"chassis": "WS-C6503-E",
"main_mem": "983008",
"processor_type": "R7000",
'sp_by': 'power on',
'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 2010',
'returned_to_rom_by': 'power cycle',
"rtr_type": "WS-C6503-E",
"chassis_sn": "FXS1821Q2H9",
"last_reload_reason": "s/w reset",
'processor_board_flash': '65536K',
"number_of_intfs": {
"Gigabit Ethernet/IEEE 802.3": "50",
'Virtual Ethernet/IEEE 802.3': '1'
},
"mem_size": {"non-volatile configuration": "1917", "packet buffer": "8192"},
"curr_config_register": "0x2102",
}
}
golden_output_ios_cat6k = {'execute.return_value': '''
show version
Cisco Internetwork Operating System Software
IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2006 by cisco Systems, Inc.
Compiled Thu 23-Nov-06 06:26 by kellythw
Image text-base: 0x40101040, data-base: 0x42D98000
ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)
BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)
cat6k_tb1 uptime is 10 weeks, 5 days, 5 hours, 16 minutes
Time since cat6k_tb1 switched to active is 10 weeks, 5 days, 5 hours, 15 minutes
System returned to ROM by power cycle at 21:57:23 UTC Sat Aug 28 2010 (SP by power on)
System image file is "disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7"
This product contains cryptographic features and is subject to United
States and local country laws governing import, export, transfer and
use. Delivery of Cisco cryptographic products does not imply
third-party authority to import, export, distribute or use encryption.
Importers, exporters, distributors and users are responsible for
compliance with U.S. and local country laws. By using this product you
agree to comply with applicable laws and regulations. If you are unable
to comply with U.S. and local laws, return this product immediately.
A summary of U.S. laws governing Cisco cryptographic products may be found at:
http://www.cisco.com/wwl/export/crypto/tool/stqrg.html
If you require further assistance please contact us by sending email to
export@cisco.com.
cisco WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K bytes of memory.
Processor board ID FXS1821Q2H9
SR71000 CPU at 600Mhz, Implementation 0x504, Rev 1.2, 512KB L2 Cache
Last reset from s/w reset
SuperLAT software (copyright 1990 by Meridian Technology Corp).
X.25 software, Version 3.0.0.
Bridging software.
TN3270 Emulation software.
1 Virtual Ethernet/IEEE 802.3 interface
50 Gigabit Ethernet/IEEE 802.3 interfaces
1917K bytes of non-volatile configuration memory.
8192K bytes of packet buffer memory.
65536K bytes of Flash internal SIMM (Sector size 512K).
Configuration register is 0x2102
'''}
golden_output_ios_1 = {'execute.return_value': '''\
Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2018 by Cisco Systems, Inc.
Compiled Mon 22-Jan-18 04:07 by prod_rel_team
ROM: Bootstrap program is C3750E boot loader
BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)
sample_switch uptime is 8 weeks, 3 days, 10 hours, 27 minutes
System returned to ROM by power-on
System restarted at 05:06:40 GMT Tue Sep 10 2019
System image file is "flash:c3750e-universalk9-mz.152-2.E8.bin"
Last reload reason: Reload command
This product contains cryptographic features and is subject to United
States and local country laws governing import, export, transfer and
use. Delivery of Cisco cryptographic products does not imply
third-party authority to import, export, distribute or use encryption.
Importers, exporters, distributors and users are responsible for
compliance with U.S. and local country laws. By using this product you
agree to comply with applicable laws and regulations. If you are unable
to comply with U.S. and local laws, return this product immediately.
A summary of U.S. laws governing Cisco cryptographic products may be found at:
http://www.cisco.com/wwl/export/crypto/tool/stqrg.html
If you require further assistance please contact us by sending email to
export@cisco.com.
License Level: ipservices
License Type: Permanent
Next reload license Level: ipservices
cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K bytes of memory.
Processor board ID FDO1633Q14S
Last reset from power-on
14 Virtual Ethernet interfaces
1 FastEthernet interface
28 Gigabit Ethernet interfaces
2 Ten Gigabit Ethernet interfaces
The password-recovery mechanism is enabled.
512K bytes of flash-simulated non-volatile configuration memory.
Base ethernet MAC Address : AC:F2:C5:FF:55:E7
Motherboard assembly number : 73-13061-04
Motherboard serial number : FDO1633Q14M
Model revision number : A0
Motherboard revision number : A0
Model number : WS-C3750X-24S-E
Daughterboard assembly number : 800-32727-03
Daughterboard serial number : FDO172217ED
System serial number : FDO1633Q14S
Top Assembly Part Number : 800-33746-04
Top Assembly Revision Number : B0
Version ID : V03
CLEI Code Number : CMMFF00ARC
Hardware Board Revision Number : 0x04
Switch Ports Model SW Version SW Image
------ ----- ----- ---------- ----------
* 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M
Configuration register is 0xF
'''}
golden_parsed_output_ios_1 = {
'version': {'version_short': '15.2',
'platform': 'C3750E',
'version': '15.2(2)E8',
'image_id': 'C3750E-UNIVERSALK9-M',
'os': 'IOS',
'image_type': 'production image',
'compiled_date': 'Mon 22-Jan-18 04:07',
'compiled_by': 'prod_rel_team',
'rom': 'Bootstrap program is C3750E boot loader',
'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)',
'hostname': 'sample_switch',
'uptime': '8 weeks, 3 days, 10 hours, 27 minutes',
'returned_to_rom_by': 'power-on',
'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019',
'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin',
'last_reload_reason': 'power-on',
'license_level': 'ipservices',
'license_type': 'Permanent',
'next_reload_license_level': 'ipservices',
'chassis': 'WS-C3750X-24S',
'main_mem': '524288',
'processor_type': 'PowerPC405',
'rtr_type': 'WS-C3750X-24S',
'chassis_sn': 'FDO1633Q14S',
'number_of_intfs': {
'Virtual Ethernet': '14',
'FastEthernet': '1',
'Gigabit Ethernet': '28',
'Ten Gigabit Ethernet': '2'
},
'mem_size': {
'flash-simulated non-volatile configuration': '512'
},
'curr_config_register': '0xF'
}
}
device_output = {'execute.return_value':'''
best-c3945-IOS3#show version
Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2011 by Cisco Systems, Inc.
Compiled Fri 05-Aug-11 00:32 by prod_rel_team
ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)
best-c3945-IOS3 uptime is 1 hour, 20 minutes
System returned to ROM by reload at 10:26:47 EST Mon Dec 9 2019
System restarted at 10:27:57 EST Mon Dec 9 2019
System image file is "flash0:c3900-universalk9-mz.SPA.150-1.M7.bin"
Last reload type: Normal Reload
Last reload reason: Reload Command
This product contains cryptographic features and is subject to United
States and local country laws governing import, export, transfer and
use. Delivery of Cisco cryptographic products does not imply
third-party authority to import, export, distribute or use encryption.
Importers, exporters, distributors and users are responsible for
compliance with U.S. and local country laws. By using this product you
agree to comply with applicable laws and regulations. If you are unable
to comply with U.S. and local laws, return this product immediately.
A summary of U.S. laws governing Cisco cryptographic products may be found at:
http://www.cisco.com/wwl/export/crypto/tool/stqrg.html
If you require further assistance please contact us by sending email to
export@cisco.com.
Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of memory.
Processor board ID FGL161010K8
2 FastEthernet interfaces
3 Gigabit Ethernet interfaces
1 Virtual Private Network (VPN) Module
DRAM configuration is 72 bits wide with parity enabled.
255K bytes of non-volatile configuration memory.
2000880K bytes of ATA System CompactFlash 0 (Read/Write)
License Info:
License UDI:
-------------------------------------------------
Device# PID SN
-------------------------------------------------
*0 C3900-SPE150/K9 FOC16050QP6
Technology Package License Information for Module:'c3900'
-----------------------------------------------------------------
Technology Technology-package Technology-package
Current Type Next reboot
------------------------------------------------------------------
ipbase ipbasek9 Permanent ipbasek9
security securityk9 Permanent securityk9
uc None None None
data datak9 Permanent datak9
Configuration register is 0x2102
'''}
parsed_output = {
'version': {
'chassis': 'CISCO3945-CHASSIS',
'chassis_sn': 'FGL161010K8',
'compiled_by': 'prod_rel_team',
'compiled_date': 'Fri 05-Aug-11 00:32',
'curr_config_register': '0x2102',
'hostname': 'best-c3945-IOS3',
'image_id': 'C3900-UNIVERSALK9-M',
'image_type': 'production image',
'last_reload_reason': 'Reload Command',
'last_reload_type': 'Normal Reload',
'license_udi': {
'device_num': {
'*0': {
'pid': 'C3900-SPE150/K9',
'sn': 'FOC16050QP6'
}
}
},
'license_package': {
'data': {
'license_level': 'datak9',
'license_type': 'Permanent',
'next_reload_license_level': 'datak9',
},
'ipbase': {
'license_level': 'ipbasek9',
'license_type': 'Permanent',
'next_reload_license_level': 'ipbasek9',
},
'security': {
'license_level': 'securityk9',
'license_type': 'Permanent',
'next_reload_license_level': 'securityk9',
},
'uc': {
'license_level': 'None',
'license_type': 'None',
'next_reload_license_level': 'None',
},
},
'main_mem': '2027520',
'mem_size': {
'non-volatile configuration': '255',
},
'number_of_intfs': {
'FastEthernet': '2',
'Gigabit Ethernet': '3',
},
'os': 'IOS',
'platform': 'C3900',
'processor_board_flash': '2000880K',
'processor_type': 'C3900-SPE150/K9',
'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019',
'returned_to_rom_by': 'reload',
'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)',
'rtr_type': 'CISCO3945-CHASSIS',
'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin',
'system_restarted_at': '10:27:57 EST Mon Dec 9 2019',
'uptime': '1 hour, 20 minutes',
'version': '15.0(1)M7',
'version_short': '15.0',
},
}
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
version_obj = ShowVersion(device=self.dev1)
with self.assertRaises(AttributeError):
parsed_output = version_obj.parse()
def test_semi_empty(self):
self.dev1 = Mock(**self.semi_empty_output)
version_obj = ShowVersion(device=self.dev1)
with self.assertRaises(KeyError):
parsed_output = version_obj.parse()
def test_golden_iosv(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_iosv)
version_obj = ShowVersion(device=self.dev_iosv)
parsed_output = version_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_iosv)
def test_golden_ios(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_ios)
version_obj = ShowVersion(device=self.dev_iosv)
parsed_output = version_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_ios)
def test_golden_ios_cat6k(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_ios_cat6k)
version_obj = ShowVersion(device=self.dev_iosv)
parsed_output = version_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k)
def test_golden_ios_1(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_ios_1)
version_obj = ShowVersion(device=self.dev_iosv)
parsed_output = version_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_ios_1)
def test_golden_ios_2(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.device_output)
version_obj = ShowVersion(device=self.dev_iosv)
parsed_output = version_obj.parse()
self.assertEqual(parsed_output, self.parsed_output)
class test_dir(unittest.TestCase):
dev1 = Device(name='empty')
dev_iosv = Device(name='iosv')
empty_output = {'execute.return_value': ''}
semi_empty_output = {'execute.return_value': '''\
Directory of flash:/
'''}
golden_parsed_output_iosv = {
"dir": {
"flash0:/": {
"files": {
"e1000_bia.txt": {
"last_modified_date": "Oct 17 2018 18:57:18 +00:00",
"index": "269",
"size": "119",
"permissions": "-rw-"
},
"config": {
"last_modified_date": "Oct 14 2013 00:00:00 +00:00",
"index": "264",
"size": "0",
"permissions": "drw-"
},
"nvram": {
"last_modified_date": "Oct 17 2018 18:57:10 +00:00",
"index": "268",
"size": "524288",
"permissions": "-rw-"
},
"boot": {
"last_modified_date": "Jan 30 2013 00:00:00 +00:00",
"index": "1",
"size": "0",
"permissions": "drw-"
},
"vios-adventerprisek9-m": {
"last_modified_date": "Mar 29 2017 00:00:00 +00:00",
"index": "267",
"size": "147988420",
"permissions": "-rw-"
}
},
"bytes_total": "2142715904",
"bytes_free": "1989595136"
},
"dir": "flash0:/"
}
}
golden_output_iosv = {'execute.return_value': '''\
Directory of flash0:/
1 drw- 0 Jan 30 2013 00:00:00 +00:00 boot
264 drw- 0 Oct 14 2013 00:00:00 +00:00 config
267 -rw- 147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m
268 -rw- 524288 Oct 17 2018 18:57:10 +00:00 nvram
269 -rw- 119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt
2142715904 bytes total (1989595136 bytes free)
'''}
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
dir_obj = Dir(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = dir_obj.parse()
def test_semi_empty(self):
self.dev1 = Mock(**self.semi_empty_output)
dir_obj = Dir(device=self.dev1)
with self.assertRaises(SchemaMissingKeyError):
parsed_output = dir_obj.parse()
def test_golden_iosv(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_iosv)
dir_obj = Dir(device=self.dev_iosv)
parsed_output = dir_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_iosv)
class test_show_redundancy(unittest.TestCase):
dev1 = Device(name='empty')
dev_iosv = Device(name='iosv')
empty_output = {'execute.return_value': ''}
golden_parsed_output_iosv = {
"red_sys_info": {
"last_switchover_reason": "unsupported",
"maint_mode": "Disabled",
"switchovers_system_experienced": "0",
"available_system_uptime": "0 minutes",
"communications": "Down",
"hw_mode": "Simplex",
"communications_reason": "Failure",
"standby_failures": "0"
},
"slot": {
"slot 0": {
"image_ver": "Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)",
"uptime_in_curr_state": "1 day, 16 hours, 42 minutes",
"config_register": "0x0",
"curr_sw_state": "ACTIVE"
}
}
}
golden_output_iosv = {'execute.return_value': '''\
Redundant System Information :
------------------------------
Available system uptime = 0 minutes
Switchovers system experienced = 0
Standby failures = 0
Last switchover reason = unsupported
Hardware Mode = Simplex
Maintenance Mode = Disabled
Communications = Down Reason: Failure
Current Processor Information :
-------------------------------
Active Location = slot 0
Current Software state = ACTIVE
Uptime in current state = 1 day, 16 hours, 42 minutes
Image Version = Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2017 by Cisco Systems, Inc.
Compiled Wed 29-Mar-17 14:05 by prod_rel_team
Configuration register = 0x0
Peer (slot: 0) information is not available because it is in 'DISABLED' state
'''}
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
redundancy_obj = ShowRedundancy(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = redundancy_obj.parse()
def test_golden_iosv(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_iosv)
redundancy_obj = ShowRedundancy(device=self.dev_iosv)
parsed_output = redundancy_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_iosv)
class TestShowInventory(unittest.TestCase):
dev1 = Device(name='empty')
dev_iosv = Device(name='iosv')
empty_output = {'execute.return_value': ''}
golden_parsed_output_iosv = {
'main': {
'chassis': {
'IOSv': {
'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0',
'name': 'IOSv',
'pid': 'IOSv',
'sn': '9K66Z7TOKAACDEQA24N7S',
'vid': '1.0',
},
},
},
}
golden_output_iosv = {'execute.return_value': '''\
NAME: "IOSv", DESCR: "IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0"
PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S
'''}
golden_parsed_output_2 = {
"main": {
"chassis": {
"WS-C6504-E": {
"name": "WS-C6504-E",
"descr": "Cisco Systems Cisco 6500 4-slot Chassis System",
"pid": "WS-C6504-E",
"vid": "V01",
"sn": "FXS1712Q1R8",
}
}
},
"slot": {
"CLK-7600 1": {
"other": {
"CLK-7600 1": {
"name": "CLK-7600 1",
"descr": "OSR-7600 Clock FRU 1",
"pid": "CLK-7600",
"vid": "",
"sn": "FXS170802GL",
}
}
},
"CLK-7600 2": {
"other": {
"CLK-7600 2": {
"name": "CLK-7600 2",
"descr": "OSR-7600 Clock FRU 2",
"pid": "CLK-7600",
"vid": "",
"sn": "FXS170802GL",
}
}
},
"FAN-MOD-4HS 1": {
"other": {
"FAN-MOD-4HS 1": {
"name": "FAN-MOD-4HS 1",
"descr": "High Speed Fan Module for CISCO7604 1",
"pid": "FAN-MOD-4HS",
"vid": "V01",
"sn": "DCH170900PF",
}
}
},
"PS 1 PWR-2700-AC/4": {
"other": {
"PS 1 PWR-2700-AC/4": {
"name": "PS 1 PWR-2700-AC/4",
"descr": "2700W AC power supply for CISCO7604 1",
"pid": "PWR-2700-AC/4",
"vid": "V03",
"sn": "APS1707008Y",
}
}
},
"PS 2 PWR-2700-AC/4": {
"other": {
"PS 2 PWR-2700-AC/4": {
"name": "PS 2 PWR-2700-AC/4",
"descr": "2700W AC power supply for CISCO7604 2",
"pid": "PWR-2700-AC/4",
"vid": "V03",
"sn": "APS17070093",
}
}
},
"1": {
"rp": {
"VS-SUP2T-10G": {
"name": "1",
"descr": "VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5",
"pid": "VS-SUP2T-10G",
"vid": "V05",
"sn": "SAL17152N0F",
"subslot": {
"0": {
"VS-F6K-MSFC5": {
"descr": "VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0",
"name": "msfc sub-module of 1",
"pid": "VS-F6K-MSFC5",
"sn": "SAL17142D06",
"vid": "",
},
"VS-F6K-PFC4": {
"descr": "VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0",
"name": "VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1",
"pid": "VS-F6K-PFC4",
"sn": "SAL17163901",
"vid": "V03",
},
},
"4": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te1/4",
"name": "Transceiver Te1/4",
"pid": "X2-10GB-SR",
"sn": "ONT170202T1",
"vid": "V06 ",
}
},
"5": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te1/5",
"name": "Transceiver Te1/5",
"pid": "X2-10GB-SR",
"sn": "ONT1702033D",
"vid": "V06 ",
}
},
},
}
}
},
"2": {
"lc": {
"WS-X6816-10GE": {
"name": "2",
"descr": "WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0",
"pid": "WS-X6816-10GE",
"vid": "V02",
"sn": "SAL17152QB3",
"subslot": {
"0": {
"WS-F6K-DFC4-E": {
"descr": "WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2",
"name": "WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2",
"pid": "WS-F6K-DFC4-E",
"sn": "SAL171846RF",
"vid": "V02",
}
},
"1": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/1",
"name": "Transceiver Te2/1",
"pid": "X2-10GB-SR",
"sn": "ONT17020338",
"vid": "V06 ",
}
},
"2": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/2",
"name": "Transceiver Te2/2",
"pid": "X2-10GB-SR",
"sn": "ONT1702020H",
"vid": "V06 ",
}
},
"3": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/3",
"name": "Transceiver Te2/3",
"pid": "X2-10GB-SR",
"sn": "ONT170202UU",
"vid": "V06 ",
}
},
"4": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/4",
"name": "Transceiver Te2/4",
"pid": "X2-10GB-SR",
"sn": "ONT170202T5",
"vid": "V06 ",
}
},
"5": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/5",
"name": "Transceiver Te2/5",
"pid": "X2-10GB-SR",
"sn": "AGA1515XZE2",
"vid": "V05 ",
}
},
"6": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/6",
"name": "Transceiver Te2/6",
"pid": "X2-10GB-SR",
"sn": "FNS153920YJ",
"vid": "V06 ",
}
},
"16": {
"X2-10GB-SR": {
"descr": "X2 Transceiver 10Gbase-SR Te2/16",
"name": "Transceiver Te2/16",
"pid": "X2-10GB-SR",
"sn": "ONT170201TT",
"vid": "V06 ",
}
},
},
}
}
},
"3": {
"lc": {
"WS-X6824-SFP": {
"name": "3",
"descr": "WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0",
"pid": "WS-X6824-SFP",
"vid": "V01",
"sn": "SAL17152EG9",
"subslot": {
"0": {
"WS-F6K-DFC4-A": {
"descr": "WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0",
"name": "WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3",
"pid": "WS-F6K-DFC4-A",
"sn": "SAL171848KL",
"vid": "V04",
}
}
},
}
}
},
"4": {
"lc": {
"WS-X6748-GE-TX": {
"name": "4",
"descr": "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4",
"pid": "WS-X6748-GE-TX",
"vid": "V04",
"sn": "SAL14017TWF",
"subslot": {
"0": {
"WS-F6700-CFC": {
"descr": "WS-F6700-CFC Centralized Forwarding Card Rev. 4.1",
"name": "WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4",
"pid": "WS-F6700-CFC",
"sn": "SAL13516QS8",
"vid": "V06",
}
}
},
}
}
},
},
}
golden_output_2 = {'execute.return_value': '''
NAME: "WS-C6504-E", DESCR: "Cisco Systems Cisco 6500 4-slot Chassis System"
PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8
NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1"
PID: CLK-7600 , VID: , SN: FXS170802GL
NAME: "CLK-7600 2", DESCR: "OSR-7600 Clock FRU 2"
PID: CLK-7600 , VID: , SN: FXS170802GL
NAME: "1", DESCR: "VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5"
PID: VS-SUP2T-10G , VID: V05, SN: SAL17152N0F
NAME: "msfc sub-module of 1", DESCR: "VS-F6K-MSFC5 CPU Daughterboard Rev. 2.0"
PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06
NAME: "VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1", DESCR: "VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0"
PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901
NAME: "Transceiver Te1/4", DESCR: "X2 Transceiver 10Gbase-SR Te1/4"
PID: X2-10GB-SR , VID: V06 , SN: ONT170202T1
NAME: "Transceiver Te1/5", DESCR: "X2 Transceiver 10Gbase-SR Te1/5"
PID: X2-10GB-SR , VID: V06 , SN: ONT1702033D
NAME: "2", DESCR: "WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0"
PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3
NAME: "WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2", DESCR: "WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2"
PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF
NAME: "Transceiver Te2/1", DESCR: "X2 Transceiver 10Gbase-SR Te2/1"
PID: X2-10GB-SR , VID: V06 , SN: ONT17020338
NAME: "Transceiver Te2/2", DESCR: "X2 Transceiver 10Gbase-SR Te2/2"
PID: X2-10GB-SR , VID: V06 , SN: ONT1702020H
NAME: "Transceiver Te2/3", DESCR: "X2 Transceiver 10Gbase-SR Te2/3"
PID: X2-10GB-SR , VID: V06 , SN: ONT170202UU
NAME: "Transceiver Te2/4", DESCR: "X2 Transceiver 10Gbase-SR Te2/4"
PID: X2-10GB-SR , VID: V06 , SN: ONT170202T5
NAME: "Transceiver Te2/5", DESCR: "X2 Transceiver 10Gbase-SR Te2/5"
PID: X2-10GB-SR , VID: V05 , SN: AGA1515XZE2
NAME: "Transceiver Te2/6", DESCR: "X2 Transceiver 10Gbase-SR Te2/6"
PID: X2-10GB-SR , VID: V06 , SN: FNS153920YJ
NAME: "Transceiver Te2/16", DESCR: "X2 Transceiver 10Gbase-SR Te2/16"
PID: X2-10GB-SR , VID: V06 , SN: ONT170201TT
NAME: "3", DESCR: "WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0"
PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9
NAME: "WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3", DESCR: "WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0"
PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL
NAME: "4", DESCR: "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4"
PID: WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF
NAME: "WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4", DESCR: "WS-F6700-CFC Centralized Forwarding Card Rev. 4.1"
PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8
NAME: "FAN-MOD-4HS 1", DESCR: "High Speed Fan Module for CISCO7604 1"
PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF
NAME: "PS 1 PWR-2700-AC/4", DESCR: "2700W AC power supply for CISCO7604 1"
PID: PWR-2700-AC/4 , VID: V03, SN: APS1707008Y
NAME: "PS 2 PWR-2700-AC/4", DESCR: "2700W AC power supply for CISCO7604 2"
PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093
'''}
golden_parsed_output_3 = {
"main": {
"chassis": {
"WS-C6503-E": {
"name": "WS-C6503-E",
"descr": "Cisco Systems Catalyst 6500 3-slot Chassis System",
"pid": "WS-C6503-E",
"vid": "V03",
"sn": "FXS1821Q2H9",
}
}
},
"slot": {
"CLK-7600 1": {
"other": {
"CLK-7600 1": {
"name": "CLK-7600 1",
"descr": "OSR-7600 Clock FRU 1",
"pid": "CLK-7600",
"vid": "",
"sn": "FXS181101V4",
}
}
},
"CLK-7600 2": {
"other": {
"CLK-7600 2": {
"name": "CLK-7600 2",
"descr": "OSR-7600 Clock FRU 2",
"pid": "CLK-7600",
"vid": "",
"sn": "FXS181101V4",
}
}
},
"1": {
"rp": {
"WS-SUP720-3BXL": {
"name": "1",
"descr": "WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6",
"pid": "WS-SUP720-3BXL",
"vid": "V05",
"sn": "SAL11434P2C",
"subslot": {
"0": {
"WS-SUP720": {
"descr": "WS-SUP720 MSFC3 Daughterboard Rev. 3.1",
"name": "msfc sub-module of 1",
"pid": "WS-SUP720",
"sn": "SAL11434N9G",
"vid": "",
},
"WS-F6K-PFC3BXL": {
"descr": "WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8",
"name": "switching engine sub-module of 1",
"pid": "WS-F6K-PFC3BXL",
"sn": "SAL11434LYG",
"vid": "V01",
},
}
},
}
}
},
"2": {
"lc": {
"WS-X6748-GE-TX": {
"name": "2",
"descr": "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6",
"pid": "WS-X6748-GE-TX",
"vid": "V02",
"sn": "SAL1128UPQ9",
"subslot": {
"0": {
"WS-F6700-DFC3CXL": {
"descr": "WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1",
"name": "switching engine sub-module of 2",
"pid": "WS-F6700-DFC3CXL",
"sn": "SAL1214LAG5",
"vid": "V01",
}
}
},
}
}
},
"WS-C6503-E-FAN 1": {
"other": {
"WS-C6503-E-FAN 1": {
"name": "WS-C6503-E-FAN 1",
"descr": "Enhanced 3-slot Fan Tray 1",
"pid": "WS-C6503-E-FAN",
"vid": "V02",
"sn": "DCH183500KW",
}
}
},
"PS 1 PWR-1400-AC": {
"other": {
"PS 1 PWR-1400-AC": {
"name": "PS 1 PWR-1400-AC",
"descr": "AC power supply, 1400 watt 1",
"pid": "PWR-1400-AC",
"vid": "V01",
"sn": "ABC0830J127",
}
}
},
},
}
golden_output_3 = {'execute.return_value': '''
# show inventory
NAME: "WS-C6503-E", DESCR: "Cisco Systems Catalyst 6500 3-slot Chassis System"
PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9
NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1"
PID: CLK-7600 , VID: , SN: FXS181101V4
NAME: "CLK-7600 2", DESCR: "OSR-7600 Clock FRU 2"
PID: CLK-7600 , VID: , SN: FXS181101V4
NAME: "1", DESCR: "WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6"
PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C
NAME: "msfc sub-module of 1", DESCR: "WS-SUP720 MSFC3 Daughterboard Rev. 3.1"
PID: WS-SUP720 , VID: , SN: SAL11434N9G
NAME: "switching engine sub-module of 1", DESCR: "WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8"
PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG
NAME: "2", DESCR: "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6"
PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9
NAME: "switching engine sub-module of 2", DESCR: "WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1"
PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5
NAME: "WS-C6503-E-FAN 1", DESCR: "Enhanced 3-slot Fan Tray 1"
PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW
NAME: "PS 1 PWR-1400-AC", DESCR: "AC power supply, 1400 watt 1"
PID: PWR-1400-AC , VID: V01, SN: ABC0830J127
'''}
golden_output_4 = {'execute.return_value': '''
NAME: "1", DESCR: "WS-C8888X-88"
PID: WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W
NAME: "Switch 1 - Power Supply 1", DESCR: "ABC Power Supply"
PID: C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9
NAME: "TenGigabitEthernet1/1/1", DESCR: "SFP-10GBase-SR"
PID: SFP-10G-SR , VID: V03 , SN: SPC1519005V
NAME: "2", DESCR: "WS-C3210X-48"
PID: WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P
NAME: "Switch 2 - Power Supply 1", DESCR: "BCA Power Supply"
PID: C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R
NAME: "TenGigabitEthernet2/1/1", DESCR: "SFP-10GBase-LR"
PID: SFP-10G-LR , VID: V02 , SN: ONT182746GZ
NAME: "1", DESCR: "WS-C1010XR-48FPS-I"
PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3
NAME: "Switch 1 - Power Supply 1", DESCR: "LLL Power Supply"
PID: PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK
NAME: "Switch 1 - FlexStackPlus Module", DESCR: "Stacking Module"
PID: C1010X-STACK , VID: V02 , SN: FD232323XXZ
NAME: "GigabitEthernet1/0/49", DESCR: "1000BaseSX SFP"
PID: GLC-SX-MMD , VID: V01 , SN: ACW102938VS
'''}
golden_parsed_output_4 = {
'slot': {
'1': {
'rp': {
'WS-C0123X-45T-S': {
'descr': 'WS-C8888X-88',
'name': '1',
'pid': 'WS-C0123X-45T-S',
'sn': 'FDO123R12W',
'subslot': {
'1': {
'C3KX-PWR-350WAC': {
'descr': 'ABC Power Supply',
'name': 'Switch 1 - Power Supply 1',
'pid': 'C3KX-PWR-350WAC',
'sn': 'DTN1504L0E9',
'vid': 'V01D ',
},
},
'1/1/1': {
'SFP-10G-SR': {
'descr': 'SFP-10GBase-SR',
'name': 'TenGigabitEthernet1/1/1',
'pid': 'SFP-10G-SR',
'sn': 'SPC1519005V',
'vid': 'V03 ',
},
},
},
'vid': 'V00 ',
},
'WS-C1010XR-48FPS-I': {
'descr': 'WS-C1010XR-48FPS-I',
'name': '1',
'pid': 'WS-C1010XR-48FPS-I',
'sn': 'FD2043B0K3',
'subslot': {
'1': {
'C1010X-STACK': {
'descr': 'Stacking Module',
'name': 'Switch 1 - FlexStackPlus Module',
'pid': 'C1010X-STACK',
'sn': 'FD232323XXZ',
'vid': 'V02 ',
},
'PWR-C2-2929WAC': {
'descr': 'LLL Power Supply',
'name': 'Switch 1 - Power Supply 1',
'pid': 'PWR-C2-2929WAC',
'sn': 'LIT03728KKK',
'vid': 'V02L ',
},
},
'1/0/49': {
'GLC-SX-MMD': {
'descr': '1000BaseSX SFP',
'name': 'GigabitEthernet1/0/49',
'pid': 'GLC-SX-MMD',
'sn': 'ACW102938VS',
'vid': 'V01 ',
},
},
},
'vid': 'V05 ',
},
},
},
'2': {
'rp': {
'WS-C3210X-48T-S': {
'descr': 'WS-C3210X-48',
'name': '2',
'pid': 'WS-C3210X-48T-S',
'sn': 'FD5678Z90P',
'subslot': {
'2': {
'C3KX-PWR-007CBA': {
'descr': 'BCA Power Supply',
'name': 'Switch 2 - Power Supply 1',
'pid': 'C3KX-PWR-007CBA',
'sn': 'LTP13579L3R',
'vid': 'V01L ',
},
},
'2/1/1': {
'SFP-10G-LR': {
'descr': 'SFP-10GBase-LR',
'name': 'TenGigabitEthernet2/1/1',
'pid': 'SFP-10G-LR',
'sn': 'ONT182746GZ',
'vid': 'V02 ',
},
},
},
'vid': 'V02 ',
},
},
},
},
}
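# Note the trailing whitespace inside the 'vid' values above (e.g. 'V01D '):
# the raw CLI pads the VID column before the comma, and these golden dicts
# appear to keep whatever the capture group returned, so the comparison pins
# down the parser's exact behaviour rather than a trimmed ideal.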
golden_output_5 = {'execute.return_value': '''
best-c3945-IOS3#show inventory
NAME: "CISCO3945-CHASSIS", DESCR: "CISCO3945-CHASSIS"
PID: CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8
NAME: "Cisco Services Performance Engine 150 for Cisco 3900 ISR on Slot 0", DESCR: "Cisco Services Performance Engine 150 for Cisco 3900 ISR"
PID: C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6
NAME: "Two-Port Fast Ethernet High Speed WAN Interface Card on Slot 0 SubSlot 3", DESCR: "Two-Port Fast Ethernet High Speed WAN Interface Card"
PID: HWIC-2FE , VID: V02 , SN: FOC16062824
NAME: "C3900 AC Power Supply 1", DESCR: "C3900 AC Power Supply 1"
PID: PWR-3900-AC , VID: V03 , SN: QCS1604P0BT
'''}
golden_parsed_output_5 = {
'main': {
'chassis': {
'CISCO3945-CHASSIS': {
'descr': 'CISCO3945-CHASSIS',
'name': 'CISCO3945-CHASSIS',
'pid': 'CISCO3945-CHASSIS',
'sn': 'FGL161010K8',
'vid': 'V05 ',
},
},
},
'slot': {
'0': {
'rp': {
'C3900-SPE150/K9': {
'descr': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR',
'name': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR on Slot 0',
'pid': 'C3900-SPE150/K9',
'sn': 'FOC16050QP6',
'subslot': {
'3': {
'HWIC-2FE': {
'descr': 'Two-Port Fast Ethernet High Speed WAN Interface Card',
'name': 'Two-Port Fast Ethernet High Speed WAN Interface Card on Slot 0 SubSlot 3',
'pid': 'HWIC-2FE',
'sn': 'FOC16062824',
'vid': 'V02 ',
},
},
},
'vid': 'V05 ',
},
},
},
'C3900 AC Power Supply 1': {
'other': {
'C3900 AC Power Supply 1': {
'descr': 'C3900 AC Power Supply 1',
'name': 'C3900 AC Power Supply 1',
'pid': 'PWR-3900-AC',
'sn': 'QCS1604P0BT',
'vid': 'V03 ',
},
},
},
},
}
golden_output_6 = {'execute.return_value': '''
NAME: "1", DESCR: "SM-ES2-16-P"
PID: SM-ES2-16-P , VID: , SN: FOC09876NP3
'''}
golden_parsed_output_6 = {
'slot': {
'1': {
'lc': {
'SM-ES2-16-P': {
'descr': 'SM-ES2-16-P',
'name': '1',
'pid': 'SM-ES2-16-P',
'sn': 'FOC09876NP3',
'vid': '',
},
},
},
},
}
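# Schema note: every parsed structure in these inventory tests has the same
# shape -- parsed['slot'][<slot>][<rp|lc|other>][<PID>] carries name/descr/
# pid/vid/sn, with an optional nested 'subslot' mapping for daughtercards.
# For the minimal golden output directly above:
#   golden_parsed_output_6['slot']['1']['lc']['SM-ES2-16-P']['sn']
#   == 'FOC09876NP3'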
golden_output_7 = {'execute.return_value': '''
NAME: "2821 chassis", DESCR: "2821 chassis"
PID: CISCO2821 , VID: V07 , SN: FTX1234AMWT
NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1"
PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D
NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1"
PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E
NAME: "Virtual Private Network (VPN) Module on Slot 0", DESCR: "Encryption AIM Element"
PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E
'''}
golden_parsed_output_7 = {
'main': {
'chassis': {
'CISCO2821': {
'descr': '2821 chassis',
'name': '2821 chassis',
'pid': 'CISCO2821',
'sn': 'FTX1234AMWT',
'vid': 'V07 ',
},
},
},
'slot': {
'0': {
'other': {
'AIM-VPN/SSL-2': {
'descr': 'Encryption AIM Element',
'name': 'Virtual Private Network (VPN) Module on Slot 0',
'pid': 'AIM-VPN/SSL-2',
'sn': 'FOC2837465E',
'vid': 'V01',
'subslot': {
'0': {
'VWIC2-2MFT-T1/E1': {
'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1',
'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0',
'pid': 'VWIC2-2MFT-T1/E1',
'sn': 'FOC98675U0D',
'vid': 'V01 ',
},
},
'1': {
'VWIC2-2MFT-T1/E1': {
'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1',
'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1',
'pid': 'VWIC2-2MFT-T1/E1',
'sn': 'FOC98675W3E',
'vid': 'V01 ',
},
},
},
},
},
},
},
}
golden_output_8 = {'execute.return_value': '''
NAME: "3825 chassis", DESCR: "3825 chassis"
PID: CISCO3825 , VID: V05 , SN: FTX7908A3RQ
NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1"
PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC65428K9F
NAME: "Wan Interface Card BRI U (2091, 3086) on Slot 0 SubSlot 1", DESCR: "Wan Interface Card BRI U (2091, 3086)"
PID: WIC-1B-U-V2 , VID: V01, SN: 10293847
NAME: "PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot 4", DESCR: "PVDMII DSP SIMM with four DSPs"
PID: PVDM2-64 , VID: V01 , SN: FOC63358WSI
NAME: "High Density Voice Module - 8FXS/DID on Slot 1", DESCR: "High Density Voice Module - 8FXS/DID"
PID: EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8
NAME: "Six port FXO voice interface daughtercard on Slot 1 SubSlot 1", DESCR: "Six port FXO voice interface daughtercard"
PID: EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB
'''}
golden_parsed_output_8 = {
'main': {
'chassis': {
'CISCO3825': {
'descr': '3825 chassis',
'name': '3825 chassis',
'pid': 'CISCO3825',
'sn': 'FTX7908A3RQ',
'vid': 'V05 ',
},
},
},
'slot': {
'0': {
'rp': {
'CISCO3825': {
'subslot': {
'0': {
'VWIC2-2MFT-T1/E1': {
'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1',
'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0',
'pid': 'VWIC2-2MFT-T1/E1',
'sn': 'FOC65428K9F',
'vid': 'V01 ',
},
},
'1': {
'WIC-1B-U-V2': {
'descr': 'Wan Interface Card BRI U (2091, 3086)',
'name': 'Wan Interface Card BRI U (2091, 3086) on Slot 0 SubSlot 1',
'pid': 'WIC-1B-U-V2',
'sn': '10293847',
'vid': 'V01',
},
},
'4': {
'PVDM2-64': {
'descr': 'PVDMII DSP SIMM with four DSPs',
'name': 'PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot 4',
'pid': 'PVDM2-64',
'sn': 'FOC63358WSI',
'vid': 'V01 ',
},
},
},
},
},
},
'1': {
'other': {
'EVM-HD-8FXS/DID': {
'descr': 'High Density Voice Module - 8FXS/DID',
'name': 'High Density Voice Module - 8FXS/DID on Slot 1',
'pid': 'EVM-HD-8FXS/DID',
'sn': 'FOC65798TG8',
'subslot': {
'1': {
'EM-HDA-6FXO': {
'descr': 'Six port FXO voice interface daughtercard',
'name': 'Six port FXO voice interface daughtercard on Slot 1 SubSlot 1',
'pid': 'EM-HDA-6FXO',
'sn': 'FOC85389QXB',
'vid': 'V03 ',
},
},
},
'vid': 'V04 ',
},
},
},
},
}
golden_output_9 = {'execute.return_value': '''
NAME: "3845 chassis", DESCR: "3845 chassis"
PID: CISCO3845 , VID: V05 , SN: FTX6666ARJ9
NAME: "c3845 Motherboard with Gigabit Ethernet on Slot 0", DESCR: "c3845 Motherboard with Gigabit Ethernet"
PID: CISCO3845-MB , VID: V09 , SN: FOC729346GQ
NAME: "Virtual Private Network (VPN) Module on Slot 0", DESCR: "Encryption AIM Element"
PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO
NAME: "Clear/Subrate T3/E3 WAN on Slot 1", DESCR: "Clear/Subrate T3/E3 WAN"
PID: NM-1T3/E3= , VID: V01 , SN: FOC28476ADM
NAME: "16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2", DESCR: "16 Port 10BaseT/100BaseTX EtherSwitch"
PID: NM-16ESW , VID: V01 , SN: FOC135464KO
NAME: "Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0", DESCR: "Gigabit(1000BaseT) module for EtherSwitch NM"
PID: GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN
'''}
golden_parsed_output_9 = {
'main': {
'chassis': {
'CISCO3845': {
'descr': '3845 chassis',
'name': '3845 chassis',
'pid': 'CISCO3845',
'sn': 'FTX6666ARJ9',
'vid': 'V05 ',
},
},
},
'slot': {
'0': {
'lc': {
'CISCO3845-MB': {
'descr': 'c3845 Motherboard with Gigabit Ethernet',
'name': 'c3845 Motherboard with Gigabit Ethernet on Slot 0',
'pid': 'CISCO3845-MB',
'sn': 'FOC729346GQ',
'vid': 'V09 ',
},
},
'other': {
'AIM-VPN/SSL-3': {
'descr': 'Encryption AIM Element',
'name': 'Virtual Private Network (VPN) Module on Slot 0',
'pid': 'AIM-VPN/SSL-3',
'sn': 'FOC758693YO',
'vid': 'V01',
},
},
},
'1': {
'lc': {
'NM-1T3/E3=': {
'descr': 'Clear/Subrate T3/E3 WAN',
'name': 'Clear/Subrate T3/E3 WAN on Slot 1',
'pid': 'NM-1T3/E3=',
'sn': 'FOC28476ADM',
'vid': 'V01 ',
},
},
},
'16': {
'lc': {
'NM-16ESW': {
'descr': '16 Port 10BaseT/100BaseTX EtherSwitch',
'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2',
'pid': 'NM-16ESW',
'sn': 'FOC135464KO',
'subslot': {
'0': {
'GE-DCARD-ESW': {
'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM',
'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0',
'pid': 'GE-DCARD-ESW',
'sn': 'FOC91864MNN',
'vid': 'V01 ',
},
},
},
'vid': 'V01 ',
},
},
},
},
}
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
inventory_obj = ShowInventory(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = inventory_obj.parse()
def test_golden_iosv(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_iosv)
inventory_obj = ShowInventory(device=self.dev_iosv)
parsed_output = inventory_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_iosv)
def test_golden_output_2(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_2)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
def test_golden_output_3(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_3)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_3)
def test_golden_output_4(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_4)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_4)
def test_golden_output_5(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_5)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_5)
def test_golden_output_6(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_6)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_6)
def test_golden_output_7(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_7)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_7)
def test_golden_output_8(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_8)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_8)
def test_golden_output_9(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_9)
obj = ShowInventory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_9)
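# All golden tests in this file share one pattern: unittest.mock.Mock accepts
# dotted attribute specs, so Mock(**{'execute.return_value': text}) builds a
# fake device whose execute() call returns the canned CLI output, letting the
# parsers run without a live device. A minimal, self-contained sketch of that
# pattern (the CLI text below is made up for illustration):
def _demo_mock_device_pattern():
    from unittest.mock import Mock
    device = Mock(**{'execute.return_value': 'NAME: "1", DESCR: "demo"'})
    # Mock accepts any call signature; the configured return value is what
    # the parser under test would consume.
    assert device.execute('show inventory') == 'NAME: "1", DESCR: "demo"'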
class test_show_bootvar(unittest.TestCase):
dev = Device(name='ios')
dev_iosv = Device(name='iosv')
empty_output = {'execute.return_value': ''}
golden_parsed_output_iosv = {
"active": {
"boot_variable": "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12",
"configuration_register": "0x2012"
},
"next_reload_boot_variable": "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12"
}
golden_output_iosv = {'execute.return_value': '''\
BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12;
CONFIG_FILE variable =
BOOTLDR variable =
Configuration register is 0x2012
Standby not ready to show bootvar
'''}
def test_empty(self):
self.dev = Mock(**self.empty_output)
platform_obj = ShowBootvar(device=self.dev)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev_iosv = Mock(**self.golden_output_iosv)
platform_obj = ShowBootvar(device=self.dev_iosv)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_iosv)
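# Hedged sketch (for illustration only -- not the ShowBootvar implementation)
# of how the two significant lines of the bootvar output above can be matched:
def _demo_bootvar_line_match():
    import re
    boot = re.search(r'BOOT variable = (?P<var>\S+);',
                     'BOOT variable = disk0:image-1,12;')
    creg = re.search(r'Configuration register is (?P<reg>\S+)',
                     'Configuration register is 0x2012')
    assert boot.group('var') == 'disk0:image-1,12'
    assert creg.group('reg') == '0x2012'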
class test_show_processes_cpu_sorted_CPU(unittest.TestCase):
dev = Device(name='c3850')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"five_sec_cpu_total": 13,
"five_min_cpu": 15,
"one_min_cpu": 23,
"five_sec_cpu_interrupts": 0
}
golden_output = {'execute.return_value': '''\
show processes cpu sorted 5min | inc CPU
CPU utilization for five seconds: 13%/0%; one minute: 23%; five minutes: 15%
'''}
golden_parsed_output_1 = {
"sort": {
1: {
"invoked": 3321960,
"usecs": 109,
"tty": 0,
"one_min_cpu": 0.54,
"process": "PIM Process",
"five_min_cpu": 0.48,
"runtime": 362874,
"pid": 368,
"five_sec_cpu": 1.03
},
2: {
"invoked": 1466728,
"usecs": 2442,
"tty": 0,
"one_min_cpu": 0.87,
"process": "IOSv e1000",
"five_min_cpu": 2.77,
"runtime": 3582279,
"pid": 84,
"five_sec_cpu": 0.55
},
3: {
"invoked": 116196,
"usecs": 976,
"tty": 0,
"one_min_cpu": 0.07,
"process": "OSPF-1 Hello",
"five_min_cpu": 0.07,
"runtime": 113457,
"pid": 412,
"five_sec_cpu": 0.15
}
},
"five_sec_cpu_total": 4,
"five_min_cpu": 9,
"one_min_cpu": 4,
"nonzero_cpu_processes": [
"PIM Process",
"IOSv e1000",
"OSPF-1 Hello"
],
"five_sec_cpu_interrupts": 0
}
golden_output_1 = {'execute.return_value': '''
CPU utilization for five seconds: 4%/0%; one minute: 4%; five minutes: 9%
PID Runtime(ms) Invoked uSecs 5Sec 1Min 5Min TTY Process
368 362874 3321960 109 1.03% 0.54% 0.48% 0 PIM Process
84 3582279 1466728 2442 0.55% 0.87% 2.77% 0 IOSv e1000
412 113457 116196 976 0.15% 0.07% 0.07% 0 OSPF-1 Hello
'''}
def test_empty(self):
self.dev = Mock(**self.empty_output)
obj = ShowProcessesCpuSorted(device=self.dev)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev = Mock(**self.golden_output)
obj = ShowProcessesCpuSorted(device=self.dev)
parsed_output = obj.parse(key_word='CPU', sort_time='5min')
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_golden_1(self):
self.maxDiff = None
self.dev = Mock(**self.golden_output_1)
obj = ShowProcessesCpuSorted(device=self.dev)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
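# The golden keys above come straight from the one-line CPU summary: the
# "4%/0%" pair is the total five-second utilization and the interrupt share
# of it. A hedged sketch of that mapping (illustrative regex, not the
# parser's own):
def _demo_cpu_summary_line():
    import re
    line = ('CPU utilization for five seconds: 4%/0%; '
            'one minute: 4%; five minutes: 9%')
    m = re.match(r'CPU utilization for five seconds: (\d+)%/(\d+)%; '
                 r'one minute: (\d+)%; five minutes: (\d+)%', line)
    total_5s, intr_5s, one_min, five_min = map(int, m.groups())
    assert (total_5s, intr_5s, one_min, five_min) == (4, 0, 4, 9)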
class test_show_processes_cpu(test_show_processes_cpu_iosxe):
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowProcessesCpu(device=self.device)
parsed_output = obj.parse()
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_golden_1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_1)
obj = ShowProcessesCpu(device=self.device)
parsed_output = obj.parse(key_word='process')
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowProcessesCpu(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_version_rp(test_show_version_rp_iosxe):
def test_golden_active(self):
self.device = Mock(**self.golden_output_active)
obj = ShowVersionRp(device=self.device)
parsed_output = obj.parse(rp='active', status='running')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_active)
def test_golden_standby(self):
self.device = Mock(**self.golden_output_standby)
obj = ShowVersionRp(device=self.device)
parsed_output = obj.parse(rp='standby', status='running')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_standby)
def test_golden_standby_offline(self):
self.device = Mock(**self.golden_output_standby_offline)
obj = ShowVersionRp(device=self.device)
self.maxDiff = None
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(rp='standby', status='running')
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowVersionRp(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_platform(test_show_platform_iosxe):
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
platform_obj = ShowPlatform(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_semi_empty(self):
self.dev2 = Mock(**self.semi_empty_output)
platform_obj = ShowPlatform(device=self.dev2)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden_c3850(self):
self.maxDiff = None
self.dev_c3850 = Mock(**self.golden_output_c3850)
platform_obj = ShowPlatform(device=self.dev_c3850)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_c3850)
def test_golden_asr1k(self):
self.maxDiff = None
self.dev_asr1k = Mock(**self.golden_output_asr1k)
platform_obj = ShowPlatform(device=self.dev_asr1k)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_asr1k)
class test_show_platform_power(test_show_platform_power_iosxe):
def test_empty(self):
self.device = Mock(**self.empty_output)
platform_obj = ShowPlatformPower(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
platform_obj = ShowPlatformPower(device=self.device)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe):
def test_empty(self):
self.device = Mock(**self.empty_output)
platform_obj = ShowProcessesCpuHistory(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
platform_obj = ShowProcessesCpuHistory(device=self.device)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe):
def test_golden(self):
self.device = Mock(**self.golden_output)
cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device)
parsed_output = cpu_platform_obj.parse()
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = cpu_platform_obj.parse()
class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe):
def test_empty(self):
self.dev = Mock(**self.empty_output)
obj = ShowPlatformSoftwareStatusControl(device=self.dev)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev = Mock(**self.golden_output)
obj = ShowPlatformSoftwareStatusControl(device=self.dev)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe):
def test_empty(self):
self.dev = Mock(**self.empty_output)
obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev = Mock(**self.golden_output)
obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class test_show_platform_hardware(test_show_platform_hardware_iosxe):
def test_golden_active(self):
self.device = Mock(**self.golden_output_active)
obj = ShowPlatformHardware(device=self.device)
parsed_output = obj.parse()
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_active)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowPlatformHardware(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe):
def test_golden_port(self):
self.device = Mock(**self.golden_output_port)
obj = ShowPlatformHardwarePlim(device=self.device)
parsed_output = obj.parse(port='0/0/0')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_port)
def test_golden_slot(self):
self.device = Mock(**self.golden_output_slot)
obj = ShowPlatformHardwarePlim(device=self.device)
parsed_output = obj.parse(slot='0')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_slot)
def test_golden_subslot(self):
self.device = Mock(**self.golden_output_subslot)
obj = ShowPlatformHardwarePlim(device=self.device)
parsed_output = obj.parse(subslot='0/1')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_subslot)
def test_golden_slot_internal(self):
self.device = Mock(**self.golden_output_slot_internal)
obj = ShowPlatformHardwarePlim(device=self.device)
parsed_output = obj.parse(slot='0', internal=True)
self.maxDiff = None
self.assertEqual(
parsed_output, self.golden_parsed_output_slot_internal)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowPlatformHardwarePlim(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(port='0/0/0')
class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe):
def test_golden_active_opm(self):
self.device = Mock(**self.golden_output_active_opm)
obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device)
parsed_output = obj.parse(status='active', slot='0')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_active_opm)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(status='active', slot='0')
class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe):
def test_golden_active_ipm(self):
self.device = Mock(**self.golden_output_active_ipm)
obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device)
parsed_output = obj.parse(status='active', slot='0')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(status='active', slot='0')
class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe):
def test_golden_serdes(self):
self.device = Mock(**self.golden_output_serdes)
obj = ShowPlatformHardwareSerdes(device=self.device)
parsed_output = obj.parse(slot='0')
self.maxDiff = None
self.assertEqual(parsed_output, self.golden_parsed_output_serdes)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowPlatformHardwareSerdes(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(slot='0')
class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe):
def test_golden(self):
self.device = Mock(**self.golden_output_serdes_internal)
obj = ShowPlatformHardwareSerdesInternal(device=self.device)
parsed_output = obj.parse(slot='0')
self.maxDiff = None
self.assertEqual(
parsed_output, self.golden_parsed_output_serdes_internal)
def test_empty(self):
self.device1 = Mock(**self.empty_output)
obj = ShowPlatformHardwareSerdesInternal(device=self.device1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(slot='0')
class test_show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe):
def test_empty(self):
self.device = Mock(**self.empty_output)
platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(
device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse(
status='active', slot='0', iotype='ipm')
def test_golden_active_ipm(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_active_ipm)
platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(
device=self.device)
parsed_output = platform_obj.parse(
status='active', slot='0', iotype='ipm')
self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm)
def test_golden_active_opm(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_active_opm)
platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll(
device=self.device)
parsed_output = platform_obj.parse(
status='active', slot='0', iotype='opm')
self.assertEqual(parsed_output, self.golden_parsed_output_active_opm)
class test_show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe):
def test_empty(self):
self.device = Mock(**self.empty_output)
platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics(
device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse(
status='active', interface='gigabitEthernet 0/0/0')
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics(
device=self.device)
parsed_output = platform_obj.parse(
status='active', interface='gigabitEthernet 0/0/0')
self.assertEqual(parsed_output, self.golden_parsed_output)
class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe):
def test_empty(self):
self.device = Mock(**self.empty_output)
platform_obj = ShowPlatformHardwareQfpStatisticsDrop(
device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse(status='active')
def test_golden_active(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_active)
platform_obj = ShowPlatformHardwareQfpStatisticsDrop(
device=self.device)
parsed_output = platform_obj.parse(status='active')
self.assertEqual(parsed_output, self.golden_parsed_output_active)
class test_show_env(test_show_env_iosxe):
def test_empty(self):
self.dev = Mock(**self.empty_output)
obj = ShowEnvironment(device=self.dev)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev = Mock(**self.golden_output)
obj = ShowEnvironment(device=self.dev)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class test_show_module(test_show_module_iosxe):
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
platform_obj = ShowModule(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev_c3850 = Mock(**self.golden_output_c3850)
platform_obj = ShowModule(device=self.dev_c3850)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_c3850)
class test_show_switch(test_show_switch_iosxe):
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
platform_obj = ShowSwitch(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev_c3850 = Mock(**self.golden_output_c3850)
platform_obj = ShowSwitch(device=self.dev_c3850)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_c3850)
class test_show_switch_detail(test_show_switch_detail_iosxe):
def test_empty(self):
self.dev1 = Mock(**self.empty_output)
platform_obj = ShowSwitchDetail(device=self.dev1)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = platform_obj.parse()
def test_golden(self):
self.maxDiff = None
self.dev_c3850 = Mock(**self.golden_output_c3850)
platform_obj = ShowSwitchDetail(device=self.dev_c3850)
parsed_output = platform_obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_c3850)
if __name__ == '__main__':
unittest.main()
| 41.307956
| 153
| 0.510582
| 9,787
| 96,578
| 4.897415
| 0.092061
| 0.046317
| 0.024035
| 0.018631
| 0.736601
| 0.685652
| 0.636623
| 0.588513
| 0.549874
| 0.503119
| 0
| 0.08707
| 0.381857
| 96,578
| 2,337
| 154
| 41.325631
| 0.715808
| 0.000166
| 0
| 0.40197
| 0
| 0.031527
| 0.425099
| 0.019666
| 0
| 0
| 0.00117
| 0
| 0.038424
| 1
| 0.038424
| false
| 0.000985
| 0.010345
| 0
| 0.090148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6c1e66e6a96e129aa7826692f68edb943e0fae
| 9,245
|
py
|
Python
|
Analytics/resources/themes/test_subthemes.py
|
thanosbnt/SharingCitiesDashboard
|
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
|
[
"Apache-2.0"
] | 4
|
2018-11-21T14:42:18.000Z
|
2020-05-11T10:52:59.000Z
|
Analytics/resources/themes/test_subthemes.py
|
thanosbnt/SharingCitiesDashboard
|
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
|
[
"Apache-2.0"
] | 60
|
2018-11-21T15:11:59.000Z
|
2019-12-02T10:46:44.000Z
|
Analytics/resources/themes/test_subthemes.py
|
thanosbnt/SharingCitiesDashboard
|
5d123691d1f25d0b85e20e4e8293266bf23c9f8a
|
[
"Apache-2.0"
] | 7
|
2018-11-21T14:42:44.000Z
|
2019-11-28T16:24:14.000Z
|
import unittest
from http import HTTPStatus
from unittest import TestCase
import bcrypt
from flask.ctx import AppContext
from flask.testing import FlaskClient
from app import create_app
from models.theme import Theme, SubTheme
from models.users import Users
class TestSubThemes(TestCase):
"""
Unit tests for the creation, renaming and deletion of SubThemes
"""
def setUp(self):
"""
Set up a FlaskClient for testing, create an admin user, build the
authorization header for requests to the Flask client, and create a dummy theme
"""
self.client, self.app_context = self.create_test_client()
self.user = self.create_admin_user()
self.auth_header = self.get_auth_header()
self.theme = Theme.get_by_name("_test_add_Subtheme_")
if not self.theme:
self.theme = Theme("_test_add_Subtheme_")
self.theme.save()
self.theme.commit()
self.theme = Theme.get_by_name("_test_add_Subtheme_")
self.subtheme = self.create_dummy_subtheme()
def create_test_client(self) -> (FlaskClient, AppContext):
"""
Create flask testing client
:return: FlaskClient for tests and AppContext
"""
test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True)
testing_client = test_app.test_client()
test_app_context = test_app.app_context()
test_app_context.push()
return testing_client, test_app_context
def create_dummy_subtheme(self) -> SubTheme:
"""
Create SubTheme for tests
:return: SubTheme for tests
"""
subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
if not subtheme:
subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_')
subtheme.save()
subtheme.commit()
subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
return subtheme
def create_admin_user(self) -> Users:
"""
Create Admin user for tests
:return: an admin user for tests
"""
password_hash = bcrypt.hashpw("wfnbqk".encode("utf-8"), bcrypt.gensalt())
user = Users.find_by_email("admin@FCC.com")
if not user:
user = Users("Admin", "admin@FCC.com", password_hash.decode("utf8"), True, True)
try:
user.save()
user.commit()
except Exception:
# The admin user may already exist in the test database; ignore the error.
pass
return user
def get_auth_header(self) -> {str: str}:
"""
Create an Authorization header for test
:return: An authorization header
"""
response_login = self.client.post('/login', data=dict(email=self.user.email, password="wfnbqk", remember=True),
follow_redirects=True)
response_login_json = response_login.get_json()
return {'Authorization': 'Bearer {}'.format(response_login_json["access_token"])}
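# The resulting header has the standard bearer-token shape, e.g.
# {'Authorization': 'Bearer <JWT access token>'}; every request in the tests
# below forwards it via headers=self.auth_header.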
def test_add_subtheme(self):
"""
Create a new SubTheme and check that the client responds with HTTP status 200 (OK).
Check the JSON response data for the expected message 'sub theme created', the
theme id and the new SubTheme name
"""
response = self.client.post('/admin/themes/add_subtheme',
json={"theme_id": self.theme.id, "subtheme": "_TEST_SUB_THEME_2"},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
json_response = response.get_json()
self.assertEqual(json_response["message"], "sub theme created")
self.assertEqual(json_response["theme_id"], self.theme.id)
self.assertEqual(json_response["subtheme"], "_TEST_SUB_THEME_2")
def test_rename_subtheme_theme_id(self):
"""
Rename a SubTheme by theme_id and check that the client responds with HTTP status 200 (OK).
Check the response data for the expected message 'Subtheme renamed' and that the
SubTheme name has been changed
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
current_name = self.subtheme.name
response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": self.subtheme.t_id,
"current_name": current_name,
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
response = response.get_json()
self.assertEqual(response["id"], self.subtheme.id)
self.assertEqual(response["message"], "Subtheme renamed")
self.assertEqual(response["old_name"], current_name)
self.assertEqual(response["new_name"], "new_name_not_1")
def test_rename_subtheme_id(self):
"""
Rename a SubTheme by id and check that the client responds with HTTP status 200 (OK).
Check the response data for the expected message 'Subtheme renamed' and that the
SubTheme name has been changed
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
current_name = self.subtheme.name
response = self.client.post('/admin/themes/rename_subtheme', json={"id": self.subtheme.id,
"current_name": current_name,
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
response = response.get_json()
self.assertEqual(response["id"], self.subtheme.id)
self.assertEqual(response["message"], "Subtheme renamed")
self.assertEqual(response["old_name"], current_name)
self.assertEqual(response["new_name"], "new_name_not_1")
def test_rename_non_existent_subtheme(self):
"""
Rename a SubTheme that does not exist and check that the client responds with HTTP status 404 (NOT_FOUND)
"""
response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": -1,
"current_name": "a3d4f5g6h7j8k0",
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_delete_non_existent_subtheme(self):
"""
Delete a SubTheme that does not exist and check that the client responds with HTTP status 404 (NOT_FOUND)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme',
json={"name": "weA_gfj24fhurtyui", "theme_id": -1},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_delete_subtheme_by_id(self):
"""
Delete a SubTheme by id and check that the client responds with HTTP status 204 (NO_CONTENT)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme', json={"id": self.subtheme.id},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)
def test_delete_subtheme_by_theme_id_and_name(self):
"""
Delete a SubTheme by theme_id and name, and check that the client responds with HTTP status 204 (NO_CONTENT)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme',
json={"theme_id": self.subtheme.t_id, "name": self.subtheme.name},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)
def tearDown(self):
""" Handle the cleanup after tests"""
self.subtheme = SubTheme.get_by_name("new_name_not_1")
if not self.subtheme:
self.subtheme = SubTheme.get_by_name("_TEST_SUB_THEME_")
if self.subtheme:
self.subtheme.delete()
self.subtheme.commit()
test_sub = SubTheme.get_by_name("_TEST_SUB_THEME_2")
if test_sub:
test_sub.delete()
test_sub.commit()
if self.theme:
self.theme.delete()
self.theme.commit()
self.client.post('/logout', headers=self.auth_header)
if self.user:
self.user.delete()
self.user.commit()
self.app_context.pop()
if __name__ == '__main__':
unittest.main()
| 42.800926
| 119
| 0.589508
| 1,038
| 9,245
| 5.026012
| 0.133911
| 0.062105
| 0.06613
| 0.027602
| 0.575618
| 0.548591
| 0.502588
| 0.477477
| 0.463676
| 0.422657
| 0
| 0.006824
| 0.318442
| 9,245
| 215
| 120
| 43
| 0.82114
| 0.160087
| 0
| 0.377778
| 0
| 0
| 0.115056
| 0.027104
| 0
| 0
| 0
| 0
| 0.133333
| 1
| 0.096296
| false
| 0.02963
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6ced72ed9bc083484bd7a8ca32221ff538be8a
| 12,541
|
py
|
Python
|
python2.7libs/hammer_tools/content_browser.py
|
anvdev/Hammer-Tools
|
0211ec837da6754e537c98624ecd07c23abab28e
|
[
"Apache-2.0"
] | 19
|
2019-10-09T13:48:11.000Z
|
2021-06-14T01:25:23.000Z
|
python2.7libs/hammer_tools/content_browser.py
|
anvdev/Hammer-Tools
|
0211ec837da6754e537c98624ecd07c23abab28e
|
[
"Apache-2.0"
] | 219
|
2019-10-08T14:44:48.000Z
|
2021-06-19T06:27:46.000Z
|
python2.7libs/hammer_tools/content_browser.py
|
anvdev/Hammer-Tools
|
0211ec837da6754e537c98624ecd07c23abab28e
|
[
"Apache-2.0"
] | 3
|
2020-02-14T06:18:06.000Z
|
2020-11-25T20:47:06.000Z
|
from __future__ import print_function
try:
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
except ImportError:
from PySide2.QtWidgets import *
from PySide2.QtGui import *
from PySide2.QtCore import *
import hou
from hammer_tools.utils import createAction
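# Ctrl+Middle-click is Houdini's convention for reverting a parameter to its
# default value; the predicate below lets the custom widgets in this module
# share that check.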
def isRevertToDefaultEvent(event):
return event.modifiers() == Qt.ControlModifier and event.button() == Qt.MiddleButton
class Slider(QSlider):
def __init__(self, orientation=Qt.Horizontal, parent=None):
super(Slider, self).__init__(orientation, parent)
self.defaultValue = 0
self.valueLadderMode = False
def revertToDefault(self):
self.setValue(self.defaultValue)
def setDefaultValue(self, value, reset=True):
self.defaultValue = value
if reset:
self.revertToDefault()
def mousePressEvent(self, event):
if False: # Type hint
event = QMouseEvent
if event.button() == Qt.MiddleButton:
return
elif event.button() == Qt.LeftButton:
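# Re-emit the left press as a middle press so the handle jumps straight to
# the clicked position (many Qt styles bind absolute-set to the middle
# button), keeping the real middle button free for the value ladder.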
event = QMouseEvent(QEvent.MouseButtonPress, event.pos(),
Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier)
super(Slider, self).mousePressEvent(event)
def mouseMoveEvent(self, event):
if False: # Type hint
event = QMouseEvent
if not self.valueLadderMode and event.buttons() == Qt.MiddleButton:
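# First middle-drag event: open Houdini's value ladder bound to this
# slider's value; later drag events only update it (elif branch below).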
try:
hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int)
except hou.OperationFailed:
return
else:
self.valueLadderMode = True
elif self.valueLadderMode:
hou.ui.updateValueLadder(event.globalX(), event.globalY(),
bool(event.modifiers() & Qt.AltModifier),
bool(event.modifiers() & Qt.ShiftModifier))
else:
super(Slider, self).mouseMoveEvent(event)
def mouseReleaseEvent(self, event):
if False: # Type hint
event = QMouseEvent
if self.valueLadderMode and event.button() == Qt.MiddleButton:
hou.ui.closeValueLadder()
self.valueLadderMode = False
elif isRevertToDefaultEvent(event):
self.revertToDefault()
else:
super(Slider, self).mouseReleaseEvent(event)
class SearchField(QComboBox):
def __init__(self, parent=None):
super(SearchField, self).__init__(parent)
self.setEditable(True)
edit = self.lineEdit()
edit.setPlaceholderText('Search...')
edit.installEventFilter(self)
edit.setFont(QFont('Segoe UI'))
self.setFixedHeight(26)
comp = self.completer()
comp.setCompletionMode(QCompleter.PopupCompletion)
comp.setFilterMode(Qt.MatchContains)
comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel)
comp.setMaxVisibleItems(5)
popup = comp.popup()
popup.setStyleSheet(hou.qt.styleSheet())
def mouseReleaseEvent(self, event):
if False: # Type hint
event = QMouseEvent
if isRevertToDefaultEvent(event):
self.clearEditText()
def eventFilter(self, watched, event):
if False: # Type hint
watched = QObject
event = QEvent
if watched == self.lineEdit():
if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event):
self.clearEditText()
event.accept()
return True
return False
def keyPressEvent(self, event):
if False: # Type hint
event = QKeyEvent
key = event.key()
mod = event.modifiers()
if mod == Qt.NoModifier and key == Qt.Key_Escape:
self.clearEditText()
else:
super(SearchField, self).keyPressEvent(event)
def hidePopup(self):
super(SearchField, self).hidePopup()
self.lineEdit().setFocus()
link_or_state_icon = 'BUTTONS_link'
embedded_icon = 'BUTTONS_pinned'
class BrowserMode(QStandardItemModel):
def __init__(self):
super(BrowserMode, self).__init__()
class BrowserTreeView(QTreeView):
def __init__(self, parent=None):
super(BrowserTreeView, self).__init__(parent)
self.setAlternatingRowColors(True)
class BrowserTableView(QListView):
def __init__(self, parent=None):
super(BrowserTableView, self).__init__(parent)
self.setViewMode(QListView.IconMode)
self.setResizeMode(QListView.Adjust)
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel)
self.setIconSize(QSize(120, 90))
self.setUniformItemSizes(True)
self.setContextMenuPolicy(Qt.CustomContextMenu)
class ContentBrowser(QWidget):
def __init__(self, parent=None):
super(ContentBrowser, self).__init__(parent)
self.setWindowTitle('Content Browser')
self.setProperty('houdiniStyle', True)
topLayout = QHBoxLayout()
topLayout.setContentsMargins(4, 4, 4, 2)
topLayout.setSpacing(2)
self.refreshButton = QPushButton()
self.refreshButton.setFixedSize(26, 26)
self.refreshButton.setToolTip('Update\tF5')
self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18))
self.refreshButton.setIconSize(QSize(18, 18))
topLayout.addWidget(self.refreshButton)
sep = hou.qt.Separator()
if False: # Type hint
sep = QFrame
sep.setFixedWidth(2)
sep.setFrameShape(QFrame.VLine)
topLayout.addWidget(sep)
viewModeButtonGroup = QButtonGroup(self)
viewModeButtonGroup.setExclusive(True)
self.treeViewButton = QPushButton()
self.treeViewButton.setFixedSize(26, 26)
self.treeViewButton.setToolTip('Tree View\t\tCtrl+1')
self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18))
self.treeViewButton.setIconSize(QSize(18, 18))
self.treeViewButton.setCheckable(True)
viewModeButtonGroup.addButton(self.treeViewButton)
topLayout.addWidget(self.treeViewButton)
self.tableViewButton = QPushButton()
self.tableViewButton.setFixedSize(26, 26)
self.tableViewButton.setToolTip('Table View\tCtrl+2')
self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18))
self.tableViewButton.setIconSize(QSize(18, 18))
self.tableViewButton.setCheckable(True)
self.tableViewButton.toggle()
viewModeButtonGroup.addButton(self.tableViewButton)
topLayout.addWidget(self.tableViewButton)
# A widget can only occupy one place in a layout, so re-adding `sep` would
# just move it; create a fresh separator for this position instead.
sep = hou.qt.Separator()
sep.setFixedWidth(2)
sep.setFrameShape(QFrame.VLine)
topLayout.addWidget(sep)
self.searchField = SearchField()
self.searchField.setToolTip('Search\tCtrl+F, F3')
topLayout.addWidget(self.searchField)
searchModeButtonGroup = QButtonGroup(self)
searchModeButtonGroup.setExclusive(True)
self.wholeSearchButton = QPushButton()
self.wholeSearchButton.setFixedSize(26, 26)
self.wholeSearchButton.setCheckable(True)
self.wholeSearchButton.setToolTip('Whole word search')
self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18))
self.wholeSearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.wholeSearchButton)
topLayout.addWidget(self.wholeSearchButton)
self.fuzzySearchButton = QPushButton()
self.fuzzySearchButton.setFixedSize(26, 26)
self.fuzzySearchButton.setCheckable(True)
self.fuzzySearchButton.toggle()
self.fuzzySearchButton.setToolTip('Fuzzy search')
self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18))
self.fuzzySearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.fuzzySearchButton)
topLayout.addWidget(self.fuzzySearchButton)
self.patternSearchButton = QPushButton()
self.patternSearchButton.setFixedSize(26, 26)
self.patternSearchButton.setCheckable(True)
self.patternSearchButton.setToolTip('Search by Pattern')
self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18))
self.patternSearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.patternSearchButton)
topLayout.addWidget(self.patternSearchButton)
self.regexSearchButton = QPushButton()
self.regexSearchButton.setFixedSize(26, 26)
self.regexSearchButton.setCheckable(True)
self.regexSearchButton.setToolTip('Search by Regular Expression')
self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18))
self.regexSearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.regexSearchButton)
topLayout.addWidget(self.regexSearchButton)
# Again, use a distinct separator widget rather than re-adding the last one.
sep = hou.qt.Separator()
sep.setFixedWidth(2)
sep.setFrameShape(QFrame.VLine)
topLayout.addWidget(sep)
topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\tF1'))
middleLayout = QHBoxLayout()
middleLayout.setContentsMargins(4, 0, 0, 4)
middleLayout.setSpacing(4)
self.viewLayout = QStackedLayout(middleLayout)
model = QFileSystemModel()
model.setRootPath('C:/')
treeView = BrowserTreeView()
treeView.setModel(model)
treeView.setRootIndex(model.index('C:/'))
self.viewLayout.addWidget(treeView)
tableView = BrowserTableView()
tableView.setModel(model)
tableView.setRootIndex(model.index('C:/'))
tableView.setSelectionModel(treeView.selectionModel())
self.viewLayout.addWidget(tableView)
self.viewLayout.setCurrentIndex(1)
self.treeViewButton.clicked.connect(self.switchToTreeView)
self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1'))
self.tableViewButton.clicked.connect(self.switchToTableView)
self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2'))
bottomLayout = QHBoxLayout()
bottomLayout.setContentsMargins(4, 0, 4, 4)
bottomLayout.setSpacing(2)
settingsButton = QPushButton()
settingsButton.setFixedSize(26, 26)
settingsButton.setToolTip('Settings')
settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18))
settingsButton.setIconSize(QSize(18, 18))
bottomLayout.addWidget(settingsButton)
spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored)
bottomLayout.addSpacerItem(spacer)
self.scaleSlider = Slider()
self.scaleSlider.setDefaultValue(50)
self.scaleSlider.setFixedWidth(120)
self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100))
bottomLayout.addWidget(self.scaleSlider)
mainLayout = QVBoxLayout(self)
mainLayout.setContentsMargins(0, 0, 0, 0)
mainLayout.setSpacing(4)
mainLayout.addLayout(topLayout)
mainLayout.addLayout(middleLayout)
mainLayout.addLayout(bottomLayout)
def switchToTreeView(self):
self.viewLayout.setCurrentIndex(0)
self.scaleSlider.hide()
self.treeViewButton.setChecked(True)
def switchToTableView(self):
self.viewLayout.setCurrentIndex(1)
self.scaleSlider.show()
self.tableViewButton.setChecked(True)
def keyPressEvent(self, event):
if False: # Type hint
event = QKeyEvent
key = event.key()
mod = event.modifiers()
if mod == Qt.NoModifier and key == Qt.Key_F5:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_F:
self.searchField.setFocus()
elif mod == Qt.NoModifier and key == Qt.Key_F3:
self.searchField.setFocus()
elif mod == Qt.ControlModifier and key == Qt.Key_Equal:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_Minus:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_1:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_2:
pass
elif mod == Qt.NoModifier and key == Qt.Key_F1:
pass
else:
super(ContentBrowser, self).keyPressEvent(event)
if __name__ == '__main__':
app = QApplication([])
window = ContentBrowser()
window.show()
app.exec_()
| 37.21365
| 106
| 0.660633
| 1,186
| 12,541
| 6.908938
| 0.228499
| 0.007811
| 0.008787
| 0.012082
| 0.16866
| 0.127532
| 0.109226
| 0.082866
| 0.071272
| 0.041982
| 0
| 0.016994
| 0.239853
| 12,541
| 336
| 107
| 37.324405
| 0.842547
| 0.006299
| 0
| 0.197133
| 0
| 0
| 0.033406
| 0.003533
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0681
| false
| 0.021505
| 0.035842
| 0.003584
| 0.143369
| 0.003584
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6cf42a0947849b7e11a5ffae5ba378599d9f7e
| 1,106
|
py
|
Python
|
rt-thread/applications/server/udp_sender.py
|
luhuadong/stm32f769-disco-demo
|
c7fb0d627b02c3f87959f43f1447bc79f62a7099
|
[
"Apache-2.0"
] | null | null | null |
rt-thread/applications/server/udp_sender.py
|
luhuadong/stm32f769-disco-demo
|
c7fb0d627b02c3f87959f43f1447bc79f62a7099
|
[
"Apache-2.0"
] | null | null | null |
rt-thread/applications/server/udp_sender.py
|
luhuadong/stm32f769-disco-demo
|
c7fb0d627b02c3f87959f43f1447bc79f62a7099
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
"""
UDP sender
"""
import socket
import time
import sys
smsg = b'\xaa\x08\xfe\x00\xc9\xe6\x5f\xee'
def main():
ip_port = ('192.168.3.188', 8888)
if len(sys.argv) < 2:
port = 8888
else:
port = int(sys.argv[1])
# 1. Create a UDP socket
udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# 2. Bind the local address and port
udp_socket.bind(('', port))
cnt = 100
loop = 4
print("send %d...", cnt*loop)
# 3. Send the data in a loop
while cnt > 0:
#loop = 10
#while loop > 0:
for i in range(0, loop):
udp_socket.sendto(smsg, ip_port)
print('.', end=' ')
#loop = loop -1
#recv_data = udp_socket.recvfrom(1024)
#print(recv_data.decode('gbk'))
#print(recv_data.decode('utf-8'))
#print('.', end=' ')
#data = recv_data.decode('utf-8')
#print('0x%x'%data)
cnt = cnt - 1
time.sleep(0.005)
print("")
print("finished")
# 4. Close the socket
udp_socket.close()
print("close")
if __name__ == '__main__':
main()
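# Illustrative companion receiver (an addition for this note, not part of
# the original script): bind the port this sender targets and print the
# incoming datagrams.
#
#   import socket
#   s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   s.bind(('', 8888))
#   while True:
#       data, addr = s.recvfrom(1024)
#       print(addr, data.hex())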
| 17.83871
| 65
| 0.513562
| 149
| 1,106
| 3.671141
| 0.516779
| 0.082267
| 0.076782
| 0.06947
| 0.084095
| 0.084095
| 0
| 0
| 0
| 0
| 0
| 0.071809
| 0.320072
| 1,106
| 61
| 66
| 18.131148
| 0.655585
| 0.251356
| 0
| 0
| 0
| 0
| 0.096894
| 0.039752
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.111111
| 0
| 0.148148
| 0.185185
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed6e6d96a4c0121238dbb61b6a4a506e75d9c0bd
| 1,007
|
py
|
Python
|
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
|
Andy-Wilkinson/ChemMLToolk
|
83efc7ea66d2def860a3e04ccd70d77fb689fddc
|
[
"MIT"
] | 1
|
2019-10-30T03:43:24.000Z
|
2019-10-30T03:43:24.000Z
|
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
|
Andy-Wilkinson/ChemMLToolk
|
83efc7ea66d2def860a3e04ccd70d77fb689fddc
|
[
"MIT"
] | 2
|
2021-11-28T21:09:30.000Z
|
2021-11-28T21:09:39.000Z
|
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
|
Andy-Wilkinson/ChemMLToolkit
|
83efc7ea66d2def860a3e04ccd70d77fb689fddc
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
class VariableScheduler(tf.keras.callbacks.Callback):
"""Schedules an arbitary variable during training.
Arguments:
variable: The variable to modify the value of.
schedule: A function that takes an epoch index (integer, indexed
from 0) and current variable value as input and returns a new
value to assign to the variable as output.
verbose: int. 0: quiet, 1: update messages.
"""
def __init__(self, variable, schedule, verbose=0):
super(VariableScheduler, self).__init__()
self.variable = variable
self.schedule = schedule
self.verbose = verbose
def on_epoch_begin(self, epoch, logs=None):
value = self.variable.read_value()
value = self.schedule(epoch, value)
self.variable.assign(value)
if self.verbose > 0:
print(f'\nEpoch {epoch + 1}: VariableScheduler assigning '
f'variable {self.variable.name} to {value}.')
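# Illustrative usage (an addition for this note, not part of the original
# module): linearly warm a scalar tf.Variable toward 1.0 over the first ten
# epochs by driving the callback hook directly.
def _demo_variable_scheduler():
    var = tf.Variable(0.0)
    scheduler = VariableScheduler(
        variable=var,
        schedule=lambda epoch, value: min(1.0, epoch / 10.0),
    )
    scheduler.on_epoch_begin(epoch=5)  # assigns min(1.0, 5 / 10.0) == 0.5
    assert abs(float(var.read_value()) - 0.5) < 1e-6
# In real training the same object is simply passed to
# model.fit(..., callbacks=[scheduler]).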
| 37.296296
| 73
| 0.646475
| 122
| 1,007
| 5.245902
| 0.491803
| 0.09375
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.26713
| 1,007
| 26
| 74
| 38.730769
| 0.859079
| 0.345581
| 0
| 0
| 0
| 0
| 0.144231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.071429
| 0
| 0.285714
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed71593db0e5552171798bc1852cca8f7c4d9f3e
| 2,285
|
py
|
Python
|
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
|
mastermind88/dash
|
760af721980e18d91bdbc4e204d1d063c7ed325c
|
[
"MIT"
] | null | null | null |
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
|
mastermind88/dash
|
760af721980e18d91bdbc4e204d1d063c7ed325c
|
[
"MIT"
] | null | null | null |
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
|
mastermind88/dash
|
760af721980e18d91bdbc4e204d1d063c7ed325c
|
[
"MIT"
] | null | null | null |
from dash import Dash, Input, Output, dcc, html
from dash.exceptions import PreventUpdate
def test_dddo001_dynamic_options(dash_dcc):
dropdown_options = [
{"label": "New York City", "value": "NYC"},
{"label": "Montreal", "value": "MTL"},
{"label": "San Francisco", "value": "SF"},
]
app = Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
Output("my-dynamic-dropdown", "options"),
[Input("my-dynamic-dropdown", "search_value")],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in dropdown_options if search_value in o["label"]]
dash_dcc.start_server(app)
# Get the inner input used for search value.
input_ = dash_dcc.find_element("#my-dynamic-dropdown input")
# Focus on the input to open the options menu
input_.send_keys("x")
# No options to be found with `x` in them, should show the empty message.
dash_dcc.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption")
# Should show all options.
assert len(options) == 3
# Searching for `on`
input_.send_keys("n")
options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption")
assert len(options) == 1
print(options)
assert options[0].text == "Montreal"
assert dash_dcc.get_logs() == []
def test_dddo002_array_comma_value(dash_dcc):
app = Dash(__name__)
dropdown = dcc.Dropdown(
options=["New York, NY", "Montreal, QC", "San Francisco, CA"],
value=["San Francisco, CA"],
multi=True,
)
app.layout = html.Div(dropdown)
dash_dcc.start_server(app)
dash_dcc.wait_for_text_to_equal("#react-select-2--value-0", "San Francisco, CA\n ")
assert dash_dcc.get_logs() == []
def test_dddo003_value_no_options(dash_dcc):
app = Dash(__name__)
app.layout = html.Div(
[
dcc.Dropdown(value="foobar", id="dropdown"),
]
)
dash_dcc.start_server(app)
assert dash_dcc.get_logs() == []
dash_dcc.wait_for_element("#dropdown")
| 27.202381
| 87
| 0.647265
| 299
| 2,285
| 4.705686
| 0.327759
| 0.074627
| 0.072495
| 0.03838
| 0.284293
| 0.208955
| 0.167733
| 0.093817
| 0.093817
| 0
| 0
| 0.007834
| 0.217943
| 2,285
| 83
| 88
| 27.53012
| 0.779519
| 0.088403
| 0
| 0.207547
| 0
| 0
| 0.210881
| 0.034665
| 0
| 0
| 0
| 0
| 0.113208
| 1
| 0.075472
| false
| 0
| 0.037736
| 0
| 0.132075
| 0.018868
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed725c3c070133c88aad862a90d3bfcbc58edf09
| 768
|
py
|
Python
|
Server.py
|
dipghoshraj/live-video-streming-with-web-socket
|
dda924e22a4c40d225ec39dd94ee1e489233c403
|
[
"BSD-2-Clause"
] | 3
|
2020-06-30T03:49:46.000Z
|
2021-07-17T16:15:55.000Z
|
Server.py
|
dipghoshraj/live-video-streming-with-web-socket
|
dda924e22a4c40d225ec39dd94ee1e489233c403
|
[
"BSD-2-Clause"
] | null | null | null |
Server.py
|
dipghoshraj/live-video-streming-with-web-socket
|
dda924e22a4c40d225ec39dd94ee1e489233c403
|
[
"BSD-2-Clause"
] | null | null | null |
import cv2
import io
import socket
import struct
import time
import pickle
import zlib
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')
cam = cv2.VideoCapture("E:/songs/Attention Charlie Puth(GabbarWorld.com) 1080p.mp4")
cam.set(3, 320)
cam.set(4, 240)
img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
while True:
    ret, frame = cam.read()
    if not ret:
        break  # end of file/stream: avoid passing a None frame to imencode
    result, frame = cv2.imencode('.jpg', frame, encode_param)
# data = zlib.compress(pickle.dumps(frame, 0))
data = pickle.dumps(frame, 0)
size = len(data)
print("{}: {}".format(img_counter, size))
client_socket.sendall(struct.pack(">L", size) + data)
img_counter += 1
cam.release()
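A hedged sketch of the matching receiver for the length-prefixed framing above (struct.pack(">L", size) + data): read the 4-byte big-endian size, then the pickled JPEG buffer. Host, port, and chunk size mirror the client; disconnect handling is omitted:

import pickle
import socket
import struct

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(('127.0.0.1', 8485))
srv.listen(1)
conn, _ = srv.accept()
header = struct.calcsize(">L")  # 4-byte length prefix
buf = b''
while True:
    while len(buf) < header:          # read until the prefix is complete
        buf += conn.recv(4096)
    size = struct.unpack(">L", buf[:header])[0]
    buf = buf[header:]
    while len(buf) < size:            # read until the frame is complete
        buf += conn.recv(4096)
    frame = pickle.loads(buf[:size])  # the JPEG-encoded frame buffer
    buf = buf[size:]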
| 29.538462
| 84
| 0.71224
| 115
| 768
| 4.643478
| 0.565217
| 0.089888
| 0.067416
| 0.06367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049849
| 0.138021
| 768
| 25
| 85
| 30.72
| 0.756798
| 0.057292
| 0
| 0
| 0
| 0
| 0.112656
| 0.029207
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.291667
| 0
| 0.291667
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed74d0762a12ab84a6b4c685f57a0a532e003b99
| 7,059
|
py
|
Python
|
hal/agent/tf2_utils.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-13T21:48:52.000Z
|
2022-03-13T21:48:52.000Z
|
hal/agent/tf2_utils.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
hal/agent/tf2_utils.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Tensorflow 2.0.
Partially adapted from:
https://www.tensorflow.org/tutorials/text/image_captioning
"""
# Lint as: python3
# pylint: disable=invalid-name
from __future__ import absolute_import
from __future__ import division
import tensorflow as tf
def film_params(sentence_embedding, n_layer_channel):
"""Generate FiLM parameters from a sentence embedding.
Generate FiLM parameters from a sentence embedding. This method assumes a
batch dimension exists.
Args:
sentence_embedding: a tensor containing batched sentenced embedding to be
transformed
n_layer_channel: a list of integers specifying how many channels are at
each hidden layer to be FiLM'ed
Returns:
a tuple of tensors the same length as n_layer_channel. Each element
contains all gamma_i and beta_i for a single hidden layer.
"""
n_total = sum(n_layer_channel) * 2
# One dense layer emits every gamma and beta in a single vector.
all_params = tf.keras.layers.Dense(
    n_total, activation=tf.nn.relu)(sentence_embedding)
return tf.split(all_params, [c * 2 for c in n_layer_channel], 1)
def stack_conv_layer(layer_cfg, padding='same'):
"""Stack convolution layers per layer_cfg.
Args:
layer_cfg: list of integer tuples specifying the parameter each layer;
each tuple should be (channel, kernel size, strides)
padding: what kind of padding the conv layers use
Returns:
the keras model with stacked conv layers
"""
layers = []
for cfg in layer_cfg[:-1]:
layers.append(
tf.keras.layers.Conv2D(
filters=cfg[0],
kernel_size=cfg[1],
strides=cfg[2],
activation=tf.nn.relu,
padding=padding))
final_cfg = layer_cfg[-1]
layers.append(
tf.keras.layers.Conv2D(
final_cfg[0], final_cfg[1], final_cfg[2], padding=padding))
return tf.keras.Sequential(layers)
def stack_dense_layer(layer_cfg):
"""Stack Dense layers.
Args:
layer_cfg: list of integer specifying the number of units at each layer
Returns:
the keras model with stacked dense layers
"""
layers = []
for cfg in layer_cfg[:-1]:
layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu))
layers.append(tf.keras.layers.Dense(layer_cfg[-1]))
return tf.keras.Sequential(layers)
def soft_variables_update(source_variables, target_variables, polyak_rate=1.0):
"""Update the target variables using exponential moving average.
Specifically, v_t' = v_t * polyak_rate + (1 - polyak_rate) * v_s
Args:
source_variables: the moving average variables
target_variables: the new observations
polyak_rate: rate of moving average
Returns:
Operation that does the update
"""
updates = []
for (v_s, v_t) in zip(source_variables, target_variables):
v_t.shape.assert_is_compatible_with(v_s.shape)
def update_fn(v1, v2):
"""Update variables."""
# For not trainable variables do hard updates.
return v1.assign(polyak_rate * v1 + (1 - polyak_rate) * v2)
update = update_fn(v_t, v_s)
updates.append(update)
return updates
def vector_tensor_product(a, b):
""""Returns keras layer that perfrom a outer product between a and b."""
# a shape: [B, ?, d], b shape: [B, ?, d]
shape_layer = tf.keras.layers.Lambda(tf.shape)
shape = shape_layer(b)
shape_numpy = b.get_shape()
variable_length = shape[1] # variable_len = ?
expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))
expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1]))
a = expand_dims_layer_1(a) # a shape: [B, ?, 1, d]
b = expand_dims_layer_2(b) # a shape: [B, ?, 1, d]
tile_layer = tf.keras.layers.Lambda(
lambda inputs: tf.tile(inputs[0], multiples=inputs[1]))
a = tile_layer((a, [1, 1, variable_length, 1])) # a shape: [B, ?, ?, d]
b = tile_layer((b, [1, 1, variable_length, 1])) # b shape: [B, ?, ?, d]
b = tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B, ?, ?, d]
return tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?, 2*d]
class BahdanauAttention(tf.keras.Model):
"""Bahdanau Attention Layer.
Attributes:
W1: weights that process the feature
W2: weights that process the memory state
V: projection layer that projects the score vector to a scalar
"""
def __init__(self, units):
"""Initialize Bahdanau attention layer.
Args:
units: size of the dense layers
"""
super(BahdanauAttention, self).__init__()
self.W1 = tf.keras.layers.Dense(units)
self.W2 = tf.keras.layers.Dense(units)
self.V = tf.keras.layers.Dense(1)
def call(self, features, hidden):
# features(CNN_encoder output) shape == (batch_size, 64, embedding_dim)
# hidden shape == (batch_size, hidden_size)
# hidden_with_time_axis shape == (batch_size, 1, hidden_size)
hidden_with_time_axis = tf.expand_dims(hidden, 1)
# score shape == (batch_size, 64, hidden_size)
score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis))
# attention_weights shape == (batch_size, 64, 1)
# you get 1 at the last axis because you are applying score to self.V
attention_weights = tf.nn.softmax(self.V(score), axis=1)
# context_vector shape after sum == (batch_size, hidden_size)
context_vector = attention_weights * features
context_vector = tf.reduce_sum(context_vector, axis=1)
return context_vector, attention_weights
class GRUEnecoder(tf.keras.Model):
"""TF2.0 GRE encoder.
Attributes:
embedding: word embedding matrix
gru: the GRU layer
"""
def __init__(self, embedding_dim, units, vocab_size):
"""Initialize the GRU encoder.
Args:
embedding_dim: dimension of word embedding
units: number of units of the memory state
vocab_size: total number of vocabulary
"""
super(GRUEnecoder, self).__init__()
self._units = units
self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim)
self.gru = tf.keras.layers.GRU(
self._units,
return_sequences=True,
return_state=True,
recurrent_initializer='glorot_uniform')
def call(self, x, hidden):
# x shape after passing through embedding == (batch_size, 1, embedding_dim)
x = self.embedding(x)
# passing the concatenated vector to the GRU
output, state = self.gru(x)
return output, state
def reset_state(self, batch_size):
return tf.zeros((batch_size, self._units))
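A quick shape check for BahdanauAttention under assumed dimensions (batch 2, 64 feature positions of width 128, a 256-dim hidden state); the printed shapes follow the comments in call():

import tensorflow as tf

attn = BahdanauAttention(units=32)
features = tf.random.normal((2, 64, 128))
hidden = tf.random.normal((2, 256))
context, weights = attn(features, hidden)
print(context.shape)  # (2, 128)
print(weights.shape)  # (2, 64, 1)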
| 32.380734
| 79
| 0.696841
| 1,034
| 7,059
| 4.591876
| 0.268859
| 0.029486
| 0.043808
| 0.022746
| 0.157329
| 0.132477
| 0.064869
| 0.046335
| 0.046335
| 0.022746
| 0
| 0.01482
| 0.197053
| 7,059
| 217
| 80
| 32.529954
| 0.822865
| 0.47103
| 0
| 0.120482
| 0
| 0
| 0.005108
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 1
| 0.13253
| false
| 0
| 0.036145
| 0.012048
| 0.301205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed753328e567a24c6d1169588942c86a984af1ee
| 4,437
|
py
|
Python
|
wolk/logger_factory.py
|
Wolkabout/WolkConnect-Python-
|
11412e3f88911170f587b5e857d07ab41c8f52b5
|
[
"Apache-2.0"
] | 6
|
2016-12-19T13:36:44.000Z
|
2018-05-10T15:08:15.000Z
|
wolk/logger_factory.py
|
Wolkabout/WolkConnect-Python
|
11412e3f88911170f587b5e857d07ab41c8f52b5
|
[
"Apache-2.0"
] | 5
|
2019-02-23T09:37:12.000Z
|
2021-09-17T13:54:58.000Z
|
wolk/logger_factory.py
|
Wolkabout/WolkConnect-Python-
|
11412e3f88911170f587b5e857d07ab41c8f52b5
|
[
"Apache-2.0"
] | 3
|
2016-08-15T22:19:00.000Z
|
2017-12-28T09:48:37.000Z
|
"""LoggerFactory Module."""
# Copyright 2020 WolkAbout Technology s.r.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import List
from typing import Optional
class LoggerFactory:
"""Factory for issuing ready to use loggers in other modules."""
def __init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore
"""
Create a factory that will give loggers through calls to get_logger().
:param level: Set the desired logging level
:type level: int or None
:param console: Should the log messages be outputted to the console
:type console: bool or None
:param log_file: Name of the log file to output to
:type log_file: str or None
"""
self.level = level
self.device_key = None
self.console = console
self.log_file = log_file
self.loggers: List[logging.Logger] = []
def set_device_key(self, device_key: str) -> None:
"""
Set device key.
:param device_key: Device key
:type device_key: str
"""
self.device_key = device_key
def get_logger(
self, name: str, level: Optional[int] = None
) -> logging.Logger:
"""
Return a ready to use logger instance.
:param name: Name of the logger
:type name: str
:param level: Override the log level
:type level: int or None
:returns: Logger instance
:rtype: logger
"""
logger = logging.getLogger(name)
if level is not None:
logger.setLevel(level)
else:
logger.setLevel(self.level)
if self.device_key is not None:
formatter = logging.Formatter(
"%(asctime)s - '"
+ str(self.device_key)
+ "' - %(levelname)s [%(filename)s:%(lineno)s"
+ " - %(funcName)s()] - %(message)s"
)
else:
formatter = logging.Formatter(
"%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s"
+ " - %(funcName)s()] - %(message)s"
)
if self.console:
console_handler = logging.StreamHandler()
if level is not None:
console_handler.setLevel(level)
else:
console_handler.setLevel(self.level)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
if self.log_file is not None:
file_handler = logging.FileHandler(self.log_file)
if level is not None:
file_handler.setLevel(level)
else:
file_handler.setLevel(self.level)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
self.loggers.append(logger)
return logger
# Logging levels available: debug, info, warning, error, critical, notset (see LEVELS below)
logger_factory = LoggerFactory(level=logging.INFO)
LEVELS = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
"critical": logging.CRITICAL,
"notset": logging.NOTSET,
}
def logging_config(level: str, log_file: Optional[str] = None) -> None:
"""
Set desired log level and designate a log file.
:param level: Desired log level; available: debug, info, warning, error, critical, notset
:type level: str
:param log_file: path to log file
:type log_file: str or None
"""
if log_file is not None:
logger_factory.log_file = log_file
if level not in LEVELS:
print(f"Invalid level '{level}'")
return
if LEVELS[level] == logger_factory.level:
return
logger_factory.level = LEVELS[level]
for logger in logger_factory.loggers:
logger.setLevel(logger_factory.level)
for handler in logger.handlers:
handler.setLevel(logger_factory.level)
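A minimal usage sketch of the module-level factory; the file name is an example. logging_config() should run before get_logger() so the new logger picks up the file sink:

logging_config('debug', log_file='wolk.log')  # example path
log = logger_factory.get_logger(__name__)
log.debug('connected')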
| 30.8125
| 88
| 0.610773
| 539
| 4,437
| 4.940631
| 0.278293
| 0.042058
| 0.020278
| 0.013519
| 0.160721
| 0.064589
| 0.032294
| 0.032294
| 0.032294
| 0.032294
| 0
| 0.002569
| 0.298174
| 4,437
| 143
| 89
| 31.027972
| 0.852601
| 0.334235
| 0
| 0.180556
| 0
| 0
| 0.08502
| 0.017667
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.041667
| 0
| 0.152778
| 0.013889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7563752fb4afab443eb59eb4484ffff4182b40
| 1,830
|
py
|
Python
|
raw.py
|
andre-marcos-perez/data-pipeline-demo
|
2647cce6e90d39798eda352608dc0f6d6ab5255a
|
[
"MIT"
] | 3
|
2021-05-20T23:24:13.000Z
|
2021-08-20T12:23:18.000Z
|
raw.py
|
andre-marcos-perez/data-pipeline-demo
|
2647cce6e90d39798eda352608dc0f6d6ab5255a
|
[
"MIT"
] | null | null | null |
raw.py
|
andre-marcos-perez/data-pipeline-demo
|
2647cce6e90d39798eda352608dc0f6d6ab5255a
|
[
"MIT"
] | 3
|
2021-05-26T14:49:20.000Z
|
2022-03-21T23:17:54.000Z
|
import json
import gzip
import requests
from datetime import datetime
import pendulum
import boto3
from botocore.exceptions import ClientError
from util.log import Log
from settings.aws_settings import AWSSettings
from settings.telegram_settings import TelegramSettings
def lambda_handler(event: dict, context: dict) -> dict:
log = Log.setup(name='logger')
aws_settings = AWSSettings()
telegram_settings = TelegramSettings()
timezone = pendulum.timezone('America/Sao_Paulo')
date = datetime.now(tz=timezone).strftime('%Y-%m-%d')
timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S')
try:
token = telegram_settings.access_token
base_url = f"https://api.telegram.org/bot{token}"
data = json.loads(event["body"])
chat_id = data["message"]["chat"]["id"]
if chat_id == telegram_settings.chat_id:
client = boto3.client('s3')
bucket = aws_settings.raw_bucket
root_path = aws_settings.root_path
try:
with open(f"{root_path}/{timestamp}.json", mode='w', encoding='utf8') as fp:
json.dump(data, fp)
client.upload_file(f"{root_path}/{timestamp}.json", bucket, f"{date}/{timestamp}.json")
except ClientError as exc:
raise exc
else:
text = "I can't talk to strangers, sorry mate!"
data = {"text": text, "chat_id": chat_id}
data = gzip.compress(json.dumps(data).encode('utf-8'))
headers = {'content-type': 'application/json', 'content-encoding': 'gzip'}
url = base_url + "/sendMessage"
requests.post(url=url, data=data, headers=headers)
except Exception as exc:
    log.error(msg=exc)
finally:
    # Always return 200 so Telegram does not keep retrying the webhook.
    return dict(statusCode="200")
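A hedged local-invocation sketch: hand the handler a Telegram-style webhook event (the chat id and text are made up; a real run also needs the AWS and Telegram settings objects to resolve):

fake_event = {
    'body': json.dumps({'message': {'chat': {'id': 42}, 'text': 'hi'}})
}
print(lambda_handler(fake_event, context={}))  # -> {'statusCode': '200'}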
| 31.016949
| 103
| 0.62623
| 226
| 1,830
| 4.964602
| 0.469027
| 0.032086
| 0.023173
| 0.037433
| 0.096257
| 0.057041
| 0.057041
| 0.057041
| 0
| 0
| 0
| 0.005818
| 0.248634
| 1,830
| 58
| 104
| 31.551724
| 0.810182
| 0
| 0
| 0.046512
| 0
| 0
| 0.162842
| 0.043169
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.232558
| 0
| 0.27907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed75b8a825782f227e671daaa305387cdcbcd9d0
| 2,688
|
py
|
Python
|
v2_hier/site_stat.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
v2_hier/site_stat.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
v2_hier/site_stat.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
"""Collecting statistics of site visits."""
import collections
from datetime import datetime
from functools import reduce
from django.utils.translation import gettext_lazy as _
from hier.models import IPInfo, AccessLog, SiteStat
from v2_hier.utils import APPS
def get_site_stat(user):
"""Processing a new portion of log file records.
The site applications that users have visited and information about their IP addresses will be shown.
"""
TOTAL_IP = _('total different').capitalize() + ' IP'
TOTAL_LOG = _('total log records').capitalize()
NEW_LOG = _('new log records').capitalize()
cnt = collections.Counter()
cnt[TOTAL_IP] = len(IPInfo.objects.all())
cnt[TOTAL_LOG] = len(AccessLog.objects.all())
#Determining the last previously processed log file entry
last = datetime.min
site_stat = None
if SiteStat.objects.filter(user=user.id).exists():
site_stat = SiteStat.objects.filter(user = user.id).get()
if site_stat.record and site_stat.record.event:
last = site_stat.record.event
# New records
records = AccessLog.objects.filter(event__gt=last).order_by('-event')
cnt[NEW_LOG] += len(records)
# Save last processed log record
last_rec = None
if (len(records) > 0):
last_rec = records[0]
if site_stat:
site_stat.record = last_rec
site_stat.save()
else:
SiteStat.objects.create(user=user, record=last_rec)
#raise Exception(last_rec.event)
apps = {}
for rec in records:
uri = valid_uri(rec)
if not uri:
continue
# Determining the access to the site application
a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS))
if not a_app:
continue
app = a_app[0]
if not app in apps:
apps[app] = {}
host = str(rec.host.info())
#raise Exception('aaa = ', aaa)
if not host in apps[app]:
apps[app][host] = []
page = '{} {}'.format(rec.method, uri)
if not page in apps[app][host]:
apps[app][host].append(page)
return cnt.most_common(), apps
def valid_uri(rec):
if (rec.status >= 400) or (rec.status == 301):
return None
if 'favicon.ico' in rec.uri or '/static/' in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri:
return None
if ('/?' in rec.uri) and (rec.method != 'POST'):
uri = rec.uri.split('?')[0]
else:
uri = rec.uri
uri = uri.replace('/ru/', '/').replace('/en/', '/')
if (uri == '/'):
return None
return uri
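A quick check of the URI filter using stand-in records that expose only the attributes valid_uri() reads (a hypothetical helper class, not part of the module):

class Rec:
    def __init__(self, uri, status=200, method='GET'):
        self.uri, self.status, self.method = uri, status, method

print(valid_uri(Rec('/ru/todo/')))       # '/todo/' (locale prefix stripped)
print(valid_uri(Rec('/static/app.js')))  # None (static noise rejected)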
| 31.623529
| 115
| 0.599702
| 358
| 2,688
| 4.399441
| 0.335196
| 0.045714
| 0.025397
| 0.019048
| 0.039365
| 0.039365
| 0
| 0
| 0
| 0
| 0
| 0.006684
| 0.276414
| 2,688
| 84
| 116
| 32
| 0.803085
| 0.146577
| 0
| 0.116667
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.1
| 0
| 0.216667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed75ce190b9f65a6716720968d522d43762ebdb0
| 16,643
|
py
|
Python
|
cli/pcluster/utils.py
|
mkosmo/cfncluster
|
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
|
[
"Apache-2.0"
] | 1
|
2021-04-08T05:08:07.000Z
|
2021-04-08T05:08:07.000Z
|
cli/pcluster/utils.py
|
mkosmo/cfncluster
|
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
|
[
"Apache-2.0"
] | null | null | null |
cli/pcluster/utils.py
|
mkosmo/cfncluster
|
f1817cc187f2b92127d48f16debb4b7ea4f4a80f
|
[
"Apache-2.0"
] | 1
|
2019-05-10T16:03:19.000Z
|
2019-05-10T16:03:19.000Z
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
# the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
# fmt: off
from __future__ import absolute_import, print_function # isort:skip
from future import standard_library # isort:skip
standard_library.install_aliases()
# fmt: on
import json
import logging
import os
import sys
import time
import urllib.request
import zipfile
from io import BytesIO
import boto3
import pkg_resources
from botocore.exceptions import ClientError
LOGGER = logging.getLogger(__name__)
PCLUSTER_STACK_PREFIX = "parallelcluster-"
PCLUSTER_ISSUES_LINK = "https://github.com/aws/aws-parallelcluster/issues"
def get_stack_name(cluster_name):
return PCLUSTER_STACK_PREFIX + cluster_name
def get_region():
"""Get AWS_DEFAULT_REGION from the environment."""
return os.environ.get("AWS_DEFAULT_REGION")
def get_partition():
"""Get partition for the AWS_DEFAULT_REGION set in the environment."""
return "aws-us-gov" if get_region().startswith("us-gov") else "aws"
def paginate_boto3(method, **kwargs):
"""
Return a generator for a boto3 call, this allows pagination over an arbitrary number of responses.
:param method: boto3 method
:param kwargs: arguments to method
:return: generator with boto3 results
"""
client = method.__self__
paginator = client.get_paginator(method.__name__)
for page in paginator.paginate(**kwargs).result_key_iters():
for result in page:
yield result
def create_s3_bucket(bucket_name, region):
"""
Create a new S3 bucket.
:param bucket_name: name of the S3 bucket to create
:param region: aws region
"""
s3_client = boto3.client("s3")
""" :type : pyboto3.s3 """
try:
if region != "us-east-1":
s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": region})
else:
s3_client.create_bucket(Bucket=bucket_name)
except s3_client.exceptions.BucketAlreadyOwnedByYou:
print("Bucket already exists")
def delete_s3_bucket(bucket_name):
"""
Delete an S3 bucket together with all stored objects.
:param bucket_name: name of the S3 bucket to delete
"""
try:
bucket = boto3.resource("s3").Bucket(bucket_name)
bucket.objects.all().delete()
bucket.delete()
except boto3.client("s3").exceptions.NoSuchBucket:
pass
except ClientError:
print("Failed to delete bucket %s. Please delete it manually." % bucket_name)
def zip_dir(path):
"""
Create a zip archive containing all files and dirs rooted in path.
The archive is created in memory and a file handler is returned by the function.
:param path: directory containing the resources to archive.
:return file handler pointing to the compressed archive.
"""
file_out = BytesIO()
with zipfile.ZipFile(file_out, "w", zipfile.ZIP_DEFLATED) as ziph:
for root, _, files in os.walk(path):
for file in files:
ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path))
file_out.seek(0)
return file_out
def upload_resources_artifacts(bucket_name, root):
"""
Upload to the specified S3 bucket the content of the directory rooted in root path.
All dirs contained in root dir will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip.
All files contained in root dir will be uploaded to $bucket_name.
:param bucket_name: name of the S3 bucket where files are uploaded
:param root: root directory containing the resources to upload.
"""
bucket = boto3.resource("s3").Bucket(bucket_name)
for res in os.listdir(root):
if os.path.isdir(os.path.join(root, res)):
bucket.upload_fileobj(zip_dir(os.path.join(root, res)), "%s/artifacts.zip" % res)
elif os.path.isfile(os.path.join(root, res)):
bucket.upload_file(os.path.join(root, res), res)
def _get_json_from_s3(region, file_name):
"""
Get a file from the region's S3 bucket and parse its content as json.
:param region: AWS Region
:param file_name the object name to get
:return: a json object representing the file content
:raises ClientError if unable to download the file
:raises ValueError if unable to decode the file content
"""
bucket_name = "{0}-aws-parallelcluster".format(region)
file_contents = boto3.resource("s3").Object(bucket_name, file_name).get()["Body"].read().decode("utf-8")
return json.loads(file_contents)
def get_supported_features(region, feature):
"""
Get a json object containing the attributes supported by a feature, for example.
{
"Features": {
"efa": {
"instances": ["c5n.18xlarge", "p3dn.24xlarge", "i3en.24xlarge"],
"baseos": ["alinux", "centos7"],
"schedulers": ["sge", "slurm", "torque"]
},
"batch": {
"instances": ["r3.8xlarge", ..., "m5.4xlarge"]
}
}
}
:param region: AWS Region
:param feature: the feature to search for, i.e. "efa" "awsbatch"
:return: json object containing all the attributes supported by feature
"""
try:
features = _get_json_from_s3(region, "features/feature_whitelist.json")
supported_features = features.get("Features").get(feature)
except (ValueError, ClientError, KeyError) as e:
if isinstance(e, ClientError):
code = e.response.get("Error").get("Code")
if code == "InvalidAccessKeyId":
error(e.response.get("Error").get("Message"))
error(
"Failed validate {0}. This is probably a bug on our end. "
"Please submit an issue {1}".format(feature, PCLUSTER_ISSUES_LINK)
)
return supported_features
def get_instance_vcpus(region, instance_type):
"""
Get number of vcpus for the given instance type.
:param region: AWS Region
:param instance_type: the instance type to search for.
:return: the number of vcpus or -1 if the instance type cannot be found
or the pricing file cannot be retrieved/parsed
"""
try:
instances = _get_json_from_s3(region, "instances/instances.json")
vcpus = int(instances[instance_type]["vcpus"])
except (KeyError, ValueError, ClientError):
vcpus = -1
return vcpus
def get_supported_os(scheduler):
"""
Return a tuple of the os supported by parallelcluster for the specific scheduler.
:param scheduler: the scheduler for which we want to know the supported os
:return: a tuple of strings of the supported os
"""
return "alinux" if scheduler == "awsbatch" else "alinux", "centos6", "centos7", "ubuntu1604", "ubuntu1804"
def get_supported_schedulers():
"""
Return a tuple of the scheduler supported by parallelcluster.
:return: a tuple of strings of the supported scheduler
"""
return "sge", "torque", "slurm", "awsbatch"
def get_stack_output_value(stack_outputs, output_key):
"""
Get output value from Cloudformation Stack Output.
:param stack_outputs: Cloudformation Stack Outputs
:param output_key: Output Key
:return: OutputValue if that output exists, otherwise None
"""
return next((o.get("OutputValue") for o in stack_outputs if o.get("OutputKey") == output_key), None)
def get_stack(stack_name, cfn_client=None):
"""
Get the output for a DescribeStacks action for the given Stack.
:param stack_name: the CFN Stack name
:param cfn_client: boto3 cloudformation client
:return: the Stack data type
"""
try:
if not cfn_client:
cfn_client = boto3.client("cloudformation")
return cfn_client.describe_stacks(StackName=stack_name).get("Stacks")[0]
except (ClientError, IndexError) as e:
error(e.response.get("Error").get("Message"))
def verify_stack_creation(stack_name, cfn_client):
"""
Wait for the stack creation to be completed and notify if the stack creation fails.
:param stack_name: the stack name that we should verify
:param cfn_client: the CloudFormation client to use to verify stack status
:return: True if the creation was successful, false otherwise.
"""
status = get_stack(stack_name, cfn_client).get("StackStatus")
resource_status = ""
while status == "CREATE_IN_PROGRESS":
status = get_stack(stack_name, cfn_client).get("StackStatus")
events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")[0]
resource_status = ("Status: %s - %s" % (events.get("LogicalResourceId"), events.get("ResourceStatus"))).ljust(
80
)
sys.stdout.write("\r%s" % resource_status)
sys.stdout.flush()
time.sleep(5)
# print the last status update in the logs
if resource_status != "":
LOGGER.debug(resource_status)
if status != "CREATE_COMPLETE":
LOGGER.critical("\nCluster creation failed. Failed events:")
events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")
for event in events:
if event.get("ResourceStatus") == "CREATE_FAILED":
LOGGER.info(
" - %s %s %s",
event.get("ResourceType"),
event.get("LogicalResourceId"),
event.get("ResourceStatusReason"),
)
return False
return True
def get_templates_bucket_path():
"""Return a string containing the path of bucket."""
region = get_region()
s3_suffix = ".cn" if region.startswith("cn") else ""
return "https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/".format(
REGION=region, S3_SUFFIX=s3_suffix
)
def get_installed_version():
"""Get the version of the installed aws-parallelcluster package."""
return pkg_resources.get_distribution("aws-parallelcluster").version
def check_if_latest_version():
"""Check if the current package version is the latest one."""
try:
latest = json.loads(urllib.request.urlopen("https://pypi.python.org/pypi/aws-parallelcluster/json").read())[
"info"
]["version"]
if get_installed_version() < latest:
print("Info: There is a newer version %s of AWS ParallelCluster available." % latest)
except Exception:
pass
def warn(message):
"""Print a warning message."""
print("WARNING: {0}".format(message))
def error(message, fail_on_error=True):
"""Print an error message and Raise SystemExit exception to the stderr if fail_on_error is true."""
if fail_on_error:
sys.exit("ERROR: {0}".format(message))
else:
print("ERROR: {0}".format(message))
def get_cfn_param(params, key_name):
"""
Get parameter value from Cloudformation Stack Parameters.
:param params: Cloudformation Stack Parameters
:param key_name: Parameter Key
:return: ParameterValue if that parameter exists, otherwise None
"""
param_value = next((i.get("ParameterValue") for i in params if i.get("ParameterKey") == key_name), "NONE")
return param_value.strip()
def get_efs_mount_target_id(efs_fs_id, avail_zone):
"""
Search for a Mount Target Id in given availability zone for the given EFS file system id.
:param efs_fs_id: EFS file system Id
:param avail_zone: Availability zone to verify
:return: the mount_target_id or None
"""
mount_target_id = None
if efs_fs_id:
mount_targets = boto3.client("efs").describe_mount_targets(FileSystemId=efs_fs_id)
for mount_target in mount_targets.get("MountTargets"):
# Check to see if there is an existing mt in the az of the stack
mount_target_subnet = mount_target.get("SubnetId")
if avail_zone == get_avail_zone(mount_target_subnet):
mount_target_id = mount_target.get("MountTargetId")
return mount_target_id
def get_avail_zone(subnet_id):
avail_zone = None
try:
avail_zone = (
boto3.client("ec2").describe_subnets(SubnetIds=[subnet_id]).get("Subnets")[0].get("AvailabilityZone")
)
except ClientError as e:
LOGGER.debug(
"Unable to detect availability zone for subnet {0}.\n{1}".format(
subnet_id, e.response.get("Error").get("Message")
)
)
return avail_zone
def get_latest_alinux_ami_id():
"""Get latest alinux ami id."""
try:
alinux_ami_id = (
boto3.client("ssm")
.get_parameters_by_path(Path="/aws/service/ami-amazon-linux-latest")
.get("Parameters")[0]
.get("Value")
)
except ClientError as e:
error("Unable to retrieve Amazon Linux AMI id.\n{0}".format(e.response.get("Error").get("Message")))
return alinux_ami_id
def list_ec2_instance_types():
"""Return a list of all the instance types available on EC2, independent by the region."""
return boto3.client("ec2").meta.service_model.shape_for("InstanceType").enum
def get_master_server_id(stack_name):
"""Return the physical id of the master server, or [] if no master server."""
try:
resources = boto3.client("cloudformation").describe_stack_resource(
StackName=stack_name, LogicalResourceId="MasterServer"
)
return resources.get("StackResourceDetail").get("PhysicalResourceId")
except ClientError as e:
error(e.response.get("Error").get("Message"))
def _get_master_server_ip(stack_name):
"""
Get the IP Address of the MasterServer.
:param stack_name: The name of the cloudformation stack
:param config: Config object
:return private/public ip address
"""
ec2 = boto3.client("ec2")
master_id = get_master_server_id(stack_name)
if not master_id:
error("MasterServer not running. Can't SSH")
instance = ec2.describe_instances(InstanceIds=[master_id]).get("Reservations")[0].get("Instances")[0]
ip_address = instance.get("PublicIpAddress")
if ip_address is None:
ip_address = instance.get("PrivateIpAddress")
state = instance.get("State").get("Name")
if state != "running" or ip_address is None:
error("MasterServer: %s\nCannot get ip address.", state.upper())
return ip_address
def get_master_ip_and_username(cluster_name):
cfn = boto3.client("cloudformation")
try:
stack_name = get_stack_name(cluster_name)
stack_result = cfn.describe_stacks(StackName=stack_name).get("Stacks")[0]
stack_status = stack_result.get("StackStatus")
valid_status = ["CREATE_COMPLETE", "UPDATE_COMPLETE", "UPDATE_ROLLBACK_COMPLETE"]
invalid_status = ["DELETE_COMPLETE", "DELETE_IN_PROGRESS"]
if stack_status in invalid_status:
error("Unable to retrieve master_ip and username for a stack in the status: {0}".format(stack_status))
elif stack_status in valid_status:
outputs = stack_result.get("Outputs")
master_ip = get_stack_output_value(outputs, "MasterPublicIP") or _get_master_server_ip(stack_name)
username = get_stack_output_value(outputs, "ClusterUser")
else:
# Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running
master_ip = _get_master_server_ip(stack_name)
template = cfn.get_template(StackName=stack_name)
mappings = template.get("TemplateBody").get("Mappings").get("OSFeatures")
base_os = get_cfn_param(stack_result.get("Parameters"), "BaseOS")
username = mappings.get(base_os).get("User")
if not master_ip:
error("Failed to get cluster {0} ip.".format(cluster_name))
if not username:
error("Failed to get cluster {0} username.".format(cluster_name))
except ClientError as e:
error(e.response.get("Error").get("Message"))
return master_ip, username
def get_cli_log_file():
return os.path.expanduser(os.path.join("~", ".parallelcluster", "pcluster-cli.log"))
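A small usage sketch of paginate_boto3(): iterate every CloudFormation stack without manual NextToken bookkeeping (assumes AWS credentials and AWS_DEFAULT_REGION are set):

cfn = boto3.client('cloudformation')
for stack in paginate_boto3(cfn.describe_stacks):
    print(stack.get('StackName'))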
| 35.714592
| 119
| 0.672715
| 2,168
| 16,643
| 5.005074
| 0.208026
| 0.019077
| 0.006451
| 0.010967
| 0.16404
| 0.110497
| 0.091697
| 0.057414
| 0.040273
| 0.025528
| 0
| 0.008649
| 0.221955
| 16,643
| 465
| 120
| 35.791398
| 0.82933
| 0.303251
| 0
| 0.112971
| 0
| 0
| 0.180763
| 0.01251
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125523
| false
| 0.008368
| 0.054393
| 0.008368
| 0.280335
| 0.025105
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7707a9a93d2eb459c06d85459c2db5718ad3cc
| 3,963
|
py
|
Python
|
tools/telemetry/telemetry/core/platform/android_device_unittest.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2019-11-28T10:46:52.000Z
|
2019-11-28T10:46:52.000Z
|
tools/telemetry/telemetry/core/platform/android_device_unittest.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/telemetry/telemetry/core/platform/android_device_unittest.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2015-03-27T11:15:39.000Z
|
2016-08-17T14:19:56.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry import benchmark
from telemetry.core import browser_options
from telemetry.core.platform import android_device
from telemetry.core.platform import android_platform_backend
from telemetry.unittest_util import system_stub
class AndroidDeviceTest(unittest.TestCase):
def setUp(self):
self._android_device_stub = system_stub.Override(
android_device, ['adb_commands'])
def testGetAllAttachedAndroidDevices(self):
self._android_device_stub.adb_commands.attached_devices = [
'01', '02']
self.assertEquals(
set(['01', '02']),
set(device.device_id for device in
android_device.AndroidDevice.GetAllConnectedDevices()
))
def tearDown(self):
self._android_device_stub.Restore()
class GetDeviceTest(unittest.TestCase):
def setUp(self):
self._android_device_stub = system_stub.Override(
android_device, ['adb_commands', 'os', 'subprocess', 'logging'])
self._apb_stub = system_stub.Override(
android_platform_backend, ['adb_commands'])
def tearDown(self):
self._android_device_stub.Restore()
self._apb_stub.Restore()
def testNoAdbReturnsNone(self):
finder_options = browser_options.BrowserFinderOptions()
def NoAdb(*_, **__):
raise OSError('not found')
self._android_device_stub.subprocess.Popen = NoAdb
self.assertEquals([], self._android_device_stub.logging.warnings)
self.assertIsNone(android_device.GetDevice(finder_options))
def testAdbNoDevicesReturnsNone(self):
finder_options = browser_options.BrowserFinderOptions()
self.assertEquals([], self._android_device_stub.logging.warnings)
self.assertIsNone(android_device.GetDevice(finder_options))
def testAdbPermissionsErrorReturnsNone(self):
finder_options = browser_options.BrowserFinderOptions()
self._android_device_stub.subprocess.Popen.communicate_result = (
'List of devices attached\n????????????\tno permissions\n',
'* daemon not running. starting it now on port 5037 *\n'
'* daemon started successfully *\n')
device = android_device.GetDevice(finder_options)
self.assertEquals([
'adb devices gave a permissions error. Consider running adb as root:',
' adb kill-server',
' sudo `which adb` devices\n\n'],
self._android_device_stub.logging.warnings)
self.assertIsNone(device)
def testAdbTwoDevicesReturnsNone(self):
finder_options = browser_options.BrowserFinderOptions()
self._android_device_stub.adb_commands.attached_devices = [
'015d14fec128220c', '015d14fec128220d']
device = android_device.GetDevice(finder_options)
self.assertEquals([
'Multiple devices attached. Please specify one of the following:\n'
' --device=015d14fec128220c\n'
' --device=015d14fec128220d'],
self._android_device_stub.logging.warnings)
self.assertIsNone(device)
def testAdbPickOneDeviceReturnsDeviceInstance(self):
finder_options = browser_options.BrowserFinderOptions()
finder_options.android_device = '555d14fecddddddd' # pick one
self._android_device_stub.adb_commands.attached_devices = [
'015d14fec128220c', '555d14fecddddddd']
device = android_device.GetDevice(finder_options)
self.assertEquals([], self._android_device_stub.logging.warnings)
self.assertEquals('555d14fecddddddd', device.device_id)
def testAdbOneDeviceReturnsDeviceInstance(self):
finder_options = browser_options.BrowserFinderOptions()
self._android_device_stub.adb_commands.attached_devices = (
['015d14fec128220c'])
device = android_device.GetDevice(finder_options)
self.assertEquals([], self._android_device_stub.logging.warnings)
self.assertEquals('015d14fec128220c', device.device_id)
| 39.63
| 78
| 0.7459
| 427
| 3,963
| 6.665105
| 0.276347
| 0.123331
| 0.095573
| 0.11806
| 0.582221
| 0.570625
| 0.490162
| 0.472242
| 0.385453
| 0.363317
| 0
| 0.032364
| 0.157961
| 3,963
| 99
| 79
| 40.030303
| 0.820497
| 0.041383
| 0
| 0.417722
| 0
| 0
| 0.152346
| 0.020559
| 0
| 0
| 0
| 0
| 0.164557
| 1
| 0.151899
| false
| 0
| 0.075949
| 0
| 0.253165
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7911d27c0fa532add30880dc5c7b6aaf924408
| 1,265
|
py
|
Python
|
logger.py
|
bekaaa/xgboost_tuner
|
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
|
[
"MIT"
] | null | null | null |
logger.py
|
bekaaa/xgboost_tuner
|
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
|
[
"MIT"
] | null | null | null |
logger.py
|
bekaaa/xgboost_tuner
|
2d93f6cc751b3a8778420a88caf73fd1dc8ef2ce
|
[
"MIT"
] | 1
|
2019-03-16T14:30:07.000Z
|
2019-03-16T14:30:07.000Z
|
#! /usr/bin/env python
import logging
#---------------------------------------
class logger:
    '''
    A ready-to-use logging class.
    Create an object with the parameters (log_filename, directory to save it),
    then call obj.add("some text") whenever you want to append a line.
    obj.close() is optional; it simply detaches the file handler.
    You can edit any of the configuration below to whatever you like.
    '''
def __init__(self, filename, log_dir='../data/log'):
self.log = None
self.handler = None
LOG_PATH = log_dir
assert isinstance(filename, str) and filename != ''
self.logger = logging.getLogger()
self.logger.setLevel(logging.INFO)
filename = LOG_PATH + str(filename)
self.handler = logging.FileHandler(filename)
self.handler.setLevel(logging.INFO)
formatter = logging.Formatter(
fmt='%(asctime)s : %(message)s',
datefmt='%d-%m %H:%M'
)
self.handler.setFormatter(formatter)
self.logger.addHandler(self.handler)
return
#------------------------------------
def add(self, message):
assert isinstance(message, str)
self.logger.info(message)
return
#------------------------------------
def close(self):
self.logger.removeHandler(self.handler)
return
#----------------------------------------
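Usage sketch; note that the constructor concatenates log_dir and filename directly, so pass a directory that ends with a separator (values here are examples):

log = logger('run.log', log_dir='./')
log.add('training started')
log.close()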
| 31.625
| 93
| 0.622925
| 162
| 1,265
| 4.808642
| 0.5
| 0.084724
| 0.04878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149407
| 1,265
| 39
| 94
| 32.435897
| 0.723978
| 0.392885
| 0
| 0.115385
| 0
| 0
| 0.062667
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.115385
| false
| 0
| 0.038462
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed79b8872d0353b944045d77a3b550a09342bdbf
| 5,536
|
py
|
Python
|
baselines/prep_baseline.py
|
lessleslie/slm-code-generation
|
017ac0828faf3467e9f85883e27be09ec3898b14
|
[
"MIT"
] | 64
|
2020-06-23T06:27:42.000Z
|
2022-03-30T07:44:52.000Z
|
baselines/prep_baseline.py
|
lessleslie/slm-code-generation
|
017ac0828faf3467e9f85883e27be09ec3898b14
|
[
"MIT"
] | 11
|
2020-07-14T23:29:31.000Z
|
2021-09-17T15:17:49.000Z
|
baselines/prep_baseline.py
|
tech-srl/slm-code-generation
|
15fe4e1df82e49587f725577f870ca12dc42903a
|
[
"MIT"
] | 6
|
2020-07-09T08:42:04.000Z
|
2021-03-02T14:35:31.000Z
|
import json
import multiprocessing as mp
import re
from argparse import ArgumentParser
from enum import Enum, auto
import javalang
from functools import partial
PRED_TOKEN = 'PRED'
modifiers = ['public', 'private', 'protected', 'static']
class TargetType(Enum):
seq = auto()
tree = auto()
@staticmethod
def from_string(s):
try:
return TargetType[s]
except KeyError:
raise ValueError()
target_type = TargetType.seq
RE_WORDS = re.compile(r'''
# Find words in a string. Order matters!
[A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word
[A-Z]?[a-z]+ | # Capitalized words / all lower case
[A-Z]+ | # All upper case
\d+ | # Numbers
_ |
\" |
.+
''', re.VERBOSE)
TREE_SPLIT = re.compile(r'([(),])')
def split_subtokens(s):
    return [subtok for subtok in RE_WORDS.findall(s) if not subtok == '_']
def subtokenize(s):
failed = False
try:
tokens = list(javalang.tokenizer.tokenize(s))
except:
try:
tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2]
except:
try:
tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1]
except:
tokens = s.split()
failed = True
if failed:
return [' _ '.join(split_subtokens(i)) for i in tokens if not i in modifiers]
else:
return [' _ '.join(split_subtokens(i.value)) for i in tokens if not i.value in modifiers]
def subtokenize_tree(s):
return ' '.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub) > 0])
def process_line(target_type, max_targets, max_nodes, line):
obj = json.loads(line)
left_context = obj['left_context']
right_context = obj['right_context']
target_seq = obj['target_seq']
num_targets = obj['num_targets']
num_nodes = obj['num_nodes']
if max_targets is not None and num_targets > max_targets:
return None, None
if max_nodes is not None and num_nodes > max_nodes:
return None, None
if target_type is TargetType.seq:
target_pred = ' '.join(subtokenize(target_seq)).lower()
elif target_type is TargetType.tree:
target_pred = subtokenize_tree(obj['linearized_tree'])
source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower())
return source, target_pred
def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes):
total_examples = 0
source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role)
target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role)
with open(source_output_path, 'w') as source_output_file:
with open(target_output_path, 'w') as target_output_file:
with open(file_path, 'r') as file:
subtokenize_line = partial(process_line, target_type, max_targets, max_nodes)
with mp.Pool(64) as pool:
if data_file_role in ['test', 'val']:
examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]
else:
examples = pool.imap_unordered(subtokenize_line, file, chunksize=100)
#examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]
for source_seq, target_seq in examples:
if source_seq is None or target_seq is None:
continue
source_output_file.write(source_seq + '\n')
target_output_file.write(target_seq + '\n')
total_examples += 1
#print(source_seq, target_seq)
print('File: ' + file_path)
print('Total examples: ' + str(total_examples))
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument("-trd", "--train_data", dest="train_data_path",
help="path to training data file", required=True)
parser.add_argument("-ted", "--test_data", dest="test_data_path",
help="path to test data file", required=True)
parser.add_argument("-vd", "--val_data", dest="val_data_path",
help="path to validation data file", required=True)
parser.add_argument("-o", "--output_name", dest="output_name",
help="output name - the base name for the created dataset", metavar="FILE", required=True,
default='data')
parser.add_argument("--target_type", dest="target_type", type=TargetType.from_string, choices=list(TargetType), required=True)
parser.add_argument("--max_targets", dest="max_targets", type=int, required=False, default=40)
parser.add_argument("--max_nodes", dest="max_nodes", type=int, required=False, default=None)
parser.add_argument('--local', action='store_true')
args = parser.parse_args()
train_data_path = args.train_data_path
test_data_path = args.test_data_path
val_data_path = args.val_data_path
for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']):
process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name,
target_type=args.target_type, max_targets=args.max_targets, max_nodes=args.max_nodes)
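A quick illustration of the pure-regex subtokenizer on a camelCase identifier (no javalang needed for this path):

print(split_subtokens('getMaxValue2D'))  # ['get', 'Max', 'Value', '2', 'D']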
| 40.408759
| 146
| 0.629516
| 718
| 5,536
| 4.603064
| 0.215877
| 0.04236
| 0.04115
| 0.036309
| 0.302874
| 0.222693
| 0.172466
| 0.116188
| 0.077156
| 0.038729
| 0
| 0.004564
| 0.248013
| 5,536
| 136
| 147
| 40.705882
| 0.789335
| 0.020592
| 0
| 0.099099
| 0
| 0.009009
| 0.148893
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.063063
| 0.018018
| 0.216216
| 0.018018
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7d572858561992a56ab8312f08925dad1d2745
| 6,260
|
py
|
Python
|
ebay.py
|
SpironoZeppeli/Magic-The-Scannening
|
93c595a4c98fb725a79eeddfaba99cb0409d41fb
|
[
"MIT"
] | null | null | null |
ebay.py
|
SpironoZeppeli/Magic-The-Scannening
|
93c595a4c98fb725a79eeddfaba99cb0409d41fb
|
[
"MIT"
] | null | null | null |
ebay.py
|
SpironoZeppeli/Magic-The-Scannening
|
93c595a4c98fb725a79eeddfaba99cb0409d41fb
|
[
"MIT"
] | null | null | null |
import requests
import urllib.request
import urllib.parse
import PIL
import re
import configparser
import json
from PIL import Image
from ebaysdk.trading import Connection as Trading
from ebaysdk.exception import ConnectionError
from yaml import load
from PyQt5.QtWidgets import QMessageBox
class EbaySeller:
def __init__(self):
self.api = Trading()
config = configparser.ConfigParser()
config.read('config.ini')
with open('details.yaml', 'r') as file:
self.yaml_config = load(file)
def upload_card(self, card_name, eu_card_price, us_card_price, card_id):
if us_card_price != 0:
card_price = us_card_price * 0.8
else:
card_price = eu_card_price
if card_price < 1:
card_price = 1
card_price = str(round(card_price, 2))
try:
    card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + str(card_id) + '&type=card'
    # Download inside the try so a gatherer failure is actually caught.
    urllib.request.urlretrieve(card_image, 'temp.jpg')
except:
    self.msg = QMessageBox()
    self.msg.setWindowTitle("Upload Failed")
    self.msg.setText("Upload Failed, wizards gatherer error")
    self.msg.setStandardButtons(QMessageBox.Ok)
    self.msg.exec()
# Resize card
base_height = 500
img = Image.open('temp.jpg')
height_percent = (base_height / float(img.size[1]))
wsize = int((float(img.size[0]) * float(height_percent)))
img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS)
img.save('temp.png')
# Upload to PictShare
files = {'file': open('temp.png', 'rb')}
try:
r = requests.post('https://pictshare.net/api/upload.php', files=files)
except:
self.msg = QMessageBox()
self.msg.setWindowTitle("Upload Failed")
self.msg.setText("Upload Failed, PictShare error")
self.msg.setStandardButtons(QMessageBox.Ok)
self.msg.exec()
print(r)
r = r.text
r = json.loads(r)
print(r)
r = r['url']
# Fix using regular expression, may not be needed at a later date
r = re.sub('\\.net', '.net/', r)
r = re.sub('\\.net//', '.net/', r)
print(r)
try:
image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r})
image = image.dict()
image = image['SiteHostedPictureDetails']['FullURL']
print(image)
# Upload to ebay
response = self.api.execute('AddFixedPriceItem', {
'Item': {'Title': card_name + ' MTG - NM/M', 'Description': card_name + ' MTG - NM/M',
'Quantity': '1', 'PictureDetails': {'PictureURL': image},
'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3',
'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config["PostalCode"],
'Currency': self.yaml_config["Currency"],
'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal',
'PayPalEmailAddress': self.yaml_config["PayPalEmailAddress"],
'PrimaryCategory': {'CategoryID': '38292'},
'ShippingDetails': {'ShippingType': 'Flat',
'ShippingServiceOptions': {'ShippingServicePriority': '1',
'ShippingService': self.yaml_config[
"ShippingService"],
'ShippingServiceCost': '1'}}}})
print(response.dict())
print(response.reply)
self.msg = QMessageBox()
if response.reply.Ack == 'Failure':
self.msg.setWindowTitle("Upload Failed")
self.msg.setText("Upload Complete, please check log.txt")
self.msg.setStandardButtons(QMessageBox.Ok)
with open('log.txt', 'a+') as log_file:
log_file.write(response.reply)
else:
self.msg.setWindowTitle("Upload Complete")
self.msg.setText("Upload Complete, please check your ebay account to confirm")
self.msg.setStandardButtons(QMessageBox.Ok)
self.msg.exec()
except ConnectionError as e:
print(e)
print(e.response.dict())
def get_multiverse_id(self, name):
try:
name = re.sub(' ', '%20', name)
r = requests.get('https://api.scryfall.com/cards/named?exact=' + name)
r = json.loads(r.text)
return r['multiverse_ids'][0]
except:
self.msg = QMessageBox()
self.msg.setWindowTitle("Upload Failed")
self.msg.setText("Upload Failed, scryfall error")
self.msg.setStandardButtons(QMessageBox.Ok)
self.msg.exec()
def get_card_info_and_sell(self, name):
try:
multiverse_id = self.get_multiverse_id(name)
r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id))
r = json.loads(r.text)
r = r[0]
card_name = r.get('name')
eu_card_price = r.get('price_normal')
us_card_price = r.get('us_normal')
card_set = r.get('set_id')
card_set_name = r.get('set_name')
card_id = r.get('multiverse_id')
# Display card info in CLI
print('Name: ' + card_name)
print('Set: ' + card_set)
print('Set name: ' + card_set_name)
print('Card ID: ' + str(card_id))
self.upload_card(card_name, eu_card_price, us_card_price, card_id)
except:
self.msg = QMessageBox()
self.msg.setWindowTitle("Upload Failed")
self.msg.setText("Upload Failed, card name not valid")
self.msg.setStandardButtons(QMessageBox.Ok)
self.msg.exec()
| 42.013423
| 119
| 0.546486
| 657
| 6,260
| 5.086758
| 0.295282
| 0.058648
| 0.037702
| 0.048474
| 0.283064
| 0.239677
| 0.231897
| 0.214542
| 0.185218
| 0.120886
| 0
| 0.008375
| 0.332428
| 6,260
| 148
| 120
| 42.297297
| 0.791338
| 0.021566
| 0
| 0.284615
| 0
| 0
| 0.204609
| 0.018631
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030769
| false
| 0
| 0.092308
| 0
| 0.138462
| 0.092308
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7de0c98f16f1e656d840a2c9ad1e60a28cfa7f
| 3,175
|
py
|
Python
|
bot/exts/github/github.py
|
v1nam/gurkbot
|
a0f5e05a5f65e6169accc90271fca58f4df211fe
|
[
"MIT"
] | 24
|
2020-12-18T07:26:14.000Z
|
2022-03-30T22:56:49.000Z
|
bot/exts/github/github.py
|
v1nam/gurkbot
|
a0f5e05a5f65e6169accc90271fca58f4df211fe
|
[
"MIT"
] | 143
|
2020-12-18T09:13:51.000Z
|
2022-03-02T19:27:44.000Z
|
bot/exts/github/github.py
|
v1nam/gurkbot
|
a0f5e05a5f65e6169accc90271fca58f4df211fe
|
[
"MIT"
] | 44
|
2020-12-18T09:05:29.000Z
|
2022-03-02T20:06:23.000Z
|
import typing
from bot.constants import BOT_REPO_URL
from discord import Embed
from discord.ext import commands
from discord.ext.commands.cooldowns import BucketType
from . import _issues, _profile, _source
class Github(commands.Cog):
"""
Github category cog, which contains commands related to GitHub.
Commands:
- profile Fetches a user's GitHub information.
- issue Command to retrieve issue(s) from a GitHub repository.
- source Displays information about the bot's source code.
"""
def __init__(self, bot: commands.Bot) -> None:
self.bot = bot
@commands.group(name="github", aliases=("gh",), invoke_without_command=True)
async def github_group(self, ctx: commands.Context) -> None:
"""Commands for Github."""
await ctx.send_help(ctx.command)
@github_group.command(name="profile")
@commands.cooldown(1, 10, BucketType.user)
async def profile(self, ctx: commands.Context, username: str) -> None:
"""
Fetches a user's GitHub information.
Username is optional and sends the help command if not specified.
"""
github_profile = _profile.GithubInfo(self.bot.http_session)
embed = await github_profile.get_github_info(username)
await ctx.send(embed=embed)
@github_group.command(name="issue", aliases=("pr",))
async def issue(
self,
ctx: commands.Context,
numbers: commands.Greedy[int],
repository: typing.Optional[str] = None,
) -> None:
"""Command to retrieve issue(s) from a GitHub repository."""
github_issue = _issues.Issues(self.bot.http_session)
if not numbers:
raise commands.MissingRequiredArgument(ctx.command.clean_params["numbers"])
if repository is None:
user = "gurkult"
else:
user, _, repository = repository.rpartition("/")
if user == "":
user = "gurkult"
embed = await github_issue.issue(ctx.message.channel, numbers, repository, user)
await ctx.send(embed=embed)
@github_group.command(name="source", aliases=("src", "inspect"))
async def source_command(
self, ctx: commands.Context, *, source_item: typing.Optional[str] = None
) -> None:
"""Displays information about the bot's source code."""
if source_item is None:
embed = Embed(title="Gurkbot's GitHub Repository")
embed.add_field(name="Repository", value=f"[Go to GitHub]({BOT_REPO_URL})")
embed.set_thumbnail(url=self.bot.user.avatar_url)
await ctx.send(embed=embed)
return
elif not ctx.bot.get_command(source_item):
raise commands.BadArgument(
f"Unable to convert `{source_item}` to valid command or Cog."
)
github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url)
embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item))
await ctx.send(embed=embed)
def setup(bot: commands.Bot) -> None:
"""Load the Github cog."""
bot.add_cog(Github(bot))
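As a usage sketch: under discord.py 1.x, which matches the synchronous setup hook above, the cog is loaded as an extension; the dotted path is an assumption based on the repository layout (bot/exts/github/github.py):
from discord.ext import commands

bot = commands.Bot(command_prefix="!")
# Importing the extension runs setup(bot), which registers the Github cog
bot.load_extension("bot.exts.github.github")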
| 34.89011
| 88
| 0.640945
| 391
| 3,175
| 5.092072
| 0.28133
| 0.024611
| 0.030136
| 0.044199
| 0.253139
| 0.185836
| 0.129583
| 0.129583
| 0.088398
| 0
| 0
| 0.001258
| 0.249134
| 3,175
| 90
| 89
| 35.277778
| 0.832634
| 0.093543
| 0
| 0.145455
| 0
| 0
| 0.072125
| 0.008967
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.109091
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7de7f5235ff8dd0c5f7e122b59415ab3622dc6
| 1,154
|
py
|
Python
|
log/slack_sender.py
|
SmashKs/BarBarian
|
b308dcb9e24ec621abbbc121847923e14e5b6a4b
|
[
"MIT"
] | null | null | null |
log/slack_sender.py
|
SmashKs/BarBarian
|
b308dcb9e24ec621abbbc121847923e14e5b6a4b
|
[
"MIT"
] | 2
|
2020-06-05T19:25:24.000Z
|
2021-06-10T20:56:57.000Z
|
log/slack_sender.py
|
SmashKs/BarBarian
|
b308dcb9e24ec621abbbc121847923e14e5b6a4b
|
[
"MIT"
] | null | null | null |
from slackclient import SlackClient
from external import SLACK_API_KEY
class SlackBot:
API_CHAT_MSG = 'chat.postMessage'
BOT_NAME = 'News Bot'
DEFAULT_CHANNEL = 'news_notification'
def __new__(cls, *p, **k):
if '_the_instance' not in cls.__dict__:
cls._the_instance = object.__new__(cls)
return cls._the_instance
def __init__(self):
self.__slack_client = SlackClient(SLACK_API_KEY)
def send_msg_to(self, text='', channel=DEFAULT_CHANNEL):
self.__slack_client.api_call(SlackBot.API_CHAT_MSG,
username=SlackBot.BOT_NAME,
channel=channel,
text=text)
def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL):
self.__slack_client.api_call(SlackBot.API_CHAT_MSG,
username=SlackBot.BOT_NAME,
mrkdwn=True,
channel=channel,
text=text)
if __name__ == '__main__':
SlackBot().send_msg_to('hello world!!')
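The __new__ override above makes SlackBot a singleton, so repeated construction returns one shared client; a quick sketch, assuming the imports above resolve:
a = SlackBot()
b = SlackBot()
assert a is b  # every call to SlackBot() yields the same instance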
| 32.971429
| 70
| 0.559792
| 121
| 1,154
| 4.801653
| 0.371901
| 0.056799
| 0.077453
| 0.092943
| 0.333907
| 0.333907
| 0.333907
| 0.333907
| 0.333907
| 0.333907
| 0
| 0
| 0.358752
| 1,154
| 34
| 71
| 33.941176
| 0.785135
| 0
| 0
| 0.32
| 0
| 0
| 0.064991
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.08
| 0
| 0.44
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7e4d3da4d7bdad5eca61e8c5160dfe0d14608f
| 2,379
|
py
|
Python
|
src/pytezos/block/forge.py
|
miracle2k/pytezos
|
e6b99f00f342d9a05b0c36a9883040961fd6d58e
|
[
"MIT"
] | 98
|
2019-02-07T16:33:38.000Z
|
2022-03-31T15:53:41.000Z
|
src/pytezos/block/forge.py
|
miracle2k/pytezos
|
e6b99f00f342d9a05b0c36a9883040961fd6d58e
|
[
"MIT"
] | 152
|
2019-05-20T16:38:56.000Z
|
2022-03-30T14:24:38.000Z
|
src/pytezos/block/forge.py
|
miracle2k/pytezos
|
e6b99f00f342d9a05b0c36a9883040961fd6d58e
|
[
"MIT"
] | 34
|
2019-07-25T12:03:51.000Z
|
2021-11-11T22:23:38.000Z
|
from typing import Any, Dict, List, Tuple
from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp
def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]:
if len(fitness) == 0:
major = 0
minor = 1
else:
major = int.from_bytes(bytes.fromhex(fitness[0]), 'big')
minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1
return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex()
def forge_int_fixed(value: int, length: int) -> bytes:
return value.to_bytes(length, 'big')
def forge_command(command: str) -> bytes:
if command == 'activate':
return b'\x00'
raise NotImplementedError(command)
def forge_fitness(fitness: List[str]) -> bytes:
return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness)))
def forge_priority(priority: int) -> bytes:
return priority.to_bytes(2, 'big')
def forge_content(content: Dict[str, Any]) -> bytes:
res = b''
res += forge_command(content['command'])
res += forge_base58(content['hash'])
res += forge_fitness(content['fitness'])
res += bytes.fromhex(content['protocol_parameters'])
return res
def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes:
res = b''
if protocol_data.get('content'):
res += forge_content(protocol_data['content'])
else:
res += forge_priority(protocol_data['priority'])
res += bytes.fromhex(protocol_data['proof_of_work_nonce'])
if protocol_data.get('seed_nonce_hash'):
res += b'\xFF'
res += forge_base58(protocol_data['seed_nonce_hash'])
else:
res += b'\x00'
res += b'\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\x00'
return res
def forge_block_header(shell_header: Dict[str, Any]) -> bytes:
res = forge_int_fixed(shell_header['level'], 4)
res += forge_int_fixed(shell_header['proto'], 1)
res += forge_base58(shell_header['predecessor'])
res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8)
res += forge_int_fixed(shell_header['validation_pass'], 1)
res += forge_base58(shell_header['operations_hash'])
res += forge_fitness(shell_header['fitness'])
res += forge_base58(shell_header['context'])
res += bytes.fromhex(shell_header['protocol_data'])
return res
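A quick sanity check of bump_fitness from the definitions above: an empty fitness starts at (0, 1), and an existing one keeps its major component while incrementing the minor, both hex-encoded big-endian:
# Both tuples below follow directly from the to_bytes(...).hex() encoding above
assert bump_fitness(()) == ('00', '0000000000000001')
assert bump_fitness(('01', '0000000000000001')) == ('01', '0000000000000002')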
| 33.985714
| 87
| 0.666246
| 320
| 2,379
| 4.725
| 0.2375
| 0.074074
| 0.042989
| 0.042328
| 0.180556
| 0.154101
| 0
| 0
| 0
| 0
| 0
| 0.016021
| 0.186633
| 2,379
| 69
| 88
| 34.478261
| 0.765375
| 0
| 0
| 0.153846
| 0
| 0
| 0.113073
| 0.01177
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0.019231
| 0.038462
| 0.057692
| 0.346154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed7f467835f32242a9650f226b4a5ad9d6d87af4
| 5,321
|
py
|
Python
|
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
|
jichangjichang/Paddle
|
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
|
[
"Apache-2.0"
] | 9
|
2017-12-04T02:58:01.000Z
|
2020-12-03T14:46:30.000Z
|
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
|
jichangjichang/Paddle
|
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
|
[
"Apache-2.0"
] | 7
|
2017-12-05T20:29:08.000Z
|
2018-10-15T08:57:40.000Z
|
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
|
jichangjichang/Paddle
|
4fa3cee5499c6df0ad6043b0cfa220d09f2034e8
|
[
"Apache-2.0"
] | 6
|
2018-03-19T22:38:46.000Z
|
2019-11-01T22:28:27.000Z
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import math
import sys
import paddle.compat as cpt
from op_test import OpTest
class TestROIPoolOp(OpTest):
def set_data(self):
self.init_test_case()
self.make_rois()
self.calc_roi_pool()
self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)}
self.attrs = {
'spatial_scale': self.spatial_scale,
'pooled_height': self.pooled_height,
'pooled_width': self.pooled_width
}
self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes}
def init_test_case(self):
self.batch_size = 3
self.channels = 3
self.height = 6
self.width = 4
# n, c, h, w
self.x_dim = (self.batch_size, self.channels, self.height, self.width)
self.spatial_scale = 1.0 / 4.0
self.pooled_height = 2
self.pooled_width = 2
self.x = np.random.random(self.x_dim).astype('float32')
def calc_roi_pool(self):
out_data = np.zeros((self.rois_num, self.channels, self.pooled_height,
self.pooled_width))
argmax_data = np.zeros((self.rois_num, self.channels,
self.pooled_height, self.pooled_width))
for i in range(self.rois_num):
roi = self.rois[i]
roi_batch_id = roi[0]
roi_start_w = int(cpt.round(roi[1] * self.spatial_scale))
roi_start_h = int(cpt.round(roi[2] * self.spatial_scale))
roi_end_w = int(cpt.round(roi[3] * self.spatial_scale))
roi_end_h = int(cpt.round(roi[4] * self.spatial_scale))
roi_height = int(max(roi_end_h - roi_start_h + 1, 1))
roi_width = int(max(roi_end_w - roi_start_w + 1, 1))
x_i = self.x[roi_batch_id]
bin_size_h = float(roi_height) / float(self.pooled_height)
bin_size_w = float(roi_width) / float(self.pooled_width)
for c in range(self.channels):
for ph in range(self.pooled_height):
for pw in range(self.pooled_width):
hstart = int(math.floor(ph * bin_size_h))
wstart = int(math.floor(pw * bin_size_w))
hend = int(math.ceil((ph + 1) * bin_size_h))
wend = int(math.ceil((pw + 1) * bin_size_w))
hstart = min(max(hstart + roi_start_h, 0), self.height)
hend = min(max(hend + roi_start_h, 0), self.height)
wstart = min(max(wstart + roi_start_w, 0), self.width)
wend = min(max(wend + roi_start_w, 0), self.width)
is_empty = (hend <= hstart) or (wend <= wstart)
if is_empty:
out_data[i, c, ph, pw] = 0
else:
out_data[i, c, ph, pw] = -sys.float_info.max
argmax_data[i, c, ph, pw] = -1
for h in range(hstart, hend):
for w in range(wstart, wend):
if x_i[c, h, w] > out_data[i, c, ph, pw]:
out_data[i, c, ph, pw] = x_i[c, h, w]
argmax_data[i, c, ph,
pw] = h * self.width + w
self.outs = out_data.astype('float32')
self.argmaxes = argmax_data.astype('int64')
def make_rois(self):
rois = []
self.rois_lod = [[]]
for bno in range(self.batch_size):
self.rois_lod[0].append(bno + 1)
for i in range(bno + 1):
x1 = np.random.random_integers(
0, self.width // self.spatial_scale - self.pooled_width)
y1 = np.random.random_integers(
0, self.height // self.spatial_scale - self.pooled_height)
x2 = np.random.random_integers(x1 + self.pooled_width,
self.width // self.spatial_scale)
y2 = np.random.random_integers(
y1 + self.pooled_height, self.height // self.spatial_scale)
roi = [bno, x1, y1, x2, y2]
rois.append(roi)
self.rois_num = len(rois)
self.rois = np.array(rois).astype("int64")
def setUp(self):
self.op_type = "roi_pool"
self.set_data()
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
if __name__ == '__main__':
unittest.main()
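The bin boundaries in calc_roi_pool follow standard RoI-pooling arithmetic: floor for the start, ceil for the end, so adjacent bins may overlap. A standalone sketch with hypothetical values:
import math

roi_height, pooled_height = 5, 2  # hypothetical values for illustration
bin_size_h = float(roi_height) / float(pooled_height)
for ph in range(pooled_height):
    hstart = int(math.floor(ph * bin_size_h))
    hend = int(math.ceil((ph + 1) * bin_size_h))
    print(ph, hstart, hend)  # prints (0, 0, 3) then (1, 2, 5): bins overlap at row 2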
| 37.20979
| 80
| 0.543131
| 705
| 5,321
| 3.902128
| 0.236879
| 0.058161
| 0.058161
| 0.017448
| 0.217739
| 0.125772
| 0.047256
| 0.047256
| 0.047256
| 0.047256
| 0
| 0.017321
| 0.348995
| 5,321
| 142
| 81
| 37.471831
| 0.776848
| 0.111633
| 0
| 0
| 0
| 0
| 0.020382
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.153061
| 0.010204
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed801190784fa836d2752af1e6b10b54a93fa361
| 2,518
|
py
|
Python
|
Day20.py
|
SheepiCagio/Advent-of-Code-2021
|
52f0035da2cb258810d8947cbf56b51b65a9fe8b
|
[
"MIT"
] | null | null | null |
Day20.py
|
SheepiCagio/Advent-of-Code-2021
|
52f0035da2cb258810d8947cbf56b51b65a9fe8b
|
[
"MIT"
] | null | null | null |
Day20.py
|
SheepiCagio/Advent-of-Code-2021
|
52f0035da2cb258810d8947cbf56b51b65a9fe8b
|
[
"MIT"
] | null | null | null |
import numpy as np
raw = open("inputs/20.txt","r").readlines()
input_array= [(i.replace('\n', '').replace('.','0').replace('#', '1')) for i in raw]
test_raw = open("inputs/20_test.txt","r").readlines()
test_array= [(i.replace('\n', '').replace('.','0').replace('#', '1')) for i in test_raw]
def addLayerZero(grid):
#if sum(np.asarray(grid)[:,0]) > 0:
grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid))
#if sum(np.asarray(grid)[0,:]) > 0:
grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid))
# if sum(np.asarray(grid)[:,-1]) > 0:
grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis]))
# if sum(np.asarray(grid)[-1,:]) > 0:
grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:]))
return grid
def addLayerOnes(grid):
#if sum(np.asarray(grid)[:,0]) > 0:
grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid))
#if sum(np.asarray(grid)[0,:]) > 0:
grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid))
# if sum(np.asarray(grid)[:,-1]) > 0:
grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis]))
# if sum(np.asarray(grid)[-1,:]) > 0:
grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:]))
return grid
def pictureEnhancer(input_array,iter):
splitvalue = False
index_string = ''
grid = []
for i in input_array:
if i == '':
splitvalue = True
continue
if not splitvalue:
index_string += i
else:
grid.append(list(i))
grid = [[int(i) for i in row] for row in grid]
for x in range(1,iter+1):
grid = enhancer(grid, index_string,x)
print('The number of lit pixels is:', sum(sum(grid)))
def enhancer(grid, index_string,iter):
print(iter)
if iter == 1 or index_string[0] == '0' or (iter % 2 == 1 and index_string[511] == '0'):
grid = addLayerZero(grid)
output_grid = np.zeros((len(grid),len(grid[0])),dtype=int)
grid = addLayerZero(grid)
elif (index_string[0] == '1' and index_string[511] == '1') or (iter % 2 == 0 and index_string[511] == '0'):
grid = addLayerOnes(grid)
output_grid = np.ones((len(grid),len(grid[0])),dtype=int)
grid = addLayerOnes(grid)
for i in range(1,len(grid)-1):
for j in range(1, len(grid[i])-1):
binStr = ''
for k in range(-1,2):
for l in range(-1,2):
binStr += str(grid[i+k][j+l])
output_grid[i-1][j-1] = index_string[int(binStr,2)]
return output_grid
#pictureEnhancer(test_array,2)
#pictureEnhancer(input_array,2)
pictureEnhancer(test_array,50)
pictureEnhancer(input_array,50)
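The core of enhancer is the 9-bit lookup index built from each 3x3 neighbourhood, read row by row; in isolation the computation looks like this (neighbourhood values chosen arbitrarily):
neighbourhood = [[0, 0, 0],
                 [1, 0, 0],
                 [0, 1, 0]]
bin_str = ''.join(str(v) for row in neighbourhood for v in row)
print(int(bin_str, 2))  # '000100010' -> 34, the index into the enhancement string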
| 35.464789
| 110
| 0.635822
| 423
| 2,518
| 3.725768
| 0.170213
| 0.053299
| 0.035533
| 0.071066
| 0.513959
| 0.469543
| 0.441624
| 0.441624
| 0.395939
| 0.384518
| 0
| 0.032243
| 0.137808
| 2,518
| 71
| 111
| 35.464789
| 0.69369
| 0.136219
| 0
| 0.113208
| 0
| 0
| 0.036011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075472
| false
| 0
| 0.018868
| 0
| 0.150943
| 0.037736
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed8134024179e7e4607f23c5ef95e9da1da3820b
| 1,674
|
py
|
Python
|
questions/53349623/main.py
|
sesu089/stackoverflow
|
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
|
[
"MIT"
] | 302
|
2017-03-04T00:05:23.000Z
|
2022-03-28T22:51:29.000Z
|
questions/53349623/main.py
|
sesu089/stackoverflow
|
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
|
[
"MIT"
] | 30
|
2017-12-02T19:26:43.000Z
|
2022-03-28T07:40:36.000Z
|
questions/53349623/main.py
|
sesu089/stackoverflow
|
6fae69be6fa74fba9d554e6b5f387e5d3c1aad73
|
[
"MIT"
] | 388
|
2017-07-04T16:53:12.000Z
|
2022-03-18T22:20:19.000Z
|
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
class Demo(QtWidgets.QWidget):
def __init__(self):
super(Demo, self).__init__()
self.button = QtWidgets.QPushButton()
self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter)
self.combo = QtWidgets.QComboBox(self)
self.combo.currentIndexChanged.connect(self.change_func)
self.trans = QtCore.QTranslator(self)
self.v_layout = QtWidgets.QVBoxLayout(self)
self.v_layout.addWidget(self.combo)
self.v_layout.addWidget(self.button)
self.v_layout.addWidget(self.label)
options = ([('English', ''), ('français', 'eng-fr'), ('中文', 'eng-chs'), ])
for i, (text, lang) in enumerate(options):
self.combo.addItem(text)
self.combo.setItemData(i, lang)
self.retranslateUi()
@QtCore.pyqtSlot(int)
def change_func(self, index):
data = self.combo.itemData(index)
if data:
self.trans.load(data)
QtWidgets.QApplication.instance().installTranslator(self.trans)
else:
QtWidgets.QApplication.instance().removeTranslator(self.trans)
def changeEvent(self, event):
if event.type() == QtCore.QEvent.LanguageChange:
self.retranslateUi()
super(Demo, self).changeEvent(event)
def retranslateUi(self):
self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start'))
self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World'))
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
demo = Demo()
demo.show()
sys.exit(app.exec_())
| 32.823529
| 84
| 0.642772
| 183
| 1,674
| 5.754098
| 0.42623
| 0.051282
| 0.041785
| 0.05698
| 0.146249
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00077
| 0.224612
| 1,674
| 51
| 85
| 32.823529
| 0.810478
| 0
| 0
| 0.051282
| 0
| 0
| 0.037612
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102564
| false
| 0
| 0.051282
| 0
| 0.179487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed81492b65a1f232ede7d038b4670a415f3f191c
| 1,638
|
py
|
Python
|
tests/test_error_descriptions_from_raises.py
|
iterait/apistrap
|
e83460fa97f13a95a928971b0d2defe0ac611911
|
[
"MIT"
] | 6
|
2018-09-06T18:32:48.000Z
|
2021-05-28T01:03:32.000Z
|
tests/test_error_descriptions_from_raises.py
|
iterait/apistrap
|
e83460fa97f13a95a928971b0d2defe0ac611911
|
[
"MIT"
] | 53
|
2018-09-06T16:16:53.000Z
|
2021-05-19T14:36:58.000Z
|
tests/test_error_descriptions_from_raises.py
|
iterait/apistrap
|
e83460fa97f13a95a928971b0d2defe0ac611911
|
[
"MIT"
] | null | null | null |
import pytest
from apistrap.flask import FlaskApistrap
from apistrap.schemas import ErrorResponse
@pytest.fixture()
def app_with_raises(app):
oapi = FlaskApistrap()
@app.route("/", methods=["GET"])
def view():
"""
Something something.
:raises KeyError: KeyError description
"""
oapi.init_app(app)
@pytest.fixture()
def app_with_raises_and_handler(app):
oapi = FlaskApistrap()
oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse())
@app.route("/", methods=["GET"])
def view():
"""
Something something.
:raises KeyError: KeyError description
"""
oapi.init_app(app)
def test_error_descriptions_from_raises(app_with_raises, client):
response = client.get("/spec.json")
assert response.json["paths"]["/"]["get"]["responses"] == {
"500": {
"description": "KeyError description",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
def test_http_code_from_handler(app_with_raises_and_handler, client):
response = client.get("/spec.json")
assert response.json["paths"]["/"]["get"]["responses"] == {
"515": {
"description": "KeyError description",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
| 23.4
| 69
| 0.527473
| 141
| 1,638
| 5.957447
| 0.326241
| 0.033333
| 0.061905
| 0.045238
| 0.696429
| 0.657143
| 0.588095
| 0.588095
| 0.588095
| 0.588095
| 0
| 0.008242
| 0.333333
| 1,638
| 69
| 70
| 23.73913
| 0.760989
| 0.073871
| 0
| 0.545455
| 0
| 0
| 0.182944
| 0.046768
| 0
| 0
| 0
| 0
| 0.045455
| 1
| 0.136364
| false
| 0
| 0.068182
| 0
| 0.204545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed82dc9fed173aeada3cbab76076165a4c9b3932
| 1,126
|
py
|
Python
|
projects/api/UsersApi.py
|
chamathshashika/projects-python-wrappers
|
33e9f6bccba16a581b115c582033a93d43bb159c
|
[
"MIT"
] | null | null | null |
projects/api/UsersApi.py
|
chamathshashika/projects-python-wrappers
|
33e9f6bccba16a581b115c582033a93d43bb159c
|
[
"MIT"
] | null | null | null |
projects/api/UsersApi.py
|
chamathshashika/projects-python-wrappers
|
33e9f6bccba16a581b115c582033a93d43bb159c
|
[
"MIT"
] | null | null | null |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.UsersParser import UsersParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = UsersParser()
class UsersApi:
"""Users Api class is used to
1.Get all the users in the given project.
"""
def __init__(self, authtoken, portal_id):
"""Initialize Users api using user's authtoken and portal id.
Args:
authtoken(str): User's authtoken.
portal_id(str): User's portal id.
"""
self.details = {
'authtoken': authtoken
}
self.portal_id = portal_id
def get_users(self, project_id):
"""Get all the users in the given project.
Args:
project_id(long): Project id.
Returns:
list of instance: List of users object.
"""
url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/users/'
response = zoho_http_client.get(url, self.details)
return parser.get_users(response)
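A minimal usage sketch; the authtoken, portal id, and project id below are placeholders for values from a real Zoho Projects account:
api = UsersApi('your-authtoken', '12345')  # placeholder credentials
for user in api.get_users(67890):          # placeholder project id
    print(user)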
| 25.590909
| 101
| 0.617229
| 139
| 1,126
| 4.848921
| 0.345324
| 0.083086
| 0.041543
| 0.041543
| 0.091988
| 0.091988
| 0.091988
| 0.091988
| 0
| 0
| 0
| 0.001241
| 0.284192
| 1,126
| 43
| 102
| 26.186047
| 0.834988
| 0.312611
| 0
| 0
| 0
| 0
| 0.049475
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.1875
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed82e608ff9e5d51a3d3e7cab08afa27210afbdb
| 11,340
|
py
|
Python
|
useless/tuck_arms.py
|
leader1313/Baxter_teleoperation_system
|
856d999acd73e6c1dc15a342cb6c4fcd1a482863
|
[
"Apache-2.0"
] | null | null | null |
useless/tuck_arms.py
|
leader1313/Baxter_teleoperation_system
|
856d999acd73e6c1dc15a342cb6c4fcd1a482863
|
[
"Apache-2.0"
] | 2
|
2019-10-15T07:24:24.000Z
|
2019-10-15T07:28:19.000Z
|
useless/tuck_arms.py
|
leader1313/Baxter_teleoperation_system
|
856d999acd73e6c1dc15a342cb6c4fcd1a482863
|
[
"Apache-2.0"
] | 1
|
2020-09-15T12:37:13.000Z
|
2020-09-15T12:37:13.000Z
|
#!/usr/bin/env python
# Copyright (c) 2013-2015, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Tool to tuck/untuck Baxter's arms to/from the shipping pose
"""
import argparse
from copy import deepcopy
import rospy
from std_msgs.msg import (
Empty,
Bool,
)
import baxter_interface
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_interface import CHECK_VERSION
class Tuck(object):
def __init__(self, tuck_cmd):
self._done = False
self._limbs = ('left', 'right')
self._arms = {
'left': baxter_interface.Limb('left'),
'right': baxter_interface.Limb('right'),
}
self._tuck = tuck_cmd
self._tuck_rate = rospy.Rate(20.0) # Hz
self._tuck_threshold = 0.2 # radians
self._peak_angle = -1.6 # radians
self._arm_state = {
'tuck': {'left': 'none', 'right': 'none'},
'collide': {'left': False, 'right': False},
'flipped': {'left': False, 'right': False}
}
self._joint_moves = {
'tuck': {
'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
},
'untuck': {
'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
}
}
self._collide_lsub = rospy.Subscriber(
'robot/limb/left/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'left')
self._collide_rsub = rospy.Subscriber(
'robot/limb/right/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'right')
self._disable_pub = {
'left': rospy.Publisher(
'robot/limb/left/suppress_collision_avoidance',
Empty, queue_size=10),
'right': rospy.Publisher(
'robot/limb/right/suppress_collision_avoidance',
Empty, queue_size=10)
}
self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
self._enable_pub = rospy.Publisher('robot/set_super_enable',
Bool, queue_size=10)
def _update_collision(self, data, limb):
self._arm_state['collide'][limb] = len(data.collision_object) > 0
self._check_arm_state()
def _check_arm_state(self):
"""
Check for goals and behind collision field.
If s1 joint is over the peak, collision will need to be disabled
to get the arm around the head-arm collision force-field.
"""
diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
for limb in self._limbs:
angles = [self._arms[limb].joint_angle(joint)
for joint in self._arms[limb].joint_names()]
# Check if in a goal position
untuck_goal = map(diff_check, angles,
self._joint_moves['untuck'][limb])
tuck_goal = map(diff_check, angles[0:2],
self._joint_moves['tuck'][limb][0:2])
if all(untuck_goal):
self._arm_state['tuck'][limb] = 'untuck'
elif all(tuck_goal):
self._arm_state['tuck'][limb] = 'tuck'
else:
self._arm_state['tuck'][limb] = 'none'
# Check if shoulder is flipped over peak
self._arm_state['flipped'][limb] = (
self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)
def _prepare_to_tuck(self):
# If arms are in "tucked" state, disable collision avoidance
# before enabling robot, to avoid arm jerking from "force-field".
head = baxter_interface.Head()
start_disabled = not self._rs.state().enabled
at_goal = lambda: (abs(head.pan()) <=
baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)
rospy.loginfo("Moving head to neutral position")
while not at_goal() and not rospy.is_shutdown():
if start_disabled:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
if not self._rs.state().enabled:
self._enable_pub.publish(True)
head.set_pan(0.0, 0.5, timeout=0)
self._tuck_rate.sleep()
if start_disabled:
while self._rs.state().enabled == True and not rospy.is_shutdown():
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def _move_to(self, tuck, disabled):
if any(disabled.values()):
[pub.publish(Empty()) for pub in self._disable_pub.values()]
while (any(self._arm_state['tuck'][limb] != goal
for limb, goal in tuck.viewitems())
and not rospy.is_shutdown()):
if self._rs.state().enabled == False:
self._enable_pub.publish(True)
for limb in self._limbs:
if disabled[limb]:
self._disable_pub[limb].publish(Empty())
if limb in tuck:
self._arms[limb].set_joint_positions(dict(zip(
self._arms[limb].joint_names(),
self._joint_moves[tuck[limb]][limb])))
self._check_arm_state()
self._tuck_rate.sleep()
if any(self._arm_state['collide'].values()):
self._rs.disable()
return
def supervised_tuck(self):
# Update our starting state to check if arms are tucked
self._prepare_to_tuck()
self._check_arm_state()
# Tuck Arms
if self._tuck == True:
# If arms are already tucked, report this to user and exit.
if all(self._arm_state['tuck'][limb] == 'tuck'
for limb in self._limbs):
rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
self._done = True
return
else:
rospy.loginfo("Tucking: One or more arms not Tucked.")
any_flipped = not all(self._arm_state['flipped'].values())
if any_flipped:
rospy.loginfo(
"Moving to neutral start position with collision %s.",
"on" if any_flipped else "off")
# Move to neutral pose before tucking arms to avoid damage
self._check_arm_state()
actions = dict()
disabled = {'left': True, 'right': True}
for limb in self._limbs:
if not self._arm_state['flipped'][limb]:
actions[limb] = 'untuck'
disabled[limb] = False
self._move_to(actions, disabled)
# Disable collision and Tuck Arms
rospy.loginfo("Tucking: Tucking with collision avoidance off.")
actions = {'left': 'tuck', 'right': 'tuck'}
disabled = {'left': True, 'right': True}
self._move_to(actions, disabled)
self._done = True
return
# Untuck Arms
else:
# If arms are tucked disable collision and untuck arms
if any(self._arm_state['flipped'].values()):
rospy.loginfo("Untucking: One or more arms Tucked;"
" Disabling Collision Avoidance and untucking.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
# If arms already untucked, move to neutral location
else:
rospy.loginfo("Untucking: Arms already Untucked;"
" Moving to neutral position.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
def clean_shutdown(self):
"""Handles ROS shutdown (Ctrl-C) safely."""
if not self._done:
rospy.logwarn('Aborting: Shutting down safely...')
if any(self._arm_state['collide'].values()):
while self._rs.state().enabled != False:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def main():
parser = argparse.ArgumentParser()
tuck_group = parser.add_mutually_exclusive_group(required=True)
tuck_group.add_argument("-t","--tuck", dest="tuck",
action='store_true', default=False, help="tuck arms")
tuck_group.add_argument("-u", "--untuck", dest="untuck",
action='store_true', default=False, help="untuck arms")
args = parser.parse_args(rospy.myargv()[1:])
tuck = args.tuck
rospy.loginfo("Initializing node... ")
rospy.init_node("rsdk_tuck_arms")
rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
tucker = Tuck(tuck)
rospy.on_shutdown(tucker.clean_shutdown)
tucker.supervised_tuck()
rospy.loginfo("Finished tuck")
if __name__ == "__main__":
main()
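The script is driven entirely by its mutually exclusive --tuck/--untuck flags; a sketch of invoking it programmatically (requires a live ROS master and a Baxter robot, so this is illustration only):
import sys

sys.argv = ['tuck_arms.py', '--tuck']  # or '--untuck' to reverse
main()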
| 42.47191
| 79
| 0.568078
| 1,320
| 11,340
| 4.699242
| 0.237879
| 0.028373
| 0.029018
| 0.015476
| 0.312429
| 0.197324
| 0.167661
| 0.11575
| 0.11575
| 0.11575
| 0
| 0.01454
| 0.326808
| 11,340
| 266
| 80
| 42.631579
| 0.798009
| 0.20485
| 0
| 0.267016
| 0
| 0
| 0.1171
| 0.021739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041885
| false
| 0
| 0.036649
| 0
| 0.109948
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed849abb775e5f57be3b9019dbf370e35893e0b2
| 606
|
py
|
Python
|
Python/leetcode.031.next-permutation.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | 4
|
2015-10-10T00:30:55.000Z
|
2020-07-27T19:45:54.000Z
|
Python/leetcode.031.next-permutation.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
Python/leetcode.031.next-permutation.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
class Solution(object):
def nextPermutation(self, nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
if not nums: return
n = len(nums)-1
while n > 0 and nums[n-1] >= nums[n]:
n -= 1
t = n
if t == 0:
nums[:] = nums[::-1]
return
x = nums[n-1]
while t < len(nums) and x < nums[t]:
t += 1
nums[t-1], nums[n-1] = nums[n-1], nums[t-1]
nums[n:] = nums[n:][::-1]
return
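Two quick in-place checks of the method above: the successor of [1, 2, 3] is [1, 3, 2], and the highest permutation wraps around to the lowest:
nums = [1, 2, 3]
Solution().nextPermutation(nums)
print(nums)  # [1, 3, 2]

nums = [3, 2, 1]
Solution().nextPermutation(nums)
print(nums)  # [1, 2, 3] -- wraps around from the last permutation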
| 27.545455
| 74
| 0.429043
| 84
| 606
| 3.095238
| 0.357143
| 0.134615
| 0.115385
| 0.076923
| 0.084615
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.420792
| 606
| 22
| 75
| 27.545455
| 0.703704
| 0.145215
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed86102b88fe53e5292e7840680746dc239293e9
| 4,883
|
py
|
Python
|
test/unit/app/tools/test_select_parameters.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | null | null | null |
test/unit/app/tools/test_select_parameters.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | 6
|
2021-11-11T20:57:49.000Z
|
2021-12-10T15:30:33.000Z
|
test/unit/app/tools/test_select_parameters.py
|
beatrizserrano/galaxy
|
e149d9d32e1bca6c07c38b1a9cdabfee60323610
|
[
"CC-BY-3.0"
] | null | null | null |
from unittest.mock import Mock
import pytest
from galaxy import model
from galaxy.tools.parameters import basic
from .util import BaseParameterTestCase
class SelectToolParameterTestCase(BaseParameterTestCase):
def test_validated_values(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
with pytest.raises(ValueError) as exc_info:
self.param.from_json("42", self.trans, {"input_bam": model.HistoryDatasetAssociation()})
assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"
def test_validated_values_missing_dependency(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
with pytest.raises(ValueError) as exc_info:
self.param.from_json("42", self.trans)
assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"
def test_unvalidated_values(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
self.trans.workflow_building_mode = True
assert self.param.from_json("42", self.trans) == "42"
def test_validated_datasets(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
with pytest.raises(ValueError) as exc_info:
self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": None})
assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"
def test_unvalidated_datasets(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
self.trans.workflow_building_mode = True
assert isinstance(
self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": basic.RuntimeValue()}),
model.HistoryDatasetAssociation,
)
def test_filter_param_value(self):
self.options_xml = """<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="0" /></options>"""
assert ("testname1", "testpath1", False) in self.param.get_options(self.trans, {"input_bam": "testname1"})
assert ("testname2", "testpath2", False) in self.param.get_options(self.trans, {"input_bam": "testname2"})
assert len(self.param.get_options(self.trans, {"input_bam": "testname3"})) == 0
def test_filter_param_value2(self):
# Same test as above, but filtering on a different column.
self.options_xml = """<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="1" /></options>"""
assert ("testname1", "testpath1", False) in self.param.get_options(self.trans, {"input_bam": "testpath1"})
assert ("testname2", "testpath2", False) in self.param.get_options(self.trans, {"input_bam": "testpath2"})
assert len(self.param.get_options(self.trans, {"input_bam": "testpath3"})) == 0
# TODO: Good deal of overlap here with DataToolParameterTestCase,
# refactor.
def setUp(self):
super().setUp()
self.test_history = model.History()
self.app.model.context.add(self.test_history)
self.app.model.context.flush()
self.app.tool_data_tables["test_table"] = MockToolDataTable()
self.trans = Mock(
app=self.app,
get_history=lambda: self.test_history,
get_current_user_roles=lambda: [],
workflow_building_mode=False,
webapp=Mock(name="galaxy"),
)
self.type = "select"
self.set_data_ref = False
self.multiple = False
self.optional = False
self.options_xml = ""
self._param = None
@property
def param(self):
if not self._param:
multi_text = ""
if self.multiple:
multi_text = 'multiple="True"'
optional_text = ""
if self.optional:
optional_text = 'optional="True"'
options_text = self.options_xml
data_ref_text = ""
if self.set_data_ref:
data_ref_text = 'data_ref="input_bam"'
template_xml = """<param name="my_name" type="%s" %s %s %s>%s</param>"""
param_str = template_xml % (self.type, data_ref_text, multi_text, optional_text, options_text)
self._param = self._parameter_for(xml=param_str)
return self._param
class MockToolDataTable:
def __init__(self):
self.columns = dict(
name=0,
value=1,
)
self.missing_index_file = None
def get_fields(self):
return [["testname1", "testpath1"], ["testname2", "testpath2"]]
| 45.635514
| 137
| 0.643662
| 592
| 4,883
| 5.089527
| 0.212838
| 0.045138
| 0.041819
| 0.05078
| 0.537006
| 0.51842
| 0.51842
| 0.509127
| 0.509127
| 0.509127
| 0
| 0.008698
| 0.223019
| 4,883
| 106
| 138
| 46.066038
| 0.785451
| 0.026623
| 0
| 0.149425
| 0
| 0.034483
| 0.241103
| 0.039377
| 0
| 0
| 0
| 0.009434
| 0.126437
| 1
| 0.126437
| false
| 0
| 0.057471
| 0.011494
| 0.229885
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ed8c04e174410b92850aae3e034c73bb05a4abae
| 4,351
|
py
|
Python
|
src/selfdroid/appstorage/crud/AppAdder.py
|
vitlabuda/selfdroid-web-app
|
9eac9ee2c34038de13e179b6afb3d530a086e7b2
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2022-03-13T14:57:04.000Z
|
2022-03-13T14:57:04.000Z
|
src/selfdroid/appstorage/crud/AppAdder.py
|
vitlabuda/selfdroid-web-app
|
9eac9ee2c34038de13e179b6afb3d530a086e7b2
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/selfdroid/appstorage/crud/AppAdder.py
|
vitlabuda/selfdroid-web-app
|
9eac9ee2c34038de13e179b6afb3d530a086e7b2
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
#
# Copyright (c) 2021 Vít Labuda. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sqlalchemy.exc
from selfdroid.appstorage.AppMetadata import AppMetadata
from selfdroid.appstorage.AppMetadataDBModel import AppMetadataDBModel
from selfdroid.appstorage.AppStorageConsistencyEnsurer import AppStorageConsistencyEnsurer
from selfdroid.appstorage.apk.APKParser import APKParser
from selfdroid.appstorage.apk.ParsedAPK import ParsedAPK
from selfdroid.appstorage.crud.AppAdderException import AppAdderException
from selfdroid.web.WebStatusMessageCollector import WebStatusMessageCollector
from selfdroid import db
class AppAdder:
"""
This class must be instantiated and have its public methods called in a locked context!
"""
def __init__(self, uploaded_apk_path: str):
self._uploaded_apk_path: str = uploaded_apk_path
self._parsed_apk: ParsedAPK = APKParser(self._uploaded_apk_path).parsed_apk
def add_app_while_locked(self) -> AppMetadata:
"""
:return: The metadata of the added app.
"""
try:
app_metadata = self._add_app_while_locked_with_exceptions_handled()
except (sqlalchemy.exc.SQLAlchemyError, OSError):
db.session.rollback()
raise AppAdderException("An error occurred while adding the app!")
finally:
AppStorageConsistencyEnsurer().ensure_consistency_while_locked()
return app_metadata
def _add_app_while_locked_with_exceptions_handled(self) -> AppMetadata:
self._check_if_app_can_be_added()
return self._perform_app_addition()
def _check_if_app_can_be_added(self) -> None:
an_app_with_the_same_package_name = AppMetadataDBModel.query.filter_by(package_name=self._parsed_apk.package_name).first()
if an_app_with_the_same_package_name is not None:
html_message = WebStatusMessageCollector.format_html_message("An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!", self._parsed_apk.package_name)
raise AppAdderException(html_message)
def _perform_app_addition(self) -> AppMetadata:
# A UserReadableException mustn't be raised in this method!
# 1. Database
db_model = self._parsed_apk.create_new_db_model_with_metadata()
db.session.add(db_model)
db.session.commit()
assert isinstance(db_model.id, int)
app_metadata = AppMetadata.from_db_model(db_model)
# 2. APK
apk_path = app_metadata.get_apk_path()
os.rename(self._uploaded_apk_path, apk_path)
# 3. Icon
icon_path = app_metadata.get_icon_path()
with open(icon_path, "wb") as icon_file:
icon_file.write(self._parsed_apk.uniform_png_app_icon)
return app_metadata
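Per the class docstring, add_app_while_locked must run inside the application's storage lock; a sketch with a stand-in lock and upload path (both hypothetical, the real app supplies its own):
import threading

storage_lock = threading.Lock()  # hypothetical stand-in for the app's storage lock

with storage_lock:
    metadata = AppAdder('/tmp/uploaded.apk').add_app_while_locked()  # hypothetical path
    print(metadata.get_apk_path())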
| 44.397959
| 234
| 0.75385
| 576
| 4,351
| 5.493056
| 0.401042
| 0.03287
| 0.043616
| 0.02402
| 0.156764
| 0.105247
| 0.092604
| 0.042984
| 0.042984
| 0.042984
| 0
| 0.00311
| 0.187083
| 4,351
| 97
| 235
| 44.85567
| 0.891433
| 0.400598
| 0
| 0.046512
| 0
| 0.023256
| 0.065986
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 1
| 0.116279
| false
| 0
| 0.232558
| 0
| 0.44186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
71ebf7fd79d9cbf3e546f3b0a0480b99be5ed04d
| 3,549
|
py
|
Python
|
websockets.py
|
ejojmjn/indiana-phone
|
5d666ac651d3e02291806f24c265564002912e00
|
[
"MIT"
] | null | null | null |
websockets.py
|
ejojmjn/indiana-phone
|
5d666ac651d3e02291806f24c265564002912e00
|
[
"MIT"
] | null | null | null |
websockets.py
|
ejojmjn/indiana-phone
|
5d666ac651d3e02291806f24c265564002912e00
|
[
"MIT"
] | null | null | null |
#from gevent import monkey
#monkey.patch_all()
from flask import Flask, render_template, json
from flask_socketio import SocketIO, emit
from pydbus import SystemBus
from gi.repository import GLib
import threading
import json
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode='threading')
#socketio = SocketIO(app)
#Message: (':1.654', '/hfp/org/bluez/hci0/dev_94_65_2D_84_61_99', 'org.ofono.Modem', 'PropertyChanged', ('Powered', False))
#Data: Powered
bus = SystemBus()
def cb_server_signal_emission(*args):
print("Message: ", args)
makedev = lambda path : path.split('/')[-1]
iface = args[2]
if 'org.ofono.Modem' in iface:
if 'PropertyChanged' in args[3]:
message = { 'source': 'modem', 'event': 'property_change', 'device': makedev(args[1]), 'property': args[4][0], 'property_value': args[4][1] }
else:
message = {'unknown_signal': args }
elif 'org.ofono.NetworkRegistration' in iface:
if 'PropertyChanged' in args[3]:
message = { 'source': 'network', 'event': 'property_change', 'device': makedev(args[1]), 'property': args[4][0], 'property_value': args[4][1] }
else:
message = {'unknown_signal': args }
elif 'ofono.VoiceCallManager' in iface:
if 'CallAdded' in args[3]:
message = { 'source': 'callmgr', 'event': 'call_added', 'device': makedev(args[1]), 'properties': args[4][1] }
elif 'CallRemoved' in args[3]:
message = { 'source': 'callmgr', 'event': 'call_removed', 'device': makedev(args[1]) }
else:
message = {'unknown_signal': args }
elif 'ofono.VoiceCall' in iface:
if 'PropertyChanged' in args[3]:
message = { 'source': 'call', 'event': 'property_change', 'device': makedev(args[1]), 'property': args[4][0], 'property_value': args[4][1] }
else:
message = {'unknown_signal': args }
socketio.emit('message', json.dumps(message))
def dbus_monitor():
bus.subscribe(iface = 'org.ofono.Modem',
signal_fired = cb_server_signal_emission)
bus.subscribe(iface = 'org.ofono.NetworkRegistration',
signal_fired = cb_server_signal_emission)
print(bus)
bus.subscribe(iface = 'org.ofono.VoiceCallManager',
signal_fired = cb_server_signal_emission)
print(bus)
bus.subscribe(iface = 'org.ofono.VoiceCall',
signal_fired = cb_server_signal_emission)
loop = GLib.MainLoop()
loop.run()
@app.route('/')
def index():
return '''
<html>
<head>
<script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/socket.io/1.3.6/socket.io.min.js"></script>
<script type="text/javascript" charset="utf-8">
var socket = io.connect('http://' + document.domain + ':' + location.port);
socket.on('connect', function() {
socket.emit('connected', {data: 'Client connected!'});
});
socket.on('message', function(message) {
console.log('The server has a message for you: ' + message);
var t = document.getElementById("logbox");
t.value = t.value + 'MESSAGE: ' + message + '\\n';
});
</script>
</head>
<body>
<textarea id="logbox" width="100" rows="10"></textarea>
<br>
<button onclick="document.getElementById('logbox').value='';">Clear</button>
</body>
</html>
'''
@socketio.on('my event')
def handle_my_custom_event(arg1):
emit('message', {'data': 42})
if __name__ == '__main__':
t = threading.Thread(target=dbus_monitor)
t.daemon = True
t.start()
socketio.run(app, host='0.0.0.0', port=5001)
| 32.263636
| 151
| 0.641589
| 446
| 3,549
| 4.973094
| 0.360987
| 0.025248
| 0.03156
| 0.049594
| 0.359333
| 0.33679
| 0.307033
| 0.307033
| 0.255185
| 0.195672
| 0
| 0.021372
| 0.182587
| 3,549
| 109
| 152
| 32.559633
| 0.743192
| 0.057481
| 0
| 0.228916
| 0
| 0.012048
| 0.419539
| 0.113275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048193
| false
| 0
| 0.072289
| 0.012048
| 0.13253
| 0.036145
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
71ecbe30760d8ff6d5c97c8f6cb9ae037d64dc1b
| 39,956
|
py
|
Python
|
sc2/bot_ai.py
|
Lexa307/PhotonDefender
|
a08dc652e5c64e3ccb33b7cfa206846dca0575bd
|
[
"MIT"
] | 2
|
2019-07-17T13:00:32.000Z
|
2019-07-17T13:09:30.000Z
|
sc2/bot_ai.py
|
Lexa307/PhotonDefender
|
a08dc652e5c64e3ccb33b7cfa206846dca0575bd
|
[
"MIT"
] | null | null | null |
sc2/bot_ai.py
|
Lexa307/PhotonDefender
|
a08dc652e5c64e3ccb33b7cfa206846dca0575bd
|
[
"MIT"
] | null | null | null |
import itertools
import logging
import math
import random
from collections import Counter
from typing import Any, Dict, List, Optional, Set, Tuple, Union # mypy type checking
from .cache import property_cache_forever, property_cache_once_per_frame
from .data import ActionResult, Alert, Attribute, Race, Result, Target, race_gas, race_townhalls, race_worker
from .game_data import AbilityData, GameData
# imports for mypy and pycharm autocomplete
from .game_state import GameState
from .ids.ability_id import AbilityId
from .ids.unit_typeid import UnitTypeId
from .ids.upgrade_id import UpgradeId
from .pixel_map import PixelMap
from .position import Point2, Point3
from .unit import Unit
from .units import Units
logger = logging.getLogger(__name__)
class BotAI:
"""Base class for bots."""
EXPANSION_GAP_THRESHOLD = 15
def __init__(self):
# Specific opponent bot ID used in sc2ai ladder games http://sc2ai.net/
# The bot ID will stay the same each game so your bot can "adapt" to the opponent
self.opponent_id: int = None
self.units: Units = None
self.workers: Units = None
self.townhalls: Units = None
self.geysers: Units = None
self.minerals: int = None
self.vespene: int = None
self.supply_army: Union[float, int] = None
self.supply_workers: Union[float, int] = None # Doesn't include workers in production
self.supply_cap: Union[float, int] = None
self.supply_used: Union[float, int] = None
self.supply_left: Union[float, int] = None
self.idle_worker_count: int = None
self.army_count: int = None
self.warp_gate_count: int = None
self.larva_count: int = None
self.cached_known_enemy_structures = None
self.cached_known_enemy_units = None
@property
def enemy_race(self) -> Race:
assert len(self._game_info.player_races) == 2, "enemy_race not available"
self.enemy_id = 3 - self.player_id
return Race(self._game_info.player_races[self.enemy_id])
@property
def time(self) -> Union[int, float]:
""" Returns time in seconds, assumes the game is played on 'faster' """
return self.state.game_loop / 22.4 # / (1/1.4) * (1/16)
@property
def time_formatted(self) -> str:
""" Returns time as string in min:sec format """
t = self.time
return f"{int(t // 60):02}:{int(t % 60):02}"
@property
def game_info(self) -> "GameInfo":
return self._game_info
def alert(self, alert_code: Alert) -> bool:
"""
Check if alert is triggered in the current step.
Example use:
from sc2.data import Alert
if self.alert(Alert.AddOnComplete):
print("Addon Complete")
Alert codes:
AlertError
AddOnComplete
BuildingComplete
BuildingUnderAttack
LarvaHatched
MergeComplete
MineralsExhausted
MorphComplete
MothershipComplete
MULEExpired
NuclearLaunchDetected
NukeComplete
NydusWormDetected
ResearchComplete
TrainError
TrainUnitComplete
TrainWorkerComplete
TransformationComplete
UnitUnderAttack
UpgradeComplete
VespeneExhausted
WarpInComplete
"""
assert isinstance(alert_code, Alert), f"alert_code {alert_code} is no Alert"
return alert_code.value in self.state.alerts
@property
def start_location(self) -> Point2:
return self._game_info.player_start_location
@property
def enemy_start_locations(self) -> List[Point2]:
"""Possible start locations for enemies."""
return self._game_info.start_locations
@property_cache_once_per_frame
def known_enemy_units(self) -> Units:
"""List of known enemy units, including structures."""
return self.state.enemy_units
@property_cache_once_per_frame
def known_enemy_structures(self) -> Units:
"""List of known enemy units, structures only."""
return self.state.enemy_units.structure
@property
def main_base_ramp(self) -> "Ramp":
""" Returns the Ramp instance of the closest main-ramp to start location.
Look in game_info.py for more information """
if hasattr(self, "cached_main_base_ramp"):
return self.cached_main_base_ramp
# The reason for len(ramp.upper) in {2, 5} is:
# ParaSite map has 5 upper points, and most other maps have 2 upper points at the main ramp.
# The map Acolyte has 4 upper points at the wrong ramp (which is closest to the start position).
try:
self.cached_main_base_ramp = min(
(ramp for ramp in self.game_info.map_ramps if len(ramp.upper) in {2, 5}),
key=lambda r: self.start_location.distance_to(r.top_center),
)
except ValueError:
# Hardcoded hotfix for Honorgrounds LE map, as that map has a large main base ramp with inbase natural
self.cached_main_base_ramp = min(
(ramp for ramp in self.game_info.map_ramps if len(ramp.upper) in {4, 9}),
key=lambda r: self.start_location.distance_to(r.top_center),
)
return self.cached_main_base_ramp
@property_cache_forever
def expansion_locations(self) -> Dict[Point2, Units]:
"""
Returns dict with the correct expansion position Point2 object as key,
resources (mineral field and vespene geyser) as value.
"""
# Idea: create a group for every resource, then merge these groups if
# any resource in a group is closer than 6 to any resource of another group
# Distance we group resources by
RESOURCE_SPREAD_THRESHOLD = 8.5
geysers = self.state.vespene_geyser
# Create a group for every resource
resource_groups = [[resource] for resource in self.state.resources]
# Loop the merging process as long as we change something
found_something = True
while found_something:
found_something = False
# Check every combination of two groups
for group_a, group_b in itertools.combinations(resource_groups, 2):
# Check if any pair of resource of these groups is closer than threshold together
if any(
resource_a.distance_to(resource_b) <= RESOURCE_SPREAD_THRESHOLD
for resource_a, resource_b in itertools.product(group_a, group_b)
):
# Remove the single groups and add the merged group
resource_groups.remove(group_a)
resource_groups.remove(group_b)
resource_groups.append(group_a + group_b)
found_something = True
break
# Distance offsets we apply to center of each resource group to find expansion position
offset_range = 7
offsets = [
(x, y)
for x, y in itertools.product(range(-offset_range, offset_range + 1), repeat=2)
if math.hypot(x, y) <= 8
]
# Dict we want to return
centers = {}
# For every resource group:
for resources in resource_groups:
# Possible expansion points
amount = len(resources)
# Calculate center, round and add 0.5 because expansion location will have (x.5, y.5)
# coordinates because bases have size 5.
center_x = int(sum(resource.position.x for resource in resources) / amount) + 0.5
center_y = int(sum(resource.position.y for resource in resources) / amount) + 0.5
possible_points = (Point2((offset[0] + center_x, offset[1] + center_y)) for offset in offsets)
# Filter out points that are too near
possible_points = (
point
for point in possible_points
# Check if point can be built on
if self._game_info.placement_grid[point.rounded] == 1
                # Check that all resources keep enough distance to the point
and all(point.distance_to(resource) > (7 if resource in geysers else 6) for resource in resources)
)
# Choose best fitting point
result = min(possible_points, key=lambda point: sum(point.distance_to(resource) for resource in resources))
centers[result] = resources
return centers
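    # Usage sketch (hypothetical, not part of the original file): the returned
    # dict maps each computed expansion center to its resource fields, e.g.
    #     for center, resources in self.expansion_locations.items():
    #         nearest = min(resources, key=lambda r: r.distance_to(center))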
def _correct_zerg_supply(self):
""" The client incorrectly rounds zerg supply down instead of up (see
https://github.com/Blizzard/s2client-proto/issues/123), so self.supply_used
and friends return the wrong value when there are an odd number of zerglings
and banelings. This function corrects the bad values. """
# TODO: remove when Blizzard/sc2client-proto#123 gets fixed.
half_supply_units = {
UnitTypeId.ZERGLING,
UnitTypeId.ZERGLINGBURROWED,
UnitTypeId.BANELING,
UnitTypeId.BANELINGBURROWED,
UnitTypeId.BANELINGCOCOON,
}
correction = self.units(half_supply_units).amount % 2
self.supply_used += correction
self.supply_army += correction
self.supply_left -= correction
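    # Worked example: 3 zerglings cost 1.5 supply, which the client reports as
    # floor(1.5) == 1; the correction units({...}).amount % 2 == 3 % 2 == 1 then
    # restores the correctly rounded-up value of 2.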
async def get_available_abilities(
self, units: Union[List[Unit], Units], ignore_resource_requirements=False
) -> List[List[AbilityId]]:
""" Returns available abilities of one or more units. Right know only checks cooldown, energy cost, and whether the ability has been researched.
Example usage:
units_abilities = await self.get_available_abilities(self.units)
or
units_abilities = await self.get_available_abilities([self.units.random]) """
return await self._client.query_available_abilities(units, ignore_resource_requirements)
async def expand_now(
self, building: UnitTypeId = None, max_distance: Union[int, float] = 10, location: Optional[Point2] = None
):
""" Not recommended as this function uses 'self.do' (reduces performance).
Finds the next possible expansion via 'self.get_next_expansion()'. If the target expansion is blocked (e.g. an enemy unit), it will misplace the expansion. """
if not building:
# self.race is never Race.Random
start_townhall_type = {
Race.Protoss: UnitTypeId.NEXUS,
Race.Terran: UnitTypeId.COMMANDCENTER,
Race.Zerg: UnitTypeId.HATCHERY,
}
building = start_townhall_type[self.race]
assert isinstance(building, UnitTypeId)
if not location:
location = await self.get_next_expansion()
await self.build(building, near=location, max_distance=max_distance, random_alternative=False, placement_step=1)
async def get_next_expansion(self) -> Optional[Point2]:
"""Find next expansion location."""
closest = None
distance = math.inf
for el in self.expansion_locations:
def is_near_to_expansion(t):
return t.distance_to(el) < self.EXPANSION_GAP_THRESHOLD
if any(map(is_near_to_expansion, self.townhalls)):
# already taken
continue
startp = self._game_info.player_start_location
d = await self._client.query_pathing(startp, el)
if d is None:
continue
if d < distance:
distance = d
closest = el
return closest
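    # Usage sketch (hypothetical, assuming a Protoss bot): inside an async on_step,
    #     if self.can_afford(UnitTypeId.NEXUS) and not self.already_pending(UnitTypeId.NEXUS):
    #         await self.expand_now()
    # expand_now() falls back to get_next_expansion() and the race's townhall type.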
async def distribute_workers(self, resource_ratio: float = 2):
"""
Distributes workers across all the bases taken.
        Keyword `resource_ratio` takes a float. If the current minerals-to-gas
        ratio is bigger than `resource_ratio`, this function prefers filling geysers
        first; if it is lower, it prefers sending workers to minerals first.
        This applies only to workers that need to be moved anyway; it will NOT fill
        geysers on its own.
        NOTE: This function is far from optimal. If you really want refined
        worker control, you should write your own distribution function.
        For example, long-distance mining control and moving workers after a base
        was destroyed are not handled.
WARNING: This is quite slow when there are lots of workers or multiple bases.
"""
if not self.state.mineral_field or not self.workers or not self.townhalls.ready:
return
actions = []
worker_pool = [worker for worker in self.workers.idle]
bases = self.townhalls.ready
geysers = self.geysers.ready
# list of places that need more workers
deficit_mining_places = []
for mining_place in bases | geysers:
difference = mining_place.surplus_harvesters
# perfect amount of workers, skip mining place
if not difference:
continue
if mining_place.is_vespene_geyser:
# get all workers that target the gas extraction site
# or are on their way back from it
local_workers = self.workers.filter(
lambda unit: unit.order_target == mining_place.tag
or (unit.is_carrying_vespene and unit.order_target == bases.closest_to(mining_place).tag)
)
else:
# get tags of minerals around expansion
local_minerals_tags = {
mineral.tag for mineral in self.state.mineral_field if mineral.distance_to(mining_place) <= 8
}
                # get all workers whose gather target is one of those mineral patches
                # or who are carrying minerals back to this townhall
local_workers = self.workers.filter(
lambda unit: unit.order_target in local_minerals_tags
or (unit.is_carrying_minerals and unit.order_target == mining_place.tag)
)
# too many workers
if difference > 0:
for worker in local_workers[:difference]:
worker_pool.append(worker)
# too few workers
# add mining place to deficit bases for every missing worker
else:
deficit_mining_places += [mining_place for _ in range(-difference)]
# prepare all minerals near a base if we have too many workers
# and need to send them to the closest patch
if len(worker_pool) > len(deficit_mining_places):
all_minerals_near_base = [
mineral
for mineral in self.state.mineral_field
if any(mineral.distance_to(base) <= 8 for base in self.townhalls.ready)
]
# distribute every worker in the pool
for worker in worker_pool:
            # as long as there are still mining places with a worker deficit
if deficit_mining_places:
# choose only mineral fields first if current mineral to gas ratio is less than target ratio
if self.vespene and self.minerals / self.vespene < resource_ratio:
possible_mining_places = [place for place in deficit_mining_places if not place.vespene_contents]
# else prefer gas
else:
possible_mining_places = [place for place in deficit_mining_places if place.vespene_contents]
# if preferred type is not available any more, get all other places
if not possible_mining_places:
possible_mining_places = deficit_mining_places
# find closest mining place
current_place = min(deficit_mining_places, key=lambda place: place.distance_to(worker))
# remove it from the list
deficit_mining_places.remove(current_place)
# if current place is a gas extraction site, go there
if current_place.vespene_contents:
actions.append(worker.gather(current_place))
                # otherwise the current place is a mineral expansion:
                # go to the nearby mineral field that has the most minerals left
else:
local_minerals = [
mineral for mineral in self.state.mineral_field if mineral.distance_to(current_place) <= 8
]
target_mineral = max(local_minerals, key=lambda mineral: mineral.mineral_contents)
actions.append(worker.gather(target_mineral))
# more workers to distribute than free mining spots
# send to closest if worker is doing nothing
elif worker.is_idle and all_minerals_near_base:
target_mineral = min(all_minerals_near_base, key=lambda mineral: mineral.distance_to(worker))
actions.append(worker.gather(target_mineral))
else:
# there are no deficit mining places and worker is not idle
                # so don't move it
pass
await self.do_actions(actions)
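    # Usage sketch (hypothetical): call once per step; resource_ratio biases the
    # reassignment, e.g. prefer gas until minerals reach 1.5x vespene:
    #     await self.distribute_workers(resource_ratio=1.5)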
@property
def owned_expansions(self) -> Dict[Point2, Unit]:
"""List of expansions owned by the player."""
owned = {}
for el in self.expansion_locations:
def is_near_to_expansion(t):
return t.distance_to(el) < self.EXPANSION_GAP_THRESHOLD
th = next((x for x in self.townhalls if is_near_to_expansion(x)), None)
if th:
owned[el] = th
return owned
def can_feed(self, unit_type: UnitTypeId) -> bool:
""" Checks if you have enough free supply to build the unit """
required = self._game_data.units[unit_type.value]._proto.food_required
return required == 0 or self.supply_left >= required
def can_afford(
self, item_id: Union[UnitTypeId, UpgradeId, AbilityId], check_supply_cost: bool = True
) -> "CanAffordWrapper":
"""Tests if the player has enough resources to build a unit or cast an ability."""
enough_supply = True
if isinstance(item_id, UnitTypeId):
unit = self._game_data.units[item_id.value]
cost = self._game_data.calculate_ability_cost(unit.creation_ability)
if check_supply_cost:
enough_supply = self.can_feed(item_id)
elif isinstance(item_id, UpgradeId):
cost = self._game_data.upgrades[item_id.value].cost
else:
cost = self._game_data.calculate_ability_cost(item_id)
return CanAffordWrapper(cost.minerals <= self.minerals, cost.vespene <= self.vespene, enough_supply)
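    # Usage sketch (hypothetical): the wrapper is truthy only if minerals,
    # vespene and (when check_supply_cost is True) supply all suffice:
    #     if self.can_afford(UnitTypeId.MARINE):
    #         await self.do(barracks.train(UnitTypeId.MARINE))
    # `barracks` stands for some ready Terran production structure.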
async def can_cast(
self,
unit: Unit,
ability_id: AbilityId,
target: Optional[Union[Unit, Point2, Point3]] = None,
only_check_energy_and_cooldown: bool = False,
cached_abilities_of_unit: List[AbilityId] = None,
) -> bool:
"""Tests if a unit has an ability available and enough energy to cast it.
        See data_pb2.py (line 161) for what the target values 1-5 mean."""
assert isinstance(unit, Unit)
assert isinstance(ability_id, AbilityId)
assert isinstance(target, (type(None), Unit, Point2, Point3))
# check if unit has enough energy to cast or if ability is on cooldown
if cached_abilities_of_unit:
abilities = cached_abilities_of_unit
else:
abilities = (await self.get_available_abilities([unit]))[0]
if ability_id in abilities:
if only_check_energy_and_cooldown:
return True
cast_range = self._game_data.abilities[ability_id.value]._proto.cast_range
ability_target = self._game_data.abilities[ability_id.value]._proto.target
# Check if target is in range (or is a self cast like stimpack)
if (
ability_target == 1
or ability_target == Target.PointOrNone.value
and isinstance(target, (Point2, Point3))
and unit.distance_to(target) <= cast_range
            ):  # can't replace 1 with "Target.None.value" because ".None" is not a valid enum attribute name
return True
# Check if able to use ability on a unit
elif (
ability_target in {Target.Unit.value, Target.PointOrUnit.value}
and isinstance(target, Unit)
and unit.distance_to(target) <= cast_range
):
return True
# Check if able to use ability on a position
elif (
ability_target in {Target.Point.value, Target.PointOrUnit.value}
and isinstance(target, (Point2, Point3))
and unit.distance_to(target) <= cast_range
):
return True
return False
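    # Usage sketch (hypothetical): gate a cast on range, energy and cooldown:
    #     if await self.can_cast(raven, AbilityId.EFFECT_AUTOTURRET, target_point):
    #         await self.do(raven(AbilityId.EFFECT_AUTOTURRET, target_point))
    # `raven` and `target_point` are placeholders for a unit and a Point2.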
def select_build_worker(self, pos: Union[Unit, Point2, Point3], force: bool = False) -> Optional[Unit]:
"""Select a worker to build a building with."""
workers = (
self.workers.filter(lambda w: (w.is_gathering or w.is_idle) and w.distance_to(pos) < 20) or self.workers
)
if workers:
for worker in workers.sorted_by_distance_to(pos).prefer_idle:
if (
not worker.orders
or len(worker.orders) == 1
and worker.orders[0].ability.id in {AbilityId.MOVE, AbilityId.HARVEST_GATHER}
):
return worker
return workers.random if force else None
async def can_place(self, building: Union[AbilityData, AbilityId, UnitTypeId], position: Point2) -> bool:
"""Tests if a building can be placed in the given location."""
building_type = type(building)
assert building_type in {AbilityData, AbilityId, UnitTypeId}
if building_type == UnitTypeId:
building = self._game_data.units[building.value].creation_ability
elif building_type == AbilityId:
building = self._game_data.abilities[building.value]
r = await self._client.query_building_placement(building, [position])
return r[0] == ActionResult.Success
async def find_placement(
self,
building: UnitTypeId,
near: Union[Unit, Point2, Point3],
max_distance: int = 20,
random_alternative: bool = True,
placement_step: int = 2,
) -> Optional[Point2]:
"""Finds a placement location for building."""
assert isinstance(building, (AbilityId, UnitTypeId))
assert isinstance(near, Point2)
if isinstance(building, UnitTypeId):
building = self._game_data.units[building.value].creation_ability
else: # AbilityId
building = self._game_data.abilities[building.value]
if await self.can_place(building, near):
return near
if max_distance == 0:
return None
for distance in range(placement_step, max_distance, placement_step):
possible_positions = [
Point2(p).offset(near).to2
for p in (
[(dx, -distance) for dx in range(-distance, distance + 1, placement_step)]
+ [(dx, distance) for dx in range(-distance, distance + 1, placement_step)]
+ [(-distance, dy) for dy in range(-distance, distance + 1, placement_step)]
+ [(distance, dy) for dy in range(-distance, distance + 1, placement_step)]
)
]
res = await self._client.query_building_placement(building, possible_positions)
possible = [p for r, p in zip(res, possible_positions) if r == ActionResult.Success]
if not possible:
continue
if random_alternative:
return random.choice(possible)
else:
return min(possible, key=lambda p: p.distance_to_point2(near))
return None
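    # Usage sketch (hypothetical): positions are probed on expanding square
    # rings of spacing placement_step around `near`, e.g.
    #     pos = await self.find_placement(UnitTypeId.GATEWAY, near=pylon.position, random_alternative=False)
    # which returns the closest buildable spot, or None if every ring fails.
    # `pylon` stands for some ready pylon unit.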
def already_pending_upgrade(self, upgrade_type: UpgradeId) -> Union[int, float]:
""" Check if an upgrade is being researched
Return values:
0: not started
0 < x < 1: researching
1: finished
"""
assert isinstance(upgrade_type, UpgradeId)
if upgrade_type in self.state.upgrades:
return 1
level = None
if "LEVEL" in upgrade_type.name:
level = upgrade_type.name[-1]
        creation_ability_id = self._game_data.upgrades[upgrade_type.value].research_ability.id
        for structure in self.units.filter(lambda unit: unit.is_structure and unit.is_ready):
            for order in structure.orders:
                if order.ability.id is creation_ability_id:
                    if level and order.ability.button_name[-1] != level:
                        return 0
                    return order.progress
return 0
@property_cache_once_per_frame
def _abilities_all_units(self) -> Counter:
""" Cache for the already_pending function, includes protoss units warping in, and all units in production, and all structures, and all morphs """
abilities_amount = Counter()
for unit in self.units: # type: Unit
for order in unit.orders:
abilities_amount[order.ability] += 1
if not unit.is_ready:
if self.race != Race.Terran or not unit.is_structure:
# If an SCV is constructing a building, already_pending would count this structure twice (once from the SCV order, and once from "not structure.is_ready")
abilities_amount[self._game_data.units[unit.type_id.value].creation_ability] += 1
return abilities_amount
@property_cache_once_per_frame
def _abilities_workers_and_eggs(self) -> Counter:
""" Cache for the already_pending function, includes all worker orders (including pending).
Zerg units in production (except queens and morphing units) and structures in production,
counts double for terran """
abilities_amount = Counter()
for worker in self.workers: # type: Unit
for order in worker.orders:
abilities_amount[order.ability] += 1
if self.race == Race.Zerg:
for egg in self.units(UnitTypeId.EGG): # type: Unit
for order in egg.orders:
abilities_amount[order.ability] += 1
if self.race != Race.Terran:
# If an SCV is constructing a building, already_pending would count this structure twice
# (once from the SCV order, and once from "not structure.is_ready")
for unit in self.units.structure.not_ready: # type: Unit
abilities_amount[self._game_data.units[unit.type_id.value].creation_ability] += 1
return abilities_amount
def already_pending(self, unit_type: Union[UpgradeId, UnitTypeId], all_units: bool = True) -> int:
"""
Returns a number of buildings or units already in progress, or if a
worker is en route to build it. This also includes queued orders for
workers and build queues of buildings.
If all_units==True, then build queues of other units (such as Carriers
(Interceptors) or Oracles (Stasis Ward)) are also included.
"""
# TODO / FIXME: SCV building a structure might be counted as two units
if isinstance(unit_type, UpgradeId):
return self.already_pending_upgrade(unit_type)
ability = self._game_data.units[unit_type.value].creation_ability
amount = len(self.units(unit_type).not_ready)
if all_units:
amount += sum([o.ability == ability for u in self.units for o in u.orders])
else:
amount += sum([o.ability == ability for w in self.workers for o in w.orders])
amount += sum([egg.orders[0].ability == ability for egg in self.units(UnitTypeId.EGG)])
return amount
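    # Usage sketch (hypothetical): avoid double-queuing a structure:
    #     if self.already_pending(UnitTypeId.SUPPLYDEPOT) == 0 and self.can_afford(UnitTypeId.SUPPLYDEPOT):
    #         await self.build(UnitTypeId.SUPPLYDEPOT, near=townhall.position)
    # `townhall` stands for one of self.townhalls.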
    async def build(self, building: UnitTypeId, near: Union[Point2, Point3], max_distance: int = 20, unit: Optional[Unit] = None, random_alternative: bool = True, placement_step: int = 2):
"""Build a building."""
if isinstance(near, Unit):
near = near.position.to2
elif near is not None:
near = near.to2
else:
return
p = await self.find_placement(building, near.rounded, max_distance, random_alternative, placement_step)
if p is None:
return ActionResult.CantFindPlacementLocation
unit = unit or self.select_build_worker(p)
if unit is None or not self.can_afford(building):
return ActionResult.Error
return await self.do(unit.build(building, p))
async def do(self, action):
if not self.can_afford(action):
logger.warning(f"Cannot afford action {action}")
return ActionResult.Error
r = await self._client.actions(action)
if not r: # success
cost = self._game_data.calculate_ability_cost(action.ability)
self.minerals -= cost.minerals
self.vespene -= cost.vespene
else:
logger.error(f"Error: {r} (action: {action})")
return r
async def do_actions(self, actions: List["UnitCommand"], prevent_double=True):
""" Unlike 'self.do()', this function does not instantly subtract minerals and vespene. """
if not actions:
return None
if prevent_double:
actions = list(filter(self.prevent_double_actions, actions))
for action in actions:
cost = self._game_data.calculate_ability_cost(action.ability)
self.minerals -= cost.minerals
self.vespene -= cost.vespene
return await self._client.actions(actions)
def prevent_double_actions(self, action):
# always add actions if queued
if action.queue:
return True
if action.unit.orders:
# action: UnitCommand
# current_action: UnitOrder
current_action = action.unit.orders[0]
if current_action.ability.id != action.ability:
# different action, return true
return True
try:
if current_action.target == action.target.tag:
# same action, remove action if same target unit
return False
except AttributeError:
pass
try:
if action.target.x == current_action.target.x and action.target.y == current_action.target.y:
# same action, remove action if same target position
return False
except AttributeError:
pass
return True
return True
async def chat_send(self, message: str):
""" Send a chat message. """
assert isinstance(message, str), f"{message} is no string"
await self._client.chat_send(message, False)
    # For the functions below, make sure you are inside the boundaries of the map size.
def get_terrain_height(self, pos: Union[Point2, Point3, Unit]) -> int:
""" Returns terrain height at a position.
Caution: terrain height is different from a unit's z-coordinate.
"""
        assert isinstance(pos, (Point2, Point3, Unit)), "pos is not of type Point2, Point3 or Unit"
pos = pos.position.to2.rounded
return self._game_info.terrain_height[pos] # returns int
    def get_terrain_z_height(self, pos: Union[Point2, Point3, Unit]) -> float:
        """ Returns terrain z-height at a position. """
        assert isinstance(pos, (Point2, Point3, Unit)), "pos is not of type Point2, Point3 or Unit"
pos = pos.position.to2.rounded
return -16 + 32 * self._game_info.terrain_height[pos] / 255
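    # Worked example: a stored height byte of 127 maps to
    # -16 + 32 * 127 / 255 ≈ -0.06, i.e. roughly the zero plane; the byte
    # extremes 0 and 255 map to -16 and +16 respectively.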
def in_placement_grid(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if you can place something at a position.
Remember, buildings usually use 2x2, 3x3 or 5x5 of these grid points.
Caution: some x and y offset might be required, see ramp code:
https://github.com/Dentosal/python-sc2/blob/master/sc2/game_info.py#L17-L18 """
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self._game_info.placement_grid[pos] == 1
def in_pathing_grid(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if a unit can pass through a grid point. """
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self._game_info.pathing_grid[pos] == 1
def is_visible(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if you have vision on a grid point. """
# more info: https://github.com/Blizzard/s2client-proto/blob/9906df71d6909511907d8419b33acc1a3bd51ec0/s2clientprotocol/spatial.proto#L19
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self.state.visibility[pos] == 2
def has_creep(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if there is creep on the grid point. """
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self.state.creep[pos] == 1
def _prepare_start(self, client, player_id, game_info, game_data):
"""Ran until game start to set game and player data."""
self._client: "Client" = client
self._game_info: "GameInfo" = game_info
self._game_data: GameData = game_data
self.player_id: int = player_id
self.race: Race = Race(self._game_info.player_races[self.player_id])
self._units_previous_map: dict = dict()
self._previous_upgrades: Set[UpgradeId] = set()
self.units: Units = Units([])
def _prepare_first_step(self):
"""First step extra preparations. Must not be called before _prepare_step."""
if self.townhalls:
self._game_info.player_start_location = self.townhalls.first.position
self._game_info.map_ramps, self._game_info.vision_blockers = self._game_info._find_ramps_and_vision_blockers()
def _prepare_step(self, state, proto_game_info):
        """Set attributes from the new state before on_step."""
self.state: GameState = state # See game_state.py
# update pathing grid
self._game_info.pathing_grid: PixelMap = PixelMap(
proto_game_info.game_info.start_raw.pathing_grid, in_bits=True, mirrored=False
)
# Required for events
self._units_previous_map: Dict = {unit.tag: unit for unit in self.units}
self.units: Units = state.own_units
self.workers: Units = self.units(race_worker[self.race])
self.townhalls: Units = self.units(race_townhalls[self.race])
self.geysers: Units = self.units(race_gas[self.race])
self.minerals: int = state.common.minerals
self.vespene: int = state.common.vespene
self.supply_army: int = state.common.food_army
self.supply_workers: int = state.common.food_workers # Doesn't include workers in production
self.supply_cap: int = state.common.food_cap
self.supply_used: int = state.common.food_used
self.supply_left: int = self.supply_cap - self.supply_used
if self.race == Race.Zerg:
self.larva_count: int = state.common.larva_count
# Workaround Zerg supply rounding bug
self._correct_zerg_supply()
elif self.race == Race.Protoss:
self.warp_gate_count: int = state.common.warp_gate_count
self.idle_worker_count: int = state.common.idle_worker_count
self.army_count: int = state.common.army_count
# reset cached values
self.cached_known_enemy_structures = None
self.cached_known_enemy_units = None
async def issue_events(self):
""" This function will be automatically run from main.py and triggers the following functions:
- on_unit_created
- on_unit_destroyed
- on_building_construction_complete
"""
await self._issue_unit_dead_events()
await self._issue_unit_added_events()
for unit in self.units.structure:
await self._issue_building_complete_event(unit)
if len(self._previous_upgrades) != len(self.state.upgrades):
for upgrade_completed in self.state.upgrades - self._previous_upgrades:
await self.on_upgrade_complete(upgrade_completed)
self._previous_upgrades = self.state.upgrades
async def _issue_unit_added_events(self):
for unit in self.units.not_structure:
if unit.tag not in self._units_previous_map:
await self.on_unit_created(unit)
for unit in self.units.structure:
if unit.tag not in self._units_previous_map:
await self.on_building_construction_started(unit)
async def _issue_building_complete_event(self, unit):
if unit.build_progress < 1:
return
if unit.tag not in self._units_previous_map:
return
unit_prev = self._units_previous_map[unit.tag]
if unit_prev.build_progress < 1:
await self.on_building_construction_complete(unit)
async def _issue_unit_dead_events(self):
for unit_tag in self.state.dead_units:
await self.on_unit_destroyed(unit_tag)
async def on_unit_destroyed(self, unit_tag):
""" Override this in your bot class.
Note that this function uses unit tags because the unit does not exist any more. """
async def on_unit_created(self, unit: Unit):
""" Override this in your bot class. """
async def on_building_construction_started(self, unit: Unit):
""" Override this in your bot class. """
async def on_building_construction_complete(self, unit: Unit):
""" Override this in your bot class. Note that this function is also
triggered at the start of the game for the starting base building."""
async def on_upgrade_complete(self, upgrade: UpgradeId):
""" Override this in your bot class. """
def on_start(self):
""" Allows initializing the bot when the game data is available. """
async def on_start_async(self):
""" This function is run after "on_start". At this point, game_data, game_info and
the first iteration of game_state (self.state) are available. """
async def on_step(self, iteration: int):
"""Ran on every game step (looped in realtime mode)."""
raise NotImplementedError
def on_end(self, game_result: Result):
""" Triggered at the end of a game. """
class CanAffordWrapper:
def __init__(self, can_afford_minerals, can_afford_vespene, have_enough_supply):
self.can_afford_minerals = can_afford_minerals
self.can_afford_vespene = can_afford_vespene
self.have_enough_supply = have_enough_supply
def __bool__(self):
return self.can_afford_minerals and self.can_afford_vespene and self.have_enough_supply
@property
def action_result(self):
if not self.can_afford_vespene:
return ActionResult.NotEnoughVespene
elif not self.can_afford_minerals:
return ActionResult.NotEnoughMinerals
elif not self.have_enough_supply:
return ActionResult.NotEnoughFood
else:
return None
| 44.150276 | 180 | 0.633922 | 4,991 | 39,956 | 4.908636 | 0.140453 | 0.012735 | 0.009796 | 0.004857 | 0.273562 | 0.23401 | 0.198498 | 0.169517 | 0.155557 | 0.13772 | 0 | 0.008333 | 0.29122 | 39,956 | 904 | 181 | 44.199115 | 0.856744 | 0.178947 | 0 | 0.237113 | 0 | 0 | 0.011323 | 0.000712 | 0 | 0 | 0 | 0.002212 | 0.02921 | 1 | 0.065292 | false | 0.005155 | 0.032646 | 0.008591 | 0.219931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71ecddbb1bd02f445d97d4e77a4a4128c68a4abe | 4,260 | py | Python |
| src/python/pants/jvm/resolve/lockfile_metadata.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | ["Apache-2.0"] | null | null | null |
| src/python/pants/jvm/resolve/lockfile_metadata.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | ["Apache-2.0"] | 6 | 2022-01-25T15:49:26.000Z | 2022-02-09T11:21:13.000Z |
| src/python/pants/jvm/resolve/lockfile_metadata.py | thejcannon/pants | 7c24f42cb78cc462b63698cef736eda7a85c40e0 | ["Apache-2.0"] | null | null | null |
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import Any, Iterable, cast
from pants.core.util_rules.lockfile_metadata import (
LockfileMetadata,
LockfileMetadataValidation,
LockfileScope,
_get_metadata,
lockfile_metadata_registrar,
)
from pants.jvm.resolve.common import ArtifactRequirement
from pants.util.ordered_set import FrozenOrderedSet
_jvm_lockfile_metadata = lockfile_metadata_registrar(LockfileScope.JVM)
class InvalidJVMLockfileReason(Enum):
REQUIREMENTS_MISMATCH = "requirements_mismatch"
@dataclass(frozen=True)
class JVMLockfileMetadata(LockfileMetadata):
scope = LockfileScope.JVM
@staticmethod
def new(
requirements: Iterable[ArtifactRequirement],
) -> JVMLockfileMetadata:
"""Call the most recent version of the `LockfileMetadata` class to construct a concrete
instance.
This static method should be used in place of the `LockfileMetadata` constructor. This gives
calling sites a predictable method to call to construct a new `LockfileMetadata` for
writing, while still allowing us to support _reading_ older, deprecated metadata versions.
"""
return JVMLockfileMetadataV1.from_artifact_requirements(requirements)
@classmethod
def from_lockfile(
cls, lockfile: bytes, lockfile_path: str | None = None, resolve_name: str | None = None
) -> JVMLockfileMetadataV1:
return cast(
JVMLockfileMetadataV1,
LockfileMetadata.from_lockfile_for_scope(
LockfileScope.JVM, lockfile, lockfile_path, resolve_name
),
)
def is_valid_for(
self,
requirements: Iterable[ArtifactRequirement] | None,
) -> LockfileMetadataValidation:
"""Returns Truthy if this `JVMLockfileMetadata` can be used in the current execution
context."""
raise NotImplementedError("call `is_valid_for` on subclasses only")
@_jvm_lockfile_metadata(1)
@dataclass(frozen=True)
class JVMLockfileMetadataV1(JVMLockfileMetadata):
"""Lockfile version that permits specifying a requirements as a set rather than a digest.
Validity is tested by the set of requirements strings being the same in the user requirements as
those in the stored requirements.
"""
requirements: FrozenOrderedSet[str]
@classmethod
def from_artifact_requirements(
cls, requirements: Iterable[ArtifactRequirement]
) -> JVMLockfileMetadataV1:
return cls(FrozenOrderedSet(i.to_metadata_str() for i in requirements))
@classmethod
def _from_json_dict(
cls: type[JVMLockfileMetadataV1],
json_dict: dict[Any, Any],
lockfile_description: str,
error_suffix: str,
) -> JVMLockfileMetadataV1:
metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
requirements = metadata(
"generated_with_requirements",
FrozenOrderedSet[str],
FrozenOrderedSet,
)
return JVMLockfileMetadataV1(requirements)
@classmethod
def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
instance = cast(JVMLockfileMetadataV1, instance)
return {
"generated_with_requirements": (
sorted(instance.requirements) if instance.requirements is not None else None
)
}
def is_valid_for(
self,
requirements: Iterable[ArtifactRequirement] | None,
) -> LockfileMetadataValidation:
"""Returns a truthy object if the request requirements match the metadata requirements.
For this version, "match" is defined as the request requirements being a non-strict subset
of the metadata requirements.
"""
failure_reasons: set[InvalidJVMLockfileReason] = set()
if not self.requirements.issuperset(i.to_metadata_str() for i in requirements or []):
failure_reasons.add(InvalidJVMLockfileReason.REQUIREMENTS_MISMATCH)
return LockfileMetadataValidation(failure_reasons)
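# Validity sketch (hypothetical, not part of the original file): a lockfile
# whose stored requirement strings are a superset of the request validates:
#     stored = JVMLockfileMetadataV1(FrozenOrderedSet(["g:a:1.0", "g:b:2.0"]))
#     bool(stored.is_valid_for([req]))  # True if req.to_metadata_str() == "g:a:1.0"
# `req` stands for an ArtifactRequirement; the exact metadata-string format is
# defined by ArtifactRequirement.to_metadata_str in pants.jvm.resolve.common.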
| 33.809524 | 100 | 0.711268 | 431 | 4,260 | 6.87703 | 0.361949 | 0.026991 | 0.052632 | 0.022267 | 0.084345 | 0.084345 | 0.084345 | 0.084345 | 0.062753 | 0.062753 | 0 | 0.004841 | 0.224178 | 4,260 | 125 | 101 | 34.08 | 0.891982 | 0.236854 | 0 | 0.217949 | 0 | 0 | 0.035964 | 0.02387 | 0 | 0 | 0 | 0 | 0 | 1 | 0.089744 | false | 0 | 0.089744 | 0.025641 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71ece1b40046a77ed95f80492a330f22d42912ee | 1,528 | py | Python |
| generator/generator.py | GregorKikelj/opendbc | a20ed24ea2593e5d019adf538dc0cecfc7ef8709 | ["MIT"] | 1,059 | 2017-05-31T06:33:27.000Z | 2022-03-31T23:02:29.000Z |
| generator/generator.py | DIMO-Network/opendbc | 9a1fbe581846f9d0191f142f498ef3f1c35826ea | ["MIT"] | 248 | 2017-07-14T01:45:40.000Z | 2022-03-21T17:55:26.000Z |
| generator/generator.py | DIMO-Network/opendbc | 9a1fbe581846f9d0191f142f498ef3f1c35826ea | ["MIT"] | 940 | 2017-06-02T16:40:42.000Z | 2022-03-29T16:49:58.000Z |
#!/usr/bin/env python3
import os
import re
cur_path = os.path.dirname(os.path.realpath(__file__))
opendbc_root = os.path.join(cur_path, '../')
include_pattern = re.compile(r'CM_ "IMPORT (.*?)";')
def read_dbc(src_dir, filename):
with open(os.path.join(src_dir, filename)) as file_in:
return file_in.read()
def create_dbc(src_dir, filename, output_path):
dbc_file_in = read_dbc(src_dir, filename)
includes = include_pattern.findall(dbc_file_in)
output_filename = filename.replace('.dbc', '_generated.dbc')
output_file_location = os.path.join(output_path, output_filename)
with open(output_file_location, 'w') as dbc_file_out:
dbc_file_out.write('CM_ "AUTOGENERATED FILE, DO NOT EDIT";\n')
for include_filename in includes:
include_file_header = '\n\nCM_ "Imported file %s starts here";\n' % include_filename
dbc_file_out.write(include_file_header)
include_file = read_dbc(src_dir, include_filename)
dbc_file_out.write(include_file)
dbc_file_out.write('\nCM_ "%s starts here";\n' % filename)
core_dbc = include_pattern.sub('', dbc_file_in)
dbc_file_out.write(core_dbc)
def create_all(output_path):
for src_dir, _, filenames in os.walk(cur_path):
if src_dir == cur_path:
continue
#print(src_dir)
for filename in filenames:
if filename.startswith('_') or not filename.endswith('.dbc'):
continue
#print(filename)
create_dbc(src_dir, filename, output_path)
if __name__ == "__main__":
create_all(opendbc_root)
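# Input sketch (hypothetical, not part of the original file): a source dbc
# containing the line
#     CM_ "IMPORT _honda_common.dbc";
# has that file's contents inlined into the *_generated.dbc output, after which
# include_pattern.sub('', ...) strips the IMPORT comment itself. The name
# _honda_common.dbc is illustrative; underscore-prefixed files are never
# processed as standalone inputs because create_all skips them.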
| 28.296296 | 90 | 0.716623 | 228 | 1,528 | 4.429825 | 0.289474 | 0.053465 | 0.059406 | 0.074257 | 0.188119 | 0.146535 | 0.146535 | 0.081188 | 0 | 0 | 0 | 0.000781 | 0.161649 | 1,528 | 53 | 91 | 28.830189 | 0.787666 | 0.032723 | 0 | 0.060606 | 0 | 0 | 0.108475 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.121212 | 0 | 0.242424 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71ef13879f7d9412c115fe5712bcdcce5a10b758 | 4,067 | py | Python |
| src/ripper.py | jg-rivera/cert-ripper | 2bab5e02cd2da8e92a1c308640917b6f5ee729cb | ["MIT"] | null | null | null |
| src/ripper.py | jg-rivera/cert-ripper | 2bab5e02cd2da8e92a1c308640917b6f5ee729cb | ["MIT"] | null | null | null |
| src/ripper.py | jg-rivera/cert-ripper | 2bab5e02cd2da8e92a1c308640917b6f5ee729cb | ["MIT"] | null | null | null |
from dotenv import load_dotenv
from PyPDF2 import PdfFileReader, PdfFileWriter
import os
import json
class CertRipper:
def __init__(
self,
start_page_index=0,
master_pdf_path=None,
json_points_path=None,
ripped_certs_path=None,
ripped_cert_file_name=None,
):
self.start_page_index = start_page_index
self.master_pdf_path = master_pdf_path
self.pdf = PdfFileReader(master_pdf_path)
self.pdf_length = self.pdf.getNumPages()
self.json_points_path = json_points_path
self.ripped_certs_path = ripped_certs_path
self.ripped_cert_file_name = ripped_cert_file_name
def process(self):
recipient_groups = self.get_recipient_groups_from_points()
self.extract_pdf_from_master(recipient_groups)
def extract_pdf_from_master(self, recipient_groups):
current_page_index = self.start_page_index
process_index = 0
for recipient_group in recipient_groups:
recipient_group_name = recipient_group["name"]
recipient_group_tag = recipient_group["tag"]
recipient_slugs = recipient_group["recipient_slugs"]
print(
f"[*] Ripping \x1b[93m{recipient_group_name}\x1b[0m group ...")
for recipient_slug in recipient_slugs:
page = self.pdf.getPage(current_page_index)
file_name = self.ripped_cert_file_name.format(
index=current_page_index + 1,
tag=recipient_group_tag,
recipient=recipient_slug
)
pdf_writer = PdfFileWriter()
pdf_writer.addPage(page)
output_file_name = f"{self.ripped_certs_path}\\{file_name}.pdf"
with open(output_file_name, "wb") as out:
pdf_writer.write(out)
print(
f"\x1b[95m[{process_index}]\x1b[0m Ripped \x1b[92m[{file_name}]\x1b[0m from \x1b[94mpage {current_page_index + 1}\x1b[0m of master")
current_page_index += 1
process_index += 1
def get_recipient_groups_from_points(self):
recipient_groups = []
total_recipients = 0
with open(self.json_points_path, "r") as json_file:
points = json.load(json_file)
for point in points:
point_name = point["name"]
point_tag = point["tag"]
point_recipients = point["recipients"]
point_recipient_slugs = []
for point_recipient in point_recipients:
recipient_name = point_recipient["name"]
recipient_name_slug = "_".join(recipient_name.split())
point_recipient_slugs.append(recipient_name_slug)
total_recipients += 1
recipient_groups.append({
"name": point_name,
"tag": point_tag,
"recipient_slugs": point_recipient_slugs
})
total_groups = len(recipient_groups)
self.__check_pdf_length(total_recipients)
print(
f"Read \x1b[95m{total_groups} groups(s)\x1b[0m and \x1b[95m{total_recipients} recipient(s)\x1b[0m from JSON points")
return recipient_groups
def __check_pdf_length(self, recipients_length):
pdf_length = self.pdf_length - (self.start_page_index)
if pdf_length != recipients_length:
raise ValueError(
f"Number of recipients ({recipients_length}) does not match with PDF length ({pdf_length})"
)
if __name__ == "__main__":
load_dotenv()
ripper = CertRipper(
        # os.getenv returns a string (or None); cast it so the page-index arithmetic works
        start_page_index=int(os.getenv("START_PAGE_INDEX", "0")),
master_pdf_path=os.getenv("MASTER_PDF_PATH"),
json_points_path=os.getenv("JSON_POINTS_PATH"),
ripped_certs_path=os.getenv("RIPPED_CERTS_PATH"),
ripped_cert_file_name=os.getenv("RIPPED_CERT_FILE_NAME"),
)
ripper.process()
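# Input sketch (hypothetical, not part of the original file): the JSON points
# file read by get_recipient_groups_from_points has this shape:
#     [{"name": "Speakers", "tag": "spk", "recipients": [{"name": "Jane Doe"}]}]
# which yields one group whose recipient slug is "Jane_Doe"; the recipient
# counts must sum to the master PDF's page count minus start_page_index.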
| 35.060345 | 152 | 0.614212 | 469 | 4,067 | 4.921109 | 0.185501 | 0.046794 | 0.042461 | 0.046794 | 0.110919 | 0.02773 | 0 | 0 | 0 | 0 | 0 | 0.013786 | 0.304401 | 4,067 | 115 | 153 | 35.365217 | 0.80205 | 0 | 0 | 0.033708 | 0 | 0.022472 | 0.14507 | 0.056061 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05618 | false | 0 | 0.044944 | 0 | 0.123596 | 0.033708 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71ef7b545410fc717e2f36b835e68675481e1947 | 2,192 | py | Python |
| venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | ["Apache-2.0"] | null | null | null |
| venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | ["Apache-2.0"] | null | null | null |
| venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | ["Apache-2.0"] | null | null | null |
#
# Licensed Materials - Property of IBM
#
# (c) Copyright IBM Corp. 2007-2008
#
import unittest, sys
import ibm_db
import config
from testfunctions import IbmDbTestFunctions
class IbmDbTestCase(unittest.TestCase):
def test_111_FieldNumAddCol(self):
obj = IbmDbTestFunctions()
obj.assert_expect(self.run_test_111)
def run_test_111(self):
conn = ibm_db.connect(config.database, config.user, config.password)
server = ibm_db.server_info( conn )
if conn:
ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
insert = "INSERT INTO animals values (7, 'cat', 'Benji', 5.1)"
ibm_db.exec_immediate(conn, insert)
stmt = ibm_db.exec_immediate(conn, "SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed")
if (server.DBMS_NAME[0:3] == 'IDS'):
num1 = ibm_db.field_num(stmt, "id")
num2 = ibm_db.field_num(stmt, "breed")
num3 = ibm_db.field_num(stmt, "number")
num4 = ibm_db.field_num(stmt, "NUMBER")
num5 = ibm_db.field_num(stmt, "bREED")
num6 = ibm_db.field_num(stmt, 8)
num7 = ibm_db.field_num(stmt, 1)
num8 = ibm_db.field_num(stmt, "WEIGHT")
else:
num1 = ibm_db.field_num(stmt, "ID")
num2 = ibm_db.field_num(stmt, "BREED")
num3 = ibm_db.field_num(stmt, "NUMBER")
num4 = ibm_db.field_num(stmt, "number")
num5 = ibm_db.field_num(stmt, "Breed")
num6 = ibm_db.field_num(stmt, 8)
num7 = ibm_db.field_num(stmt, 1)
num8 = ibm_db.field_num(stmt, "weight")
print("%s" % num1)
print("int(%d)" % num2)
print("int(%d)" % num3)
print("%s" % num4)
print("%s" % num5)
print("%s" % num6)
print("int(%d)" % num7)
print("%s" % num8)
ibm_db.rollback(conn)
else:
print("Connection failed.")
#__END__
#__LUW_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__ZOS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__SYSTEMI_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__IDS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
| 21.92 | 123 | 0.629562 | 317 | 2,192 | 4.11041 | 0.293375 | 0.092095 | 0.122794 | 0.159632 | 0.478895 | 0.445127 | 0.445127 | 0.445127 | 0.445127 | 0.445127 | 0 | 0.0366 | 0.22719 | 2,192 | 99 | 124 | 22.141414 | 0.732586 | 0.145073 | 0 | 0.133333 | 0 | 0 | 0.132313 | 0 | 0 | 0 | 0 | 0 | 0.022222 | 1 | 0.044444 | false | 0.022222 | 0.088889 | 0 | 0.155556 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71f05726793fa3f17f84622c2fdc4b1adae30d42 | 2,382 | py | Python |
| foundation/djangocms_pagebanner/cms_toolbar.py | Mindelirium/foundation | 2d07e430915d696ca7376afea633692119c4d30e | ["MIT"] | null | null | null |
| foundation/djangocms_pagebanner/cms_toolbar.py | Mindelirium/foundation | 2d07e430915d696ca7376afea633692119c4d30e | ["MIT"] | null | null | null |
| foundation/djangocms_pagebanner/cms_toolbar.py | Mindelirium/foundation | 2d07e430915d696ca7376afea633692119c4d30e | ["MIT"] | null | null | null |
from cms.api import get_page_draft
from cms.toolbar_pool import toolbar_pool
from cms.toolbar_base import CMSToolbar
from cms.utils import get_cms_setting
from cms.utils.permissions import has_page_change_permission
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.translation import ugettext_lazy as _
from .models import PageBannerExtension
_banner_change_url = 'admin:djangocms_pagebanner_pagebannerextension_change'
_banner_add_url = 'admin:djangocms_pagebanner_pagebannerextension_add'
@toolbar_pool.register
class PageBannerExtensionToolbar(CMSToolbar):
def populate(self):
# always use draft if we have a page
self.page = get_page_draft(self.request.current_page)
if not self.page:
# Nothing to do
return
# check global permissions if CMS_PERMISSIONS is active
if get_cms_setting('PERMISSION'):
has_global_current_page_change_permission = \
has_page_change_permission(self.request)
else:
has_global_current_page_change_permission = False
# check if user has page edit permission
can_change = (self.request.current_page and
self.request.current_page.has_change_permission(
self.request))
if has_global_current_page_change_permission or can_change:
try:
page_banner_extension = PageBannerExtension.objects.get(
extended_object_id=self.page.id)
except PageBannerExtension.DoesNotExist:
page_banner_extension = None
try:
if page_banner_extension:
url = reverse(_banner_change_url,
args=(page_banner_extension.pk,))
else:
url = (reverse(_banner_add_url) +
'?extended_object=%s' % self.page.pk)
except NoReverseMatch:
# not in urls
pass
else:
not_edit_mode = not self.toolbar.edit_mode
current_page_menu = self.toolbar.get_or_create_menu('page')
current_page_menu.add_modal_item(_('Page banner'),
url=url,
disabled=not_edit_mode)
| 41.789474 | 76 | 0.625105 | 257 | 2,382 | 5.455253 | 0.330739 | 0.062767 | 0.071327 | 0.047076 | 0.142653 | 0.077033 | 0 | 0 | 0 | 0 | 0 | 0 | 0.321998 | 2,382 | 56 | 77 | 42.535714 | 0.868111 | 0.064232 | 0 | 0.111111 | 0 | 0 | 0.066127 | 0.046334 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022222 | false | 0.022222 | 0.177778 | 0 | 0.244444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71f0694c1e4dfc112a543268e37e44700697d2ed | 7,346 | py | Python |
| tasks/lm/models/lm.py | etri-edgeai/nn-comp-discblock | 6e00a019c223508797ca91a7d5ffec7917b12c6d | ["Apache-2.0"] | 10 | 2021-11-19T06:24:51.000Z | 2022-02-09T15:44:00.000Z |
| tasks/lm/models/lm.py | etri-edgeai/nn-comp-discblock | 6e00a019c223508797ca91a7d5ffec7917b12c6d | ["Apache-2.0"] | 9 | 2021-10-01T11:06:27.000Z | 2021-12-23T02:10:52.000Z |
| tasks/lm/models/lm.py | etri-edgeai/nn-comp-discblock | 6e00a019c223508797ca91a7d5ffec7917b12c6d | ["Apache-2.0"] | 2 | 2021-09-14T04:08:36.000Z | 2021-11-19T06:24:54.000Z |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
class RNNModel(nn.Module):
"""Container module with an encoder, a recurrent module, and a decoder."""
def __init__(self, rnn_type, ntoken, ninp, nhid, nlayers, dropout=0.5, tie_weights=False, encoder=None, decoder=None):
super(RNNModel, self).__init__()
self.ntoken = ntoken
self.drop = nn.Dropout(dropout)
if encoder is None:
self.encoder = nn.Embedding(ntoken, ninp)
else:
self.encoder = encoder
if rnn_type in ['LSTM', 'GRU']:
self.rnn = getattr(nn, rnn_type)(ninp, nhid, nlayers, dropout=dropout)
else:
try:
nonlinearity = {'RNN_TANH': 'tanh', 'RNN_RELU': 'relu'}[rnn_type]
except KeyError:
raise ValueError( """An invalid option for `--model` was supplied,
options are ['LSTM', 'GRU', 'RNN_TANH' or 'RNN_RELU']""")
self.rnn = nn.RNN(ninp, nhid, nlayers, nonlinearity=nonlinearity, dropout=dropout)
if decoder is None:
self.decoder = nn.Linear(nhid, ntoken)
else:
self.decoder = decoder
# Optionally tie weights as in:
# "Using the Output Embedding to Improve Language Models" (Press & Wolf 2016)
# https://arxiv.org/abs/1608.05859
# and
# "Tying Word Vectors and Word Classifiers: A Loss Framework for Language Modeling" (Inan et al. 2016)
# https://arxiv.org/abs/1611.01462
if tie_weights:
if nhid != ninp:
raise ValueError('When using the tied flag, nhid must be equal to emsize')
self.decoder.weight = self.encoder.weight
self.rnn_type = rnn_type
self.nhid = nhid
self.nlayers = nlayers
def init_weights(self):
initrange = 0.1
if self.encoder.__class__.__name__ == "Embedding":
self.encoder.weight.data.uniform_(-initrange, initrange)
else:
self.encoder.init_weights()
if self.decoder.__class__.__name__ == "Linear":
self.decoder.bias.data.zero_()
self.decoder.weight.data.uniform_(-initrange, initrange)
else:
self.decoder.init_weights()
def forward(self, input, hidden):
emb = self.drop(self.encoder(input))
hidden_ = []
for h in hidden:
if isinstance(h, torch.LongTensor) or isinstance(h, torch.cuda.LongTensor):
h = h.to(torch.float)
hidden_.append(h)
output, hidden = self.rnn(emb, tuple(hidden_))
output = self.drop(output)
decoded = self.decoder(output)
decoded = decoded.view(-1, self.ntoken)
return F.log_softmax(decoded, dim=1), hidden
def init_hidden(self, bsz):
weight = next(self.parameters())
if self.rnn_type == 'LSTM':
return (weight.new_zeros(self.nlayers, bsz, self.nhid),
weight.new_zeros(self.nlayers, bsz, self.nhid))
else:
return weight.new_zeros(self.nlayers, bsz, self.nhid)
# Temporarily leave PositionalEncoding module here. Will be moved somewhere else.
class PositionalEncoding(nn.Module):
r"""Inject some information about the relative or absolute position of the tokens
in the sequence. The positional encodings have the same dimension as
the embeddings, so that the two can be summed. Here, we use sine and cosine
functions of different frequencies.
.. math::
\text{PosEncoder}(pos, 2i) = sin(pos/10000^(2i/d_model))
\text{PosEncoder}(pos, 2i+1) = cos(pos/10000^(2i/d_model))
\text{where pos is the word position and i is the embed idx)
Args:
d_model: the embed dim (required).
dropout: the dropout value (default=0.1).
max_len: the max. length of the incoming sequence (default=5000).
Examples:
>>> pos_encoder = PositionalEncoding(d_model)
"""
def __init__(self, d_model, dropout=0.1, max_len=5000):
super(PositionalEncoding, self).__init__()
self.dropout = nn.Dropout(p=dropout)
pe = torch.zeros(max_len, d_model)
position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
pe = pe.unsqueeze(0).transpose(0, 1)
self.register_buffer('pe', pe)
def forward(self, x):
r"""Inputs of forward function
Args:
x: the sequence fed to the positional encoder model (required).
Shape:
x: [sequence length, batch size, embed dim]
output: [sequence length, batch size, embed dim]
Examples:
>>> output = pos_encoder(x)
"""
x = x + self.pe[:x.size(0), :]
return self.dropout(x)
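# Shape sketch (hypothetical usage, not part of the original file):
#     pe = PositionalEncoding(d_model=512)
#     x = torch.zeros(35, 20, 512)  # [seq_len, batch, embed]
#     y = pe(x)                     # same shape; pe.pe[:35] was added, then dropout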
class TransformerModel(nn.Module):
"""Container module with an encoder, a recurrent or transformer module, and a decoder."""
def __init__(self, ntoken, ninp, nhead, nhid, nlayers, dropout=0.5, encoder=None, decoder=None):
super(TransformerModel, self).__init__()
try:
from torch.nn import TransformerEncoder, TransformerEncoderLayer
        except ImportError:
raise ImportError('TransformerEncoder module does not exist in PyTorch 1.1 or lower.')
self.model_type = 'Transformer'
self.src_mask = None
self.pos_encoder = PositionalEncoding(ninp, dropout)
encoder_layers = TransformerEncoderLayer(ninp, nhead, nhid, dropout)
self.transformer_encoder = TransformerEncoder(encoder_layers, nlayers)
if encoder is None:
self.encoder = nn.Embedding(ntoken, ninp)
else:
self.encoder = encoder
self.ninp = ninp
if decoder is None:
self.decoder = nn.Linear(nhid, ntoken)
else:
self.decoder = decoder
def _generate_square_subsequent_mask(self, sz):
mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
return mask
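    # Worked example: for sz == 3 the generated mask is
    #     [[0., -inf, -inf],
    #      [0.,   0., -inf],
    #      [0.,   0.,   0.]]
    # so each position may only attend to itself and earlier positions.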
def init_weights(self):
initrange = 0.1
if self.encoder.__class__.__name__ == "Embedding":
self.encoder.weight.data.uniform_(-initrange, initrange)
else:
self.encoder.init_weights()
if self.decoder.__class__.__name__ == "Linear":
self.decoder.bias.data.zero_()
self.decoder.weight.data.uniform_(-initrange, initrange)
else:
self.decoder.init_weights()
def forward(self, src, has_mask=True):
if has_mask:
device = src.device
if self.src_mask is None or self.src_mask.size(0) != len(src):
mask = self._generate_square_subsequent_mask(len(src)).to(device)
self.src_mask = mask
else:
self.src_mask = None
src = self.encoder(src) * math.sqrt(self.ninp)
src = self.pos_encoder(src)
output = self.transformer_encoder(src, self.src_mask)
output = self.decoder(output)
return F.log_softmax(output, dim=-1)
| 40.585635 | 122 | 0.611081 | 918 | 7,346 | 4.745098 | 0.253813 | 0.037879 | 0.015152 | 0.023875 | 0.301194 | 0.270432 | 0.247016 | 0.23416 | 0.225895 | 0.185491 | 0 | 0.017345 | 0.277974 | 7,346 | 180 | 123 | 40.811111 | 0.803922 | 0.197659 | 0 | 0.351563 | 0 | 0 | 0.057963 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078125 | false | 0 | 0.046875 | 0 | 0.195313 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 71f3dc5d5c4a8e087378f18d43a8168ef202c67c | 30,964 | py | Python |
| pytype/analyze.py | hatal175/pytype | 22150dd56c2a11f3d385a1cbb28eed985df31d72 | ["Apache-2.0"] | null | null | null |
| pytype/analyze.py | hatal175/pytype | 22150dd56c2a11f3d385a1cbb28eed985df31d72 | ["Apache-2.0"] | null | null | null |
| pytype/analyze.py | hatal175/pytype | 22150dd56c2a11f3d385a1cbb28eed985df31d72 | ["Apache-2.0"] | null | null | null |
"""Code for checking and inferring types."""
import collections
import logging
import re
import subprocess
from typing import Any, Dict, Union
from pytype import abstract
from pytype import abstract_utils
from pytype import convert_structural
from pytype import debug
from pytype import function
from pytype import metrics
from pytype import output
from pytype import special_builtins
from pytype import state as frame_state
from pytype import vm
from pytype.overlays import typing_overlay
from pytype.pytd import builtins
from pytype.pytd import escape
from pytype.pytd import optimize
from pytype.pytd import pytd
from pytype.pytd import pytd_utils
from pytype.pytd import visitors
from pytype.typegraph import cfg
log = logging.getLogger(__name__)
# Most interpreter functions (including lambdas) need to be analyzed as
# stand-alone functions. The exceptions are comprehensions and generators, which
# have names like "<listcomp>" and "<genexpr>".
_SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$")
CallRecord = collections.namedtuple(
"CallRecord", ["node", "function", "signatures", "positional_arguments",
"keyword_arguments", "return_value"])
# How deep to follow call chains:
INIT_MAXIMUM_DEPTH = 4 # during module loading
MAXIMUM_DEPTH = 3 # during non-quick analysis
QUICK_CHECK_MAXIMUM_DEPTH = 2 # during quick checking
QUICK_INFER_MAXIMUM_DEPTH = 1 # during quick inference
class _Initializing:
pass
class CallTracer(vm.VirtualMachine):
"""Virtual machine that records all function calls.
Attributes:
exitpoint: A CFG node representing the program exit. Needs to be set before
analyze_types.
"""
_CONSTRUCTORS = ("__new__", "__init__")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._unknowns = {}
self._calls = set()
self._method_calls = set()
# Used by init_class.
self._instance_cache: Dict[Any, Union[_Initializing, cfg.Variable]] = {}
# Used by call_init. Can differ from _instance_cache because we also call
# __init__ on classes not initialized via init_class.
self._initialized_instances = set()
self._interpreter_functions = []
self._interpreter_classes = []
self._analyzed_functions = set()
self._analyzed_classes = set()
self._generated_classes = {}
self.exitpoint = None
def create_varargs(self, node):
value = abstract.Instance(self.convert.tuple_type, self)
value.merge_instance_type_parameter(
node, abstract_utils.T, self.convert.create_new_unknown(node))
return value.to_variable(node)
def create_kwargs(self, node):
key_type = self.convert.primitive_class_instances[str].to_variable(node)
value_type = self.convert.create_new_unknown(node)
kwargs = abstract.Instance(self.convert.dict_type, self)
kwargs.merge_instance_type_parameter(node, abstract_utils.K, key_type)
kwargs.merge_instance_type_parameter(node, abstract_utils.V, value_type)
return kwargs.to_variable(node)
def create_method_arguments(self, node, method, use_defaults=False):
"""Create arguments for the given method.
Creates Unknown objects as arguments for the given method. Note that we
don't need to take parameter annotations into account as
InterpreterFunction.call() will take care of that.
Args:
node: The current node.
method: An abstract.InterpreterFunction.
use_defaults: Whether to use parameter defaults for arguments. When True,
unknown arguments are created with force=False, as it is fine to use
Unsolvable rather than Unknown objects for type-checking defaults.
Returns:
A tuple of a node and a function.Args object.
"""
args = []
num_posargs = method.argcount(node)
num_posargs_no_default = num_posargs - len(method.defaults)
for i in range(num_posargs):
default_idx = i - num_posargs_no_default
if use_defaults and default_idx >= 0:
arg = method.defaults[default_idx]
else:
arg = self.convert.create_new_unknown(node, force=not use_defaults)
args.append(arg)
kws = {}
for key in method.signature.kwonly_params:
if use_defaults and key in method.kw_defaults:
kws[key] = method.kw_defaults[key]
else:
kws[key] = self.convert.create_new_unknown(node, force=not use_defaults)
starargs = self.create_varargs(node) if method.has_varargs() else None
starstarargs = self.create_kwargs(node) if method.has_kwargs() else None
return node, function.Args(posargs=tuple(args),
namedargs=kws,
starargs=starargs,
starstarargs=starstarargs)
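  # Sketch (hypothetical): for `def f(a, b=1, *, c)` with use_defaults=True this
  # builds posargs == (Unknown, <default of b>) and namedargs == {"c": Unknown},
  # adding starargs/starstarargs only when f takes *args/**kwargs.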
def call_function_with_args(self, node, val, args):
"""Call a function.
Args:
node: The given node.
val: A cfg.Binding containing the function.
args: A function.Args object.
Returns:
A tuple of (1) a node and (2) a cfg.Variable of the return value.
"""
fvar = val.AssignToNewVariable(node)
with val.data.record_calls():
new_node, ret = self.call_function_in_frame(node, fvar, *args)
return new_node, ret
def call_function_in_frame(self, node, var, args, kwargs,
starargs, starstarargs):
frame = frame_state.SimpleFrame(node=node)
self.push_frame(frame)
log.info("Analyzing %r", [v.name for v in var.data])
state = frame_state.FrameState.init(node, self)
state, ret = self.call_function_with_state(
state, var, args, kwargs, starargs, starstarargs)
self.pop_frame(frame)
return state.node, ret
def _maybe_fix_classmethod_cls_arg(self, node, cls, func, args):
sig = func.signature
if (args.posargs and sig.param_names and
(sig.param_names[0] not in sig.annotations)):
# fix "cls" parameter
return args._replace(
posargs=(cls.AssignToNewVariable(node),) + args.posargs[1:])
else:
return args
def maybe_analyze_method(self, node, val, cls=None):
method = val.data
fname = val.data.name
if isinstance(method, abstract.INTERPRETER_FUNCTION_TYPES):
self._analyzed_functions.add(method.get_first_opcode())
if (not self.options.analyze_annotated and
(method.signature.has_return_annotation or method.has_overloads) and
fname.rsplit(".", 1)[-1] not in self._CONSTRUCTORS):
log.info("%r has annotations, not analyzing further.", fname)
else:
for f in method.iter_signature_functions():
node, args = self.create_method_arguments(node, f)
if f.is_classmethod and cls:
args = self._maybe_fix_classmethod_cls_arg(node, cls, f, args)
node, _ = self.call_function_with_args(node, val, args)
return node
def _call_with_fake_args(self, node0, funcv):
"""Attempt to call the given function with made-up arguments."""
# TODO(tsudol): If this is expanded beyond __init__, DictKeyMissing will need
# to be handled.
nodes = []
rets = []
for funcb in funcv.bindings:
func = funcb.data
log.info("Trying %s with fake arguments", func)
if isinstance(func, abstract.INTERPRETER_FUNCTION_TYPES):
node1, args = self.create_method_arguments(node0, func)
# Once the args are generated, try calling the function.
# call_function will check fallback_to_unsolvable if a DictKeyMissing or
# FailedFunctionCall error is raised when the target function is called.
# DictKeyMissing doesn't trigger call_with_fake_args, so that shouldn't
# be raised again, and generating fake arguments should avoid any
# FailedFunctionCall errors. To prevent an infinite recursion loop, set
# fallback_to_unsolvable to False just in case.
# This means any additional errors that may be raised will be passed to
# the call_function that called this method in the first place.
node2, ret = self.call_function(node1,
funcb.AssignToNewVariable(),
args,
fallback_to_unsolvable=False)
nodes.append(node2)
rets.append(ret)
if nodes:
ret = self.join_variables(node0, rets)
node = self.join_cfg_nodes(nodes)
if ret.bindings:
return node, ret
else:
node = node0
log.info("Unable to generate fake arguments for %s", funcv)
return node, self.new_unsolvable(node)
def analyze_method_var(self, node0, name, var, cls=None):
log.info("Analyzing %s", name)
node1 = node0.ConnectNew(name)
for val in var.bindings:
node2 = self.maybe_analyze_method(node1, val, cls)
node2.ConnectTo(node0)
return node0
def bind_method(self, node, name, methodvar, instance_var):
bound = self.program.NewVariable()
for m in methodvar.Data(node):
if isinstance(m, special_builtins.ClassMethodInstance):
m = m.func.data[0]
is_cls = True
else:
is_cls = (m.isinstance_InterpreterFunction() and m.is_classmethod)
bound.AddBinding(m.property_get(instance_var, is_cls), [], node)
return bound
def _instantiate_binding(self, node0, cls, container):
"""Instantiate a class binding."""
node1, new = cls.data.get_own_new(node0, cls)
if not new or (
any(not isinstance(f, abstract.InterpreterFunction) for f in new.data)):
# This assumes that any inherited __new__ method defined in a pyi file
# returns an instance of the current class.
return node0, cls.data.instantiate(node0, container=container)
instance = self.program.NewVariable()
nodes = []
for b in new.bindings:
self._analyzed_functions.add(b.data.get_first_opcode())
node2, args = self.create_method_arguments(node1, b.data)
args = self._maybe_fix_classmethod_cls_arg(node0, cls, b.data, args)
node3 = node2.ConnectNew()
node4, ret = self.call_function_with_args(node3, b, args)
instance.PasteVariable(ret)
nodes.append(node4)
return self.join_cfg_nodes(nodes), instance
def _instantiate_var(self, node, clsv, container):
"""Build an (dummy) instance from a class, for analyzing it."""
n = self.program.NewVariable()
for cls in clsv.Bindings(node, strict=False):
node, var = self._instantiate_binding(node, cls, container)
n.PasteVariable(var)
return node, n
def _mark_maybe_missing_members(self, values):
"""Set maybe_missing_members to True on these values and their type params.
Args:
values: A list of BaseValue objects. On every instance among
the values, recursively set maybe_missing_members to True on the
instance and its type parameters.
"""
values = list(values)
seen = set()
while values:
v = values.pop(0)
if v not in seen:
seen.add(v)
if isinstance(v, abstract.SimpleValue):
v.maybe_missing_members = True
for child in v.instance_type_parameters.values():
values.extend(child.data)
def init_class(self, node, cls, container=None, extra_key=None):
"""Instantiate a class, and also call __init__.
Calling __init__ can be expensive, so this method caches its created
instances. If you don't need __init__ called, use cls.instantiate instead.
Args:
node: The current node.
cls: The class to instantiate.
container: Optionally, a container to pass to the class's instantiate()
method, so that type parameters in the container's template are
instantiated to TypeParameterInstance.
extra_key: Optionally, extra information about the location at which the
instantiation occurs. By default, this method keys on the current opcode
and the class, which sometimes isn't enough to disambiguate callers
that shouldn't get back the same cached instance.
Returns:
A tuple of node and instance variable.
"""
key = (self.frame and self.frame.current_opcode, extra_key, cls)
instance = self._instance_cache.get(key)
if not instance or isinstance(instance, _Initializing):
clsvar = cls.to_variable(node)
node, instance = self._instantiate_var(node, clsvar, container)
if key in self._instance_cache:
# We've encountered a recursive pattern such as
# class A:
# def __init__(self, x: "A"): ...
# Calling __init__ again would lead to an infinite loop, so
# we instead create an incomplete instance that will be
# overwritten later. Note that we have to create a new
# instance rather than using the one that we're already in
# the process of initializing - otherwise, setting
# maybe_missing_members to True would cause pytype to ignore
# all attribute errors on self in __init__.
self._mark_maybe_missing_members(instance.data)
else:
self._instance_cache[key] = _Initializing()
node = self.call_init(node, instance)
self._instance_cache[key] = instance
return node, instance
def _call_method(self, node, binding, method_name):
node, method = self.attribute_handler.get_attribute(
node, binding.data.get_class(), method_name, binding)
if method:
bound_method = self.bind_method(
node, method_name, method, binding.AssignToNewVariable())
node = self.analyze_method_var(node, method_name, bound_method)
return node
def _call_init_on_binding(self, node, b):
if isinstance(b.data, abstract.SimpleValue):
for param in b.data.instance_type_parameters.values():
node = self.call_init(node, param)
node = self._call_method(node, b, "__init__")
cls = b.data.get_class()
if isinstance(cls, abstract.InterpreterClass):
# Call any additional initializers the class has registered.
for method in cls.additional_init_methods:
node = self._call_method(node, b, method)
return node
def call_init(self, node, instance):
# Call __init__ on each binding.
for b in instance.bindings:
if b.data in self._initialized_instances:
continue
self._initialized_instances.add(b.data)
node = self._call_init_on_binding(node, b)
return node
def reinitialize_if_initialized(self, node, instance):
if instance in self._initialized_instances:
self._call_init_on_binding(node, instance.to_binding(node))
def analyze_class(self, node, val):
self._analyzed_classes.add(val.data)
node, instance = self.init_class(node, val.data)
good_instances = [b for b in instance.bindings if val.data == b.data.cls]
if not good_instances:
# __new__ returned something that's not an instance of our class.
instance = val.data.instantiate(node)
node = self.call_init(node, instance)
elif len(good_instances) != len(instance.bindings):
# __new__ returned some extra possibilities we don't need.
instance = self.join_bindings(node, good_instances)
for instance_value in instance.data:
val.data.register_canonical_instance(instance_value)
for name, methodvar in sorted(val.data.members.items()):
if name in self._CONSTRUCTORS:
continue # We already called this method during initialization.
b = self.bind_method(node, name, methodvar, instance)
node = self.analyze_method_var(node, name, b, val)
return node
def analyze_function(self, node0, val):
if val.data.is_attribute_of_class:
# We'll analyze this function as part of a class.
log.info("Analyze functions: Skipping class method %s", val.data.name)
else:
node1 = node0.ConnectNew(val.data.name)
node2 = self.maybe_analyze_method(node1, val)
node2.ConnectTo(node0)
return node0
def _should_analyze_as_interpreter_function(self, data):
# We record analyzed functions by opcode rather than function object. The
# two ways of recording are equivalent except for closures, which are
# re-generated when the variables they close over change, but we don't want
# to re-analyze them.
return (isinstance(data, abstract.InterpreterFunction) and
not data.is_overload and
not data.is_class_builder and
data.get_first_opcode() not in self._analyzed_functions and
not _SKIP_FUNCTION_RE.search(data.name))
def analyze_toplevel(self, node, defs):
for name, var in sorted(defs.items()):  # sort, for determinism
if not self._is_typing_member(name, var):
for value in var.bindings:
if isinstance(value.data, abstract.InterpreterClass):
new_node = self.analyze_class(node, value)
elif (isinstance(value.data, abstract.INTERPRETER_FUNCTION_TYPES) and
not value.data.is_overload):
new_node = self.analyze_function(node, value)
else:
continue
if new_node is not node:
new_node.ConnectTo(node)
# Now go through all functions and classes we haven't analyzed yet.
# These are typically hidden under a decorator.
# Go through classes first so that `is_attribute_of_class` will
# be set for all functions in the class.
for c in self._interpreter_classes:
for value in c.bindings:
if (isinstance(value.data, abstract.InterpreterClass) and
value.data not in self._analyzed_classes):
node = self.analyze_class(node, value)
for f in self._interpreter_functions:
for value in f.bindings:
if self._should_analyze_as_interpreter_function(value.data):
node = self.analyze_function(node, value)
return node
def analyze(self, node, defs, maximum_depth):
assert not self.frame
self.maximum_depth = maximum_depth
self._analyzing = True
node = node.ConnectNew(name="Analyze")
return self.analyze_toplevel(node, defs)
def trace_unknown(self, name, unknown_binding):
self._unknowns[name] = unknown_binding
def trace_call(self, node, func, sigs, posargs, namedargs, result):
"""Add an entry into the call trace.
Args:
node: The CFG node right after this function call.
func: A cfg.Binding of a function that was called.
sigs: The signatures that the function might have been called with.
posargs: The positional arguments, an iterable over cfg.Value.
namedargs: The keyword arguments, a dict mapping str to cfg.Value.
result: A Variable of the possible result values.
"""
log.debug("Logging call to %r with %d args, return %r",
func, len(posargs), result)
args = tuple(posargs)
kwargs = tuple((namedargs or {}).items())
record = CallRecord(node, func, sigs, args, kwargs, result)
if isinstance(func.data, abstract.BoundPyTDFunction):
self._method_calls.add(record)
elif isinstance(func.data, abstract.PyTDFunction):
self._calls.add(record)
def trace_functiondef(self, f):
self._interpreter_functions.append(f)
def trace_classdef(self, c):
self._interpreter_classes.append(c)
def trace_namedtuple(self, nt):
# All namedtuple instances with the same name are equal, so it's fine to
# overwrite previous instances.
self._generated_classes[nt.name] = nt
def pytd_classes_for_unknowns(self):
classes = []
for name, val in self._unknowns.items():
if val in val.variable.Filter(self.exitpoint, strict=False):
classes.append(val.data.to_structural_def(self.exitpoint, name))
return classes
def pytd_for_types(self, defs):
# If a variable is annotated, we'll always output that type.
annotated_names = set()
data = []
pytd_convert = self.convert.pytd_convert
annots = abstract_utils.get_annotations_dict(defs)
for name, t in pytd_convert.annotations_to_instance_types(
self.exitpoint, annots):
annotated_names.add(name)
data.append(pytd.Constant(name, t))
for name, var in defs.items():
if (name in output.TOP_LEVEL_IGNORE or name in annotated_names or
self._is_typing_member(name, var)):
continue
options = var.FilteredData(self.exitpoint, strict=False)
if (len(options) > 1 and
not all(isinstance(o, abstract.FUNCTION_TYPES) for o in options)):
# It's ambiguous whether this is a type, a function or something
# else, so encode it as a constant.
combined_types = pytd_utils.JoinTypes(t.to_type(self.exitpoint)
for t in options)
data.append(pytd.Constant(name, combined_types))
elif options:
for option in options:
try:
d = option.to_pytd_def(self.exitpoint, name) # Deep definition
except NotImplementedError:
d = option.to_type(self.exitpoint) # Type only
if isinstance(d, pytd.NothingType):
if isinstance(option, abstract.Empty):
d = pytd.AnythingType()
else:
assert isinstance(option, typing_overlay.NoReturn)
if isinstance(d, pytd.Type) and not isinstance(d, pytd.TypeParameter):
data.append(pytd.Constant(name, d))
else:
data.append(d)
else:
log.error("No visible options for %s", name)
data.append(pytd.Constant(name, pytd.AnythingType()))
return pytd_utils.WrapTypeDeclUnit("inferred", data)
@staticmethod
def _call_traces_to_function(call_traces, name_transform=lambda x: x):
funcs = collections.defaultdict(pytd_utils.OrderedSet)
for node, func, sigs, args, kws, retvar in call_traces:
# The lengths may be different in the presence of optional and kw args.
arg_names = max((sig.get_positional_names() for sig in sigs), key=len)
for i in range(len(arg_names)):
if not isinstance(func.data, abstract.BoundFunction) or i > 0:
arg_names[i] = function.argname(i)
arg_types = (a.data.to_type(node) for a in args)
ret = pytd_utils.JoinTypes(t.to_type(node) for t in retvar.data)
starargs = None
starstarargs = None
funcs[func.data.name].add(pytd.Signature(
tuple(pytd.Parameter(n, t, False, False, None)
for n, t in zip(arg_names, arg_types)) +
tuple(pytd.Parameter(name, a.data.to_type(node), False, False, None)
for name, a in kws),
starargs, starstarargs,
ret, exceptions=(), template=()))
functions = []
for name, signatures in funcs.items():
functions.append(pytd.Function(name_transform(name), tuple(signatures),
pytd.MethodTypes.METHOD))
return functions
def _is_typing_member(self, name, var):
for module_name in ("typing", "typing_extensions"):
if module_name not in self.loaded_overlays:
continue
module = self.loaded_overlays[module_name].get_module(name)
if name in module.members and module.members[name].data == var.data:
return True
return False
def pytd_functions_for_call_traces(self):
return self._call_traces_to_function(self._calls, escape.pack_partial)
def pytd_classes_for_call_traces(self):
class_to_records = collections.defaultdict(list)
for call_record in self._method_calls:
args = call_record.positional_arguments
if not any(isinstance(a.data, abstract.Unknown) for a in args):
# We don't need to record call signatures that don't involve
# unknowns - there's nothing to solve for.
continue
cls = args[0].data.get_class()
if isinstance(cls, abstract.PyTDClass):
class_to_records[cls].append(call_record)
classes = []
for cls, call_records in class_to_records.items():
full_name = cls.module + "." + cls.name if cls.module else cls.name
classes.append(pytd.Class(
name=escape.pack_partial(full_name),
metaclass=None,
parents=(pytd.NamedType("builtins.object"),), # not used in solver
methods=tuple(self._call_traces_to_function(call_records)),
constants=(),
classes=(),
decorators=(),
slots=None,
template=(),
))
return classes
def pytd_classes_for_namedtuple_instances(self):
return tuple(v.generate_ast() for v in self._generated_classes.values())
def compute_types(self, defs):
classes = (tuple(self.pytd_classes_for_unknowns()) +
tuple(self.pytd_classes_for_call_traces()) +
self.pytd_classes_for_namedtuple_instances())
functions = tuple(self.pytd_functions_for_call_traces())
aliases = () # aliases are instead recorded as constants
ty = pytd_utils.Concat(
self.pytd_for_types(defs),
pytd_utils.CreateModule("unknowns", classes=classes,
functions=functions, aliases=aliases))
ty = ty.Visit(optimize.CombineReturnsAndExceptions())
ty = ty.Visit(optimize.PullInMethodClasses())
ty = ty.Visit(visitors.DefaceUnresolved(
[ty, self.loader.concat_all()], escape.UNKNOWN))
return ty.Visit(visitors.AdjustTypeParameters())
def _check_return(self, node, actual, formal):
if not self.options.report_errors:
return True
views = abstract_utils.get_views([actual], node)
# Check for typevars in the return value first, since bad_matches
# expects not to get any.
bad = [view for view in views
if actual in view and view[actual].data.formal]
if not bad:
bad = self.matcher.bad_matches(actual, formal, node)
if bad:
self.errorlog.bad_return_type(
self.frames, node, formal, actual, bad)
return not bad
def check_types(src, filename, errorlog, options, loader,
deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
maximum_depth=None, **kwargs):
"""Verify the Python code."""
tracer = CallTracer(errorlog=errorlog, options=options,
generate_unknowns=False, loader=loader, **kwargs)
loc, defs = tracer.run_program(src, filename, init_maximum_depth)
snapshotter = metrics.get_metric("memory", metrics.Snapshot)
snapshotter.take_snapshot("analyze:check_types:tracer")
if deep:
if maximum_depth is None:
maximum_depth = (
QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH)
tracer.analyze(loc, defs, maximum_depth=maximum_depth)
snapshotter.take_snapshot("analyze:check_types:post")
_maybe_output_debug(options, tracer.program)
def infer_types(src, errorlog, options, loader,
filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
show_library_calls=False, maximum_depth=None, tracer_vm=None,
**kwargs):
"""Given Python source return its types.
Args:
src: A string containing Python source code.
errorlog: Where error messages go. Instance of errors.ErrorLog.
options: config.Options object
loader: A load_pytd.Loader instance to load PYI information.
filename: Filename of the program we're parsing.
deep: If True, analyze all functions, even the ones not called by the main
execution flow.
init_maximum_depth: Depth of analysis during module loading.
show_library_calls: If True, call traces are kept in the output.
maximum_depth: Depth of the analysis. Default: unlimited.
tracer_vm: An instance of CallTracer, in case the caller wants to
instantiate and retain the vm used for type inference.
**kwargs: Additional parameters to pass to vm.VirtualMachine
Returns:
A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit)
Raises:
AssertionError: In case of a bad parameter combination.
"""
# If the caller has passed in a vm, use that.
if tracer_vm:
assert isinstance(tracer_vm, CallTracer)
tracer = tracer_vm
else:
tracer = CallTracer(errorlog=errorlog, options=options,
generate_unknowns=options.protocols,
store_all_calls=not deep, loader=loader, **kwargs)
loc, defs = tracer.run_program(src, filename, init_maximum_depth)
log.info("===Done running definitions and module-level code===")
snapshotter = metrics.get_metric("memory", metrics.Snapshot)
snapshotter.take_snapshot("analyze:infer_types:tracer")
if deep:
if maximum_depth is None:
if not options.quick:
maximum_depth = MAXIMUM_DEPTH
elif options.analyze_annotated:
# Since there's no point in analyzing annotated functions for inference,
# the presence of this option means that the user wants checking, too.
maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH
else:
maximum_depth = QUICK_INFER_MAXIMUM_DEPTH
tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
else:
tracer.exitpoint = loc
snapshotter.take_snapshot("analyze:infer_types:post")
ast = tracer.compute_types(defs)
ast = tracer.loader.resolve_ast(ast)
if tracer.has_unknown_wildcard_imports or any(
a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS):
if "__getattr__" not in ast:
ast = pytd_utils.Concat(
ast, builtins.GetDefaultAst(options.python_version))
# If merged with the other if statement, this triggers a
# "ValueError: Unresolved class" when pytype attempts to load from the
# protocols file.
if options.protocols:
protocols_pytd = tracer.loader.import_name("protocols")
else:
protocols_pytd = None
builtins_pytd = tracer.loader.concat_all()
# Insert type parameters, where appropriate
ast = ast.Visit(visitors.CreateTypeParametersForSignatures())
if options.protocols:
log.info("=========== PyTD to solve =============\n%s",
pytd_utils.Print(ast))
ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd)
elif not show_library_calls:
log.info("Solving is turned off. Discarding call traces.")
# Rename remaining "~unknown" to "?"
ast = ast.Visit(visitors.RemoveUnknownClasses())
# Remove "~list" etc.:
ast = convert_structural.extract_local(ast)
_maybe_output_debug(options, tracer.program)
return ast, builtins_pytd
def _maybe_output_debug(options, program):
"""Maybe emit debugging output."""
if options.output_cfg or options.output_typegraph:
dot = debug.program_to_dot(program, set([]), bool(options.output_cfg))
svg_file = options.output_cfg or options.output_typegraph
proc = subprocess.Popen(["/usr/bin/dot", "-T", "svg", "-o", svg_file],
stdin=subprocess.PIPE, universal_newlines=True)
(_, stderr) = proc.communicate(dot)
if stderr:
log.info("Failed to create %s: %s", svg_file, stderr)
if options.output_debug:
text = debug.program_to_text(program)
if options.output_debug == "-":
log.info("=========== Program Dump =============\n%s", text)
else:
with options.open_function(options.output_debug, "w") as fi:
fi.write(text)
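The recursion guard in init_class above is a general memoization pattern: reserve the cache slot with a sentinel before doing the expensive work, so a re-entrant call can detect the cycle instead of looping forever. A minimal self-contained sketch of the same idea (generic names, not pytype's API):

class _Building:
    """Sentinel marking a cache entry whose value is still being computed."""

_cache = {}

def get_instance(key, build, on_cycle):
    entry = _cache.get(key)
    if entry is not None and not isinstance(entry, _Building):
        return entry                  # fully built, reuse it
    if isinstance(entry, _Building):
        return on_cycle(key)          # re-entered while building: break the cycle
    _cache[key] = _Building()         # reserve the slot before recursing
    value = build(key)                # may indirectly call get_instance(key, ...)
    _cache[key] = value
    return value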
| 41.61828 | 80 | 0.687993 | 4,092 | 30,964 | 5.027615 | 0.159091 | 0.018082 | 0.007777 | 0.005833 | 0.158557 | 0.113401 | 0.061391 | 0.03733 | 0.022651 | 0.019054 | 0 | 0.002204 | 0.223324 | 30,964 | 743 | 81 | 41.674293 | 0.853258 | 0.226295 | 0 | 0.108159 | 0 | 0 | 0.033335 | 0.004241 | 0 | 0 | 0 | 0.001346 | 0.005693 | 1 | 0.077799 | false | 0.001898 | 0.047438 | 0.005693 | 0.199241 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71f490ebabe7d689d377fda1a39b6fe3eaf67bee | 3,979 | py | Python |
src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py | milescsmith/cDNA_Cupcake | 776d841c69fc6d8b3dce95bb9f076546bc0429c0 | ["BSD-3-Clause-Clear"] | null | null | null |
src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py | milescsmith/cDNA_Cupcake | 776d841c69fc6d8b3dce95bb9f076546bc0429c0 | ["BSD-3-Clause-Clear"] | null | null | null |
src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py | milescsmith/cDNA_Cupcake | 776d841c69fc6d8b3dce95bb9f076546bc0429c0 | ["BSD-3-Clause-Clear"] | null | null | null |
#!/usr/bin/env python
__author__ = "etseng@pacb.com"
"""
Given a pooled input GFF + demux CSV file, write out per-{barcode group} GFFs
If input fasta/fastq is given, optionally also output per-{barcode group} FASTA/FASTQ
"""
import re
from ast import literal_eval
from collections import defaultdict
from csv import DictReader
from typing import Optional
import typer
from Bio import SeqIO
import cupcake.sequence.GFF as GFF
from cupcake import version_callback
from cupcake import cupcake_logger as logger
rex_pbid = re.compile(r"(PB.\d+.\d+)(|\S+)")
app = typer.Typer(name="cupcake.post_isoseq_cluster.demux_by_barcode_groups")
def get_type_fafq(in_filename):
in_filename = in_filename.upper()
if in_filename.endswith(".FA") or in_filename.endswith("FASTA"):
return "fasta"
elif in_filename.endswith(".FQ") or in_filename.endswith("FASTQ"):
return "fastq"
else:
raise Exception(
f"Unrecognized file suffix .{in_filename[in_filename.find('.'):]}! Must end with .fasta or .fastq!"
)
def regroup_gff(
pooled_gff, demux_count_file, output_prefix, out_group_dict, in_fafq=None
):
"""
:param pooled_sam: SAM file
:param demux_count_file: comma-delimited per-barcode count file
:param output_prefix: output prefix for GFF
:param out_group_dict: dict of barcode name --> group to be long in (ex: {'EM1':'EM', 'EM2':'EM'})
:param in_fafq: optional fasta/fastq that was input to SAM
"""
if in_fafq is not None:
type_fafq = get_type_fafq(in_fafq)
in_tissue = defaultdict(set)  # pbid --> set of tissues it is in (EM, END, R)
for r in DictReader(open(demux_count_file), delimiter=","):
for k, v in r.items():
if k != "id" and int(v) > 0:
in_tissue[r["id"]].add(k)
# in_tissue = dict(in_tissue)
handles = {}
handles_fafq = {}
for g in out_group_dict.values():
handles[g] = open(f"{output_prefix}_{g}_only.gff", "w")
if in_fafq is not None:
handles_fafq[g] = open(f"{output_prefix}_{g}_only.{type_fafq}", "w")
if in_fafq is not None:
fafq_dict = SeqIO.to_dict(SeqIO.parse(open(in_fafq), type_fafq))
fafq_dict_keys = list(fafq_dict.keys())
for k in fafq_dict_keys:
m = rex_pbid.match(k)
if m is not None:
fafq_dict[m.group(1)] = fafq_dict[k]
reader = GFF.collapseGFFReader(pooled_gff)
for r in reader:
groups_to_write_in = set()
pbid = r.seqid
if pbid not in in_tissue:
    logger.info(
        f"WARNING: {pbid} does not belong to any group indicated by outgroup_dict"
    )
    continue
for tissue in in_tissue[pbid]:
groups_to_write_in.add(out_group_dict[tissue])
for g in groups_to_write_in:
GFF.write_collapseGFF_format(handles[g], r)
if in_fafq is not None:
SeqIO.write(fafq_dict[pbid], handles_fafq[g], type_fafq)
@app.command(name="")
def main(
pooled_gff: str = typer.Argument(..., help="Pooled GFF file"),
demux_count_file: str = typer.Argument(..., help="Demux count file"),
output_prefix: str = typer.Argument(..., help="Output prefix for GFF outputs"),
outgroup_dict: str = typer.Argument(..., help="Tuples indicating barcode grouping"),
pooled_fastx: Optional[str] = typer.Option(
None,
help="Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)",
),
version: bool = typer.Option(
None,
"--version",
callback=version_callback,
is_eager=True,
help="Prints the version of the SQANTI3 package.",
),
) -> None:
tmp = literal_eval(outgroup_dict)  # safely parse tuples like ('EM1','EM'),('EM2','EM')
out_group_dict = dict([tmp]) if len(tmp) == 1 else dict(tmp)
regroup_gff(
pooled_gff,
demux_count_file,
output_prefix,
out_group_dict,
pooled_fastx,
)
if __name__ == "__main__":
typer.run(main)
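A hypothetical direct call to regroup_gff above (the file paths are placeholders; the group dict maps each barcode to its output group, as in the docstring example):

from cupcake.post_isoseq_cluster.demux_by_barcode_groups import regroup_gff

regroup_gff(
    pooled_gff="pooled.collapsed.gff",          # placeholder path
    demux_count_file="demux_counts.csv",        # placeholder path
    output_prefix="demux_out",
    out_group_dict={"EM1": "EM", "EM2": "EM"},  # barcode -> barcode group
    in_fafq="pooled.fastq",                     # optional FASTA/FASTQ input
)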
| 32.349593 | 111 | 0.6381 | 568 | 3,979 | 4.255282 | 0.288732 | 0.037236 | 0.034754 | 0.016549 | 0.11626 | 0.095159 | 0.081092 | 0.047166 | 0.047166 | 0.047166 | 0 | 0.001998 | 0.245288 | 3,979 | 122 | 112 | 32.614754 | 0.802864 | 0.097763 | 0 | 0.088889 | 0 | 0 | 0.16839 | 0.045495 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | false | 0 | 0.1 | 0 | 0.155556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71f49ed361f20a67913d6d3671205fa5c226035f | 5,168 | py | Python |
vll/data/circle_dataset.py | paulhfu/3dcv-students | f8d42c985cf33903170733b0c8f6a2199099553c | ["MIT"] | 4 | 2020-04-21T21:40:13.000Z | 2022-02-13T18:18:13.000Z |
vll/data/circle_dataset.py | paulhfu/3dcv-students | f8d42c985cf33903170733b0c8f6a2199099553c | ["MIT"] | 1 | 2022-02-03T11:24:07.000Z | 2022-02-03T11:24:07.000Z |
vll/data/circle_dataset.py | paulhfu/3dcv-students | f8d42c985cf33903170733b0c8f6a2199099553c | ["MIT"] | 8 | 2020-04-22T10:24:27.000Z | 2022-01-13T16:25:52.000Z |
import random
import numpy as np
import math
from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa
from skimage.io import imsave
from skimage.util import random_noise
maxSlope = 10 # restrict the maximum slope of generated lines for stability
minLength = 20 # restrict the minimum length of line segments
class ICircleDataset:
'''
Generator of circle segment images.
Images will have 1 random circle each, filled with noise and distractor lines.
Class also offers functionality for drawing line parameters, hypotheses and point predictions.
'''
def __init__(self, imgW = 64, imgH = 64, margin = -5, bg_clr = 0.5):
'''
Constructor.
imgW -- image width (default 64)
imgH -- image height (default 64)
margin -- line segments are sampled within this margin; a negative value means that a line segment can start or end outside the image (default -5)
bg_clr -- background intensity (default 0.5)
'''
self.imgW = imgW
self.imgH = imgH
self.margin = margin
self.bg_clr = bg_clr
def draw_circle(self, data, cX, cY, r, clr, alpha=1.0):
'''
Draw a circle with the given color and opacity.
data -- image to draw to
cX -- x value of circle center
cY -- y value of circle center
r -- radius of circle
clr -- line color, triple of values
alpha -- opacity (default 1.0)
'''
cY = int(cY * self.imgH)
cX = int(cX * self.imgW)
r = int(r * self.imgW)
rr, cc, val = circle_perimeter_aa(cY, cX, r)
set_color(data, (rr, cc), clr, val)
def draw_hyps(self, labels, scores, data=None):
'''
Draw a set of line hypothesis for a batch of images.
labels -- line parameters, array shape (NxMx2) where
N is the number of images in the batch
M is the number of hypotheses per image
2 is the number of line parameters (intercept, slope)
scores -- hypotheses scores, array shape (NxM), see above, higher score will be drawn with higher opacity
data -- batch of images to draw to; if empty, a new batch will be created according to the shape of labels
'''
n = labels.shape[0] # number of images
m = labels.shape[1] # number of hypotheses
if data is None: # create new batch of images
data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
data.fill(self.bg_clr)
clr = (0, 0, 1)
for i in range (0, n):
for j in range (0, m):
lY1 = int(labels[i, j, 0] * self.imgH)
lY2 = int(labels[i, j, 1] * self.imgW + labels[i, j, 0] * self.imgH)
self.draw_line(data[i], 0, lY1, self.imgW, lY2, clr, scores[i, j])
return data
def draw_models(self, labels, data=None, correct=None):
'''
Draw circles for a batch of images.
labels -- circle parameters, array shape (Nx3) where
N is the number of images in the batch
3 is the number of circles parameters (center x, center y, radius)
data -- batch of images to draw to; if empty, a new batch will be created
according to the shape of labels and circles will be drawn green,
otherwise circles will be drawn blue
correct -- array of shape (N) indicating whether a circle estimate is correct
'''
n = labels.shape[0]
if data is None:
data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
data.fill(self.bg_clr)
clr = (0, 1, 0)
else:
clr = (0, 0, 1)
for i in range (0, n):
self.draw_circle(data[i], labels[i, 0], labels[i, 1], labels[i, 2], clr)
if correct is not None:
# draw border green if estimate is correct, red otherwise
if correct[i]: borderclr = (0, 1, 0)
else: borderclr = (1, 0, 0)
set_color(data[i], line(0, 0, 0, self.imgW-1), borderclr)
set_color(data[i], line(0, 0, self.imgH-1, 0), borderclr)
set_color(data[i], line(self.imgH-1, 0, self.imgH-1, self.imgW-1), borderclr)
set_color(data[i], line(0, self.imgW-1, self.imgH-1, self.imgW-1), borderclr)
return data
def draw_points(self, points, data, inliers=None):
'''
Draw 2D points for a batch of images.
points -- 2D points, array shape (Nx2xM) where
N is the number of images in the batch
2 is the number of point dimensions (x, y)
M is the number of points
data -- batch of images to draw to
inliers -- soft inlier score for each point,
if given and score > 0.5 the point will be drawn as a light circle, dark otherwise
'''
n = points.shape[0] # number of images
m = points.shape[2] # number of points
for i in range (0, n):
for j in range(0, m):
clr = (0.2, 0.2, 0.2) # draw predicted points as dark circles
if inliers is not None and inliers[i, j] > 0.5:
clr = (0.7, 0.7, 0.7) # draw inliers as light circles
r = int(points[i, 0, j] * self.imgH)
c = int(points[i, 1, j] * self.imgW)
rr, cc = circle(r, c, 2)
set_color(data[i], (rr, cc), clr)
return data
def samples(self, n):
'''
Create new input images of random circles and distractors along with ground truth parameters.
n -- number of images to create
'''
data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
data.fill(self.bg_clr)
labels = np.zeros((n, 3), dtype=np.float32)
for i in range (0, n):
data[i] = random_noise(data[i], mode='speckle')
return data, labels
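A short usage sketch for the dataset class above (the circle parameters are illustrative; labels hold center x, center y, and radius as fractions of the image size):

import numpy as np

ds = ICircleDataset(imgW=64, imgH=64)
imgs, _ = ds.samples(1)                                   # noisy background batch
labels = np.array([[0.5, 0.5, 0.25]], dtype=np.float32)   # one circle per image
imgs = ds.draw_models(labels, data=imgs)                  # draw the circle in blue
print(imgs.shape)                                         # (1, 64, 64, 3)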
| 31.13253 | 147 | 0.663893 | 880 | 5,168 | 3.863636 | 0.210227 | 0.032941 | 0.025882 | 0.030588 | 0.268529 | 0.247059 | 0.199706 | 0.175882 | 0.175882 | 0.175882 | 0 | 0.0287 | 0.224652 | 5,168 | 165 | 148 | 31.321212 | 0.819815 | 0.480844 | 0 | 0.295775 | 0 | 0 | 0.002692 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.084507 | false | 0 | 0.084507 | 0 | 0.239437 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71f546859f563e461fa98070b804316bbbaa69c8 | 1,030 | py | Python |
anlogger/logger.py | anttin/anlogger | dfa7be7ba2f4651507b188f986c10bab9bd7460e | ["MIT"] | null | null | null |
anlogger/logger.py | anttin/anlogger | dfa7be7ba2f4651507b188f986c10bab9bd7460e | ["MIT"] | null | null | null |
anlogger/logger.py | anttin/anlogger | dfa7be7ba2f4651507b188f986c10bab9bd7460e | ["MIT"] | null | null | null |
import logging
import logging.handlers
import os
class Logger(object):
def __init__(self, name, default_loglevel='INFO', fmt=None, syslog=None):
self.name = name
self.syslog = syslog
self.fmt = fmt if fmt is not None else "%(asctime)-15s %(name)s %(levelname)s %(message)s"
if 'LOGLEVEL' in os.environ:
self.level = os.environ['LOGLEVEL'].upper()
else:
self.level = default_loglevel.upper()
logging.basicConfig(format=self.fmt)
self.logger = logging.getLogger(self.name)
self.logger.setLevel(self.level)
if self.syslog is not None and self.syslog not in (False, 0):
if isinstance(self.syslog, (list, tuple)):
_addr = tuple(self.syslog)
elif isinstance(self.syslog, str):
_addr = self.syslog
else:
_addr = "/dev/log" if os.path.exists("/dev/log") else None
if _addr is not None:
handler = logging.handlers.SysLogHandler(address=_addr)
self.logger.addHandler(handler)
def get(self):
return self.logger
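A usage sketch (names illustrative). A LOGLEVEL environment variable overrides the default level, and syslog may be None/False, an address string, or a (host, port) pair:

log = Logger("myapp", default_loglevel="DEBUG", syslog=None).get()
log.debug("starting up")
log.info("listening on port %s", 8080)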
| 28.611111 | 94 | 0.659223 | 141 | 1,030 | 4.737589 | 0.375887 | 0.10479 | 0.040419 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003722 | 0.217476 | 1,030 | 35 | 95 | 29.428571 | 0.825062 | 0 | 0 | 0.074074 | 0 | 0 | 0.082524 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.111111 | 0.037037 | 0.259259 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71f66963cc795d3d06ec835c6cc0e9a8392f9d65 | 729 | py | Python |
lib/env/trade/BaseTradeStrategy.py | devas123/Bitcoin-Trader-RL | 097cb0ba7428b2c4f997bdb0425a6153c23f9c83 | ["MIT"] | null | null | null |
lib/env/trade/BaseTradeStrategy.py | devas123/Bitcoin-Trader-RL | 097cb0ba7428b2c4f997bdb0425a6153c23f9c83 | ["MIT"] | null | null | null |
lib/env/trade/BaseTradeStrategy.py | devas123/Bitcoin-Trader-RL | 097cb0ba7428b2c4f997bdb0425a6153c23f9c83 | ["MIT"] | null | null | null |
from abc import ABCMeta, abstractmethod
from typing import Tuple, Callable
class BaseTradeStrategy(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self,
commissionPercent: float,
maxSlippagePercent: float,
base_precision: int,
asset_precision: int,
min_cost_limit: float,
min_amount_limit: float):
pass
@abstractmethod
def trade(self,
action: int,
n_discrete_actions: int,
balance: float,
asset_held: float,
current_price: Callable[[str], float]) -> Tuple[float, float, float, float]:
raise NotImplementedError()
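A minimal concrete subclass sketch (the "do nothing" trade logic is illustrative, not part of the interface; the return tuple is assumed to mean amount bought, amount sold, new balance, new holdings):

class HoldStrategy(BaseTradeStrategy):
    def __init__(self, commissionPercent: float, maxSlippagePercent: float,
                 base_precision: int, asset_precision: int,
                 min_cost_limit: float, min_amount_limit: float):
        self.commissionPercent = commissionPercent
        self.maxSlippagePercent = maxSlippagePercent

    def trade(self, action: int, n_discrete_actions: int, balance: float,
              asset_held: float,
              current_price: Callable[[str], float]) -> Tuple[float, float, float, float]:
        # hold: nothing bought or sold, balance and holdings unchanged
        return 0.0, 0.0, balance, asset_held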
| 30.375 | 90 | 0.577503 | 65 | 729 | 6.261538 | 0.6 | 0.07371 | 0.07371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.352538 | 729 | 23 | 91 | 31.695652 | 0.862288 | 0 | 0 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0.05 | 0.1 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71f9440eb1c326307f7552af69580f96b76f02f9 | 3,283 | py | Python |
configs/_base_/datasets/stvqa_dataset.py | linxi1158/iMIX | af87a17275f02c94932bb2e29f132a84db812002 | ["Apache-2.0"] | 23 | 2021-06-26T08:45:19.000Z | 2022-03-02T02:13:33.000Z |
configs/_base_/datasets/stvqa_dataset.py | XChuanLee/iMIX | 99898de97ef8b45462ca1d6bf2542e423a73d769 | ["Apache-2.0"] | null | null | null |
configs/_base_/datasets/stvqa_dataset.py | XChuanLee/iMIX | 99898de97ef8b45462ca1d6bf2542e423a73d769 | ["Apache-2.0"] | 9 | 2021-06-10T02:36:20.000Z | 2021-11-09T02:18:16.000Z |
dataset_type = 'STVQADATASET'
data_root = '/home/datasets/mix_data/iMIX/'
feature_path = 'data/datasets/stvqa/defaults/features/'
ocr_feature_path = 'data/datasets/stvqa/defaults/ocr_features/'
annotation_path = 'data/datasets/stvqa/defaults/annotations/'
vocab_path = 'data/datasets/stvqa/defaults/extras/vocabs/'
train_datasets = ['train']
test_datasets = ['val']
reader_train_cfg = dict(
type='STVQAREADER',
card='default',
mix_features=dict(
train=data_root + feature_path + 'detectron.lmdb',
val=data_root + feature_path + 'detectron.lmdb',
test=data_root + feature_path + 'detectron.lmdb',
),
mix_ocr_features=dict(
train=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
val=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
test=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
),
mix_annotations=dict(
train=data_root + annotation_path + 'imdb_subtrain.npy',
val=data_root + annotation_path + 'imdb_subval.npy',
test=data_root + annotation_path + 'imdb_test_task3.npy',
),
datasets=train_datasets)
reader_test_cfg = dict(
type='STVQAREADER',
card='default',
mix_features=dict(
train=data_root + feature_path + 'detectron.lmdb',
val=data_root + feature_path + 'detectron.lmdb',
test=data_root + feature_path + 'detectron.lmdb',
),
mix_ocr_features=dict(
train=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
val=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
test=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
),
mix_annotations=dict(
train=data_root + annotation_path + 'imdb_subtrain.npy',
val=data_root + annotation_path + 'imdb_subval.npy',
test=data_root + annotation_path + 'imdb_test_task3.npy',
),
datasets=train_datasets)
info_cpler_cfg = dict(
type='STVQAInfoCpler',
glove_weights=dict(
glove6b50d=data_root + 'glove/glove.6B.50d.txt.pt',
glove6b100d=data_root + 'glove/glove.6B.100d.txt.pt',
glove6b200d=data_root + 'glove/glove.6B.200d.txt.pt',
glove6b300d=data_root + 'glove/glove.6B.300d.txt.pt',
),
fasttext_weights=dict(
wiki300d1m=data_root + 'fasttext/wiki-news-300d-1M.vec',
wiki300d1msub=data_root + 'fasttext/wiki-news-300d-1M-subword.vec',
wiki_bin=data_root + 'fasttext/wiki.en.bin',
),
tokenizer='/home/datasets/VQA/bert/' + 'bert-base-uncased-vocab.txt',
mix_vocab=dict(
answers_st_5k=data_root + vocab_path + 'fixed_answer_vocab_stvqa_5k.txt',
vocabulary_100k=data_root + vocab_path + 'vocabulary_100k.txt',
),
max_seg_lenth=20,
max_ocr_lenth=10,
word_mask_ratio=0.0,
vocab_name='vocabulary_100k',
vocab_answer_name='answers_st_5k',
glove_name='glove6b300d',
fasttext_name='wiki_bin',
if_bert=True,
)
train_data = dict(
samples_per_gpu=16,
workers_per_gpu=1,
data=dict(type=dataset_type, reader=reader_train_cfg, info_cpler=info_cpler_cfg, limit_nums=800))
test_data = dict(
samples_per_gpu=16,
workers_per_gpu=1,
data=dict(type=dataset_type, reader=reader_test_cfg, info_cpler=info_cpler_cfg),
)
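The reader configs above are plain nested dicts, so a consumer resolves a feature file with simple key lookups; a sketch, assuming this config module has been imported or executed:

lmdb_path = reader_train_cfg['mix_features']['train']
# '/home/datasets/mix_data/iMIX/data/datasets/stvqa/defaults/features/detectron.lmdb'
print(lmdb_path)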
| 36.88764 | 101 | 0.696924 | 443 | 3,283 | 4.810384 | 0.216704 | 0.105115 | 0.045988 | 0.047865 | 0.663538 | 0.59878 | 0.542468 | 0.514313 | 0.514313 | 0.514313 | 0 | 0.029346 | 0.180018 | 3,283 | 88 | 102 | 37.306818 | 0.762259 | 0 | 0 | 0.52439 | 0 | 0 | 0.285714 | 0.181541 | 0.012195 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71fa3b7f37795303ef477432b9138d2cfeb1171c | 320 | py | Python |
code/Line.py | manno-xx/FutureLearnRobotBuggy | d5f0172597ad88d6a8b883b0b16d425a76edfb0b | ["MIT"] | null | null | null |
code/Line.py | manno-xx/FutureLearnRobotBuggy | d5f0172597ad88d6a8b883b0b16d425a76edfb0b | ["MIT"] | null | null | null |
code/Line.py | manno-xx/FutureLearnRobotBuggy | d5f0172597ad88d6a8b883b0b16d425a76edfb0b | ["MIT"] | null | null | null |
#LineSensor test
from gpiozero import LineSensor
from time import sleep
from signal import pause
def lineDetected():
print('line detected')
def noLineDetected():
print('no line detected')
sensor = LineSensor(14)
sensor.when_line = lineDetected
sensor.when_no_line = noLineDetected
try:
    pause()  # block and service sensor callbacks until interrupted
finally:
    sensor.close()
| 14.545455 | 36 | 0.759375 | 40 | 320 | 6 | 0.5 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007407 | 0.15625 | 320 | 21 | 37 | 15.238095 | 0.881481 | 0.046875 | 0 | 0 | 0 | 0 | 0.095395 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.25 | 0 | 0.416667 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71fafdbd17b589475cd452d3837ecd482b296f0c | 8,468 | py | Python |
combo/search/discrete/policy.py | zhangkunliang/BayesOptimization | 6d78c9e9f96239b0dbb85650a0d878e9410158ec | ["MIT"] | 139 | 2016-02-18T02:31:04.000Z | 2022-02-18T10:38:06.000Z |
combo/search/discrete/policy.py | zhangkunliang/BayesOptimization | 6d78c9e9f96239b0dbb85650a0d878e9410158ec | ["MIT"] | 8 | 2016-04-18T08:10:44.000Z | 2020-12-30T08:49:33.000Z |
combo/search/discrete/policy.py | zhangkunliang/BayesOptimization | 6d78c9e9f96239b0dbb85650a0d878e9410158ec | ["MIT"] | 50 | 2016-05-21T01:17:23.000Z | 2022-02-18T01:27:41.000Z |
import numpy as np
import copy
import combo.misc
import cPickle as pickle
from results import history
from .. import utility
from ...variable import variable
from ..call_simulator import call_simulator
from ... import predictor
from ...gp import predictor as gp_predictor
from ...blm import predictor as blm_predictor
import combo.search.score
MAX_SEACH = int(20000)
class policy:
def __init__(self, test_X, config=None):
self.predictor = None
self.training = variable()
self.test = self._set_test(test_X)
self.actions = np.arange(0, self.test.X.shape[0])
self.history = history()
self.config = self._set_config(config)
def set_seed(self, seed):
self.seed = seed
np.random.seed(self.seed)
def delete_actions(self, index, actions=None):
actions = self._set_unchosed_actions(actions)
return np.delete(actions, index)
def write(self, action, t, X=None):
if X is None:
X = self.test.X[action, :]
Z = self.test.Z[action, :] if self.test.Z is not None else None
else:
Z = self.predictor.get_basis(X) \
if self.predictor is not None else None
self.new_data = variable(X, t, Z)
self.history.write(t, action)
self.training.add(X=X, t=t, Z=Z)
def random_search(self, max_num_probes, num_search_each_probe=1,
simulator=None, is_disp=True):
N = int(num_search_each_probe)
if int(max_num_probes) * N > len(self.actions):
raise ValueError('max_num_probes * num_search_each_probe must \
be smaller than the length of candidates')
if is_disp:
utility.show_interactive_mode(simulator, self.history)
for n in xrange(0, max_num_probes):
if is_disp and N > 1:
utility.show_start_message_multi_search(self.history.num_runs)
action = self.get_random_action(N)
if simulator is None:
return action
t, X = call_simulator(simulator, action)
self.write(action, t, X)
if is_disp:
utility.show_search_results(self.history, N)
return copy.deepcopy(self.history)
def bayes_search(self, training=None, max_num_probes=None,
num_search_each_probe=1,
predictor=None, is_disp=True,
simulator=None, score='TS', interval=0,
num_rand_basis=0):
if max_num_probes is None:
max_num_probes = 1
simulator = None
is_rand_expans = False if num_rand_basis == 0 else True
self.training = self._set_training(training)
if predictor is None:
self.predictor = self._init_predictor(is_rand_expans)
else:
self.predictor = predictor
N = int(num_search_each_probe)
for n in xrange(max_num_probes):
if utility.is_learning(n, interval):
self.predictor.fit(self.training, num_rand_basis)
self.test.Z = self.predictor.get_basis(self.test.X)
self.training.Z = self.predictor.get_basis(self.training.X)
self.predictor.prepare(self.training)
else:
try:
self.predictor.update(self.training, self.new_data)
except Exception:
self.predictor.prepare(self.training)
if num_search_each_probe != 1:
utility.show_start_message_multi_search(self.history.num_runs,
score)
K = self.config.search.multi_probe_num_sampling
alpha = self.config.search.alpha
action = self.get_actions(score, N, K, alpha)
if simulator is None:
return action
t, X = call_simulator(simulator, action)
self.write(action, t, X)
if is_disp:
utility.show_search_results(self.history, N)
return copy.deepcopy(self.history)
def get_score(self, mode, predictor=None, training=None, alpha=1):
self._set_training(training)
self._set_predictor(predictor)
actions = self.actions
test = self.test.get_subset(actions)
if mode == 'EI':
f = combo.search.score.EI(predictor, training, test)
elif mode == 'PI':
f = combo.search.score.PI(predictor, training, test)
elif mode == 'TS':
f = combo.search.score.TS(predictor, training, test, alpha)
else:
raise NotImplementedError('mode must be EI, PI or TS.')
return f
def get_marginal_score(self, mode, chosed_actions, N, alpha):
f = np.zeros((N, len(self.actions)))
new_test = self.test.get_subset(chosed_actions)
virtual_t \
= self.predictor.get_predict_samples(self.training, new_test, N)
for n in xrange(N):
predictor = copy.deepcopy(self.predictor)
train = copy.deepcopy(self.training)
virtual_train = new_test
virtual_train.t = virtual_t[n, :]
if virtual_train.Z is None:
train.add(virtual_train.X, virtual_train.t)
else:
train.add(virtual_train.X, virtual_train.t, virtual_train.Z)
try:
predictor.update(train, virtual_train)
except Exception:
predictor.prepare(train)
f[n, :] = self.get_score(mode, predictor, train)
return f
def get_actions(self, mode, N, K, alpha):
f = self.get_score(mode, self.predictor, self.training, alpha)
temp = np.argmax(f)
action = self.actions[temp]
self.actions = self.delete_actions(temp)
chosed_actions = np.zeros(N, dtype=int)
chosed_actions[0] = action
for n in xrange(1, N):
f = self.get_marginal_score(mode, chosed_actions[0:n], K, alpha)
temp = np.argmax(np.mean(f, 0))
chosed_actions[n] = self.actions[temp]
self.actions = self.delete_actions(temp)
return chosed_actions
def get_random_action(self, N):
random_index = np.random.permutation(xrange(self.actions.shape[0]))
index = random_index[0:N]
action = self.actions[index]
self.actions = self.delete_actions(index)
return action
def load(self, file_history, file_training=None, file_predictor=None):
self.history.load(file_history)
if file_training is None:
N = self.history.total_num_search
X = self.test.X[self.history.chosed_actions[0:N], :]
t = self.history.fx[0:N]
self.training = variable(X=X, t=t)
else:
self.training = variable()
self.training.load(file_training)
if file_predictor is not None:
with open(file_predictor) as f:
self.predictor = pickle.load(f)
def export_predictor(self):
return self.predictor
def export_training(self):
return self.training
def export_history(self):
return self.history
def _set_predictor(self, predictor=None):
if predictor is None:
predictor = self.predictor
return predictor
def _init_predictor(self, is_rand_expans, predictor=None):
self.predictor = self._set_predictor(predictor)
if self.predictor is None:
if is_rand_expans:
self.predictor = blm_predictor(self.config)
else:
self.predictor = gp_predictor(self.config)
return self.predictor
def _set_training(self, training=None):
if training is None:
training = self.training
return training
def _set_unchosed_actions(self, actions=None):
if actions is None:
actions = self.actions
return actions
def _set_test(self, test_X):
if isinstance(test_X, np.ndarray):
test = variable(X=test_X)
elif isinstance(test_X, variable):
test = test_X
else:
raise TypeError('The type of test_X must \
take ndarray or combo.variable')
return test
def _set_config(self, config=None):
if config is None:
config = combo.misc.set_config()
return config
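A hypothetical end-to-end run of the policy class above, in the usual COMBO style (the objective is a toy stand-in; the simulator is assumed to be a callable that maps chosen action indices to their objective values, which call_simulator passes through):

import numpy as np

test_X = np.random.rand(100, 3)  # 100 candidate points, 3 features each

def simulator(actions):
    # toy objective: negative squared norm of each chosen candidate
    return -np.sum(test_X[actions] ** 2, axis=1)

p = policy(test_X)
p.set_seed(0)
p.random_search(max_num_probes=5, simulator=simulator)
res = p.bayes_search(max_num_probes=10, simulator=simulator, score='TS', interval=5)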
| 32.694981 | 78 | 0.594473 | 1,066 | 8,468 | 4.542214 | 0.12758 | 0.061751 | 0.019827 | 0.022305 | 0.213342 | 0.150351 | 0.130525 | 0.118133 | 0.104089 | 0.084676 | 0 | 0.004313 | 0.315541 | 8,468 | 258 | 79 | 32.821705 | 0.83109 | 0 | 0 | 0.22 | 0 | 0 | 0.004015 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.06 | 0.015 | 0.255 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71fb4a0f65f1788e4523139873b94749e767304c | 693 | py | Python |
source.py | s403o/tw_bot | fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03 | ["MIT"] | null | null | null |
source.py | s403o/tw_bot | fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03 | ["MIT"] | null | null | null |
source.py | s403o/tw_bot | fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03 | ["MIT"] | null | null | null |
import requests
from bs4 import BeautifulSoup as bs
import os
# source page
url = ''  # the source URL you want the bot to take images from
# download the page
page = requests.get(url)
html = bs(page.text, 'html.parser')
# locate the image tags
image_loc = html.findAll('img')
# create a folder for the located images
if not os.path.exists('imgs'):
    os.makedirs('imgs')
# move into the new folder
os.chdir('imgs')
image0 = 0  # running image index used in file names
# download the images
for image in image_loc:
    try:
        url = image['src']
        source = requests.get(url)
        if source.status_code == 200:
            # write the downloaded bytes in binary mode ('wb', not 'png')
            with open('img-' + str(image0) + '.jpg', 'wb') as mkimg:
                mkimg.write(source.content)
            image0 += 1
    except Exception:
        pass
| 19.25 | 63 | 0.65368 | 105 | 693 | 4.285714 | 0.571429 | 0.073333 | 0.093333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016544 | 0.215007 | 693 | 35 | 64 | 19.8 | 0.810662 | 0.193362 | 0 | 0 | 0 | 0 | 0.072595 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.045455 | 0.136364 | 0 | 0.136364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71fc1181a50c5c3f0fee92d2187d798fa7535036 | 2,403 | py | Python |
lib/appController.py | QIAOANGeo/BZB_ydzw | 8c11e9797cca31d1fab26be7eb0a71666cfac15f | ["MIT"] | 2 | 2019-12-06T14:49:34.000Z | 2021-06-10T15:57:59.000Z |
lib/appController.py | QIAOANGeo/BZB_ydzw | 8c11e9797cca31d1fab26be7eb0a71666cfac15f | ["MIT"] | null | null | null |
lib/appController.py | QIAOANGeo/BZB_ydzw | 8c11e9797cca31d1fab26be7eb0a71666cfac15f | ["MIT"] | null | null | null |
'''
1. Start the appium server via subprocess, using the config file
   1.1 Verify whether the server started
   1.2 Kill the previous server instance
2. Start the driver
'''
from lib.tools import Tool
import subprocess
from lib.path import SYSTEMPATH, ERRORPATH
import time
from appium import webdriver
import queue
# declare a Python queue
driver_queue = queue.Queue()
class Controller(object):
def __init__(self):
# fetch the configuration info
self.config = Tool().get_config
self.tester = self.config.get('tester')
self.device_type = self.config.get('device_type')
# fetch all configured device info
self.devices = self.config.get('devices')
self.device = self.devices.get(self.device_type)[0]
# port is used to check whether the server started
self.port = self.device.get('port')
self.name = self.device.get('name')
def kill_server(self):
mac = '''ps -ef|grep appium|grep -v grep|grep %s|awk '{print "kill -9 " $2}'|sh''' % self.port
win = 'taskkill /F /IM node.exe /t'
subprocess.getoutput(win)
def start_server(self):
self.kill_server()
command = 'appium -a {ip} -p {port} -U {deviceName}'.format(ip=self.device.get('ip'),
port=self.device.get('port'),
deviceName=self.device.get('deviceName'))
print('command : %s' % command)
subprocess.Popen(command, stdout=open(SYSTEMPATH, 'a+'), stderr=open(ERRORPATH, 'a+'), shell=True)
def test_server(self):
# mac = 'ps -ef|grep appium|grep -v grep|grep %s' % self.port
win = 'netstat -ano | findstr %s' % self.port
time.sleep(3)
while True:
data = subprocess.getoutput(win)
if data:
time.sleep(10)
print('Port %s started successfully.' % self.port)
break
else:
print('Port %s failed to start. Retrying in 5 seconds.' % self.port)
time.sleep(5)
return True
def start_driver(self):
url = 'http://{ip}:{port}/wd/hub'.format(ip=self.device.get('ip'),
port=self.port)
# merge the tester info into the device capabilities
self.device.update(self.tester)
driver = webdriver.Remote(url, self.device)
driver_queue.put(driver)
if __name__ == '__main__':
controller = Controller()
controller.start_server()
if controller.test_server():
controller.start_driver()
| 30.417722 | 109 | 0.553891 | 279 | 2,403 | 4.677419 | 0.362007 | 0.084291 | 0.05977 | 0.026054 | 0.136398 | 0.110345 | 0.110345 | 0.110345 | 0.062835 | 0.062835 | 0 | 0.008526 | 0.316687 | 2,403 | 79 | 110 | 30.417722 | 0.786236 | 0.080732 | 0 | 0 | 0 | 0.019608 | 0.130692 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.098039 | false | 0 | 0.117647 | 0 | 0.254902 | 0.078431 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71fcdf4c5cb6b34edec79a08f7d295031300d28a | 2,226 | py | Python |
pondSizes.py | passionzhan/LeetCode | c4d33b64b9da15ca7a9b0d41e645d86a697694fe | ["MIT"] | 1 | 2019-08-29T01:12:47.000Z | 2019-08-29T01:12:47.000Z |
pondSizes.py | passionzhan/LeetCode | c4d33b64b9da15ca7a9b0d41e645d86a697694fe | ["MIT"] | null | null | null |
pondSizes.py | passionzhan/LeetCode | c4d33b64b9da15ca7a9b0d41e645d86a697694fe | ["MIT"] | null | null | null |
# -*- encoding: utf-8 -*-
'''
@project : LeetCode
@File    : pondSizes.py
@Contact : 9824373@qq.com
@Desc    :
You have an integer matrix `land` representing a map, where the value at each
point is the altitude of that location. A value of 0 means water. Water cells
that are connected vertically, horizontally, or diagonally form a pond. The
size of a pond is the number of connected water cells. Write a method that
computes the sizes of all ponds in the matrix and returns them sorted in
ascending order.

Example:
Input:
[
  [0,2,1,0],
  [0,1,0,1],
  [1,1,0,1],
  [0,1,0,1]
]
Output: [1,2,4]

Constraints:
0 < len(land) <= 1000
0 < len(land[i]) <= 1000

Source: LeetCode (LCCI)
Link: https://leetcode-cn.com/problems/pond-sizes-lcci

@Modify Time      @Author    @Version    @Description
------------      -------    --------    -----------
2020-03-07        zhan       1.0         None
'''
from typing import List
from collections import deque
class Solution:
def pondSizes(self, land: List[List[int]]) -> List[int]:
def neighbors(iR,iC,flag):
ans = set()
if (iR-1,iC-1) in flag:
ans.add((iR-1,iC-1))
if (iR-1,iC) in flag:
ans.add((iR-1,iC))
if (iR-1,iC+1) in flag:
ans.add((iR-1,iC+1))
if (iR,iC-1) in flag:
ans.add((iR,iC-1))
if (iR, iC + 1) in flag:
ans.add((iR, iC + 1))
if (iR + 1, iC-1) in flag:
ans.add((iR + 1, iC-1))
if (iR + 1, iC) in flag:
ans.add((iR + 1, iC))
if (iR+1, iC + 1) in flag:
ans.add((iR+1, iC + 1))
return ans
flag = {(i,j) for j in range(len(land[0])) for i in range(len(land)) if land[i][j] == 0}
ans = []
while flag:
tmpArea = 0
mydueque = deque()
mydueque.append(flag.pop())
while mydueque:
curEle = mydueque.popleft()
tmpArea +=1
for neighbor in neighbors(curEle[0], curEle[1], flag):
mydueque.append(neighbor)
flag.remove(neighbor)
ans.append(tmpArea)
ans.sort()
return ans
if __name__ == '__main__':
a = [
[0,2,1,0],
[0,1,0,1],
[1,1,0,1],
[0,1,0,1]
]
ans = Solution().pondSizes(a)
print(ans)
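The eight unrolled if-blocks in neighbors above can be collapsed into a pair of offset loops; an equivalent sketch:

def neighbors(iR, iC, flag):
    # all 8-connected cells that are still unvisited water
    return {(iR + dr, iC + dc)
            for dr in (-1, 0, 1) for dc in (-1, 0, 1)
            if (dr, dc) != (0, 0) and (iR + dr, iC + dc) in flag}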
| 25.011236 | 124 | 0.444295 | 288 | 2,226 | 3.40625 | 0.3125 | 0.036697 | 0.061162 | 0.04893 | 0.236493 | 0.236493 | 0.236493 | 0.236493 | 0.236493 | 0.236493 | 0 | 0.069373 | 0.391285 | 2,226 | 88 | 125 | 25.295455 | 0.654613 | 0.297844 | 0 | 0.042553 | 0 | 0 | 0.005148 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042553 | false | 0 | 0.042553 | 0 | 0.148936 | 0.021277 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
71fdcd33231ded5dc2f9d6d67f26d46eb50eca3d | 5,990 | py | Python |
geolucidate/functions.py | kurtraschke/geolucidate | 827195a90d972fa5efce5a03bdbe53d8395d94ba | ["MIT"] | 3 | 2015-09-17T01:01:53.000Z | 2019-09-10T14:30:43.000Z |
geolucidate/functions.py | kurtraschke/geolucidate | 827195a90d972fa5efce5a03bdbe53d8395d94ba | ["MIT"] | null | null | null |
geolucidate/functions.py | kurtraschke/geolucidate | 827195a90d972fa5efce5a03bdbe53d8395d94ba | ["MIT"] | 5 | 2018-09-11T21:54:36.000Z | 2020-06-25T19:05:45.000Z |
# -*- coding: utf-8 -*-
from decimal import Decimal, setcontext, ExtendedContext
from geolucidate.links.google import google_maps_link
from geolucidate.links.tools import MapLink
from geolucidate.parser import parser_re
setcontext(ExtendedContext)
def _cleanup(parts):
"""
Normalize the parts matched by :obj:`parser.parser_re` to
degrees, minutes, and seconds.
>>> _cleanup({'latdir': 'south', 'longdir': 'west',
... 'latdeg':'60','latmin':'30',
... 'longdeg':'50','longmin':'40'})
['S', '60', '30', '00', 'W', '50', '40', '00']
>>> _cleanup({'latdir': 'south', 'longdir': 'west',
... 'latdeg':'60','latmin':'30', 'latdecsec':'.50',
... 'longdeg':'50','longmin':'40','longdecsec':'.90'})
['S', '60', '30.50', '00', 'W', '50', '40.90', '00']
"""
latdir = (parts['latdir'] or parts['latdir2']).upper()[0]
longdir = (parts['longdir'] or parts['longdir2']).upper()[0]
latdeg = parts.get('latdeg')
longdeg = parts.get('longdeg')
latmin = parts.get('latmin', '00') or '00'
longmin = parts.get('longmin', '00') or '00'
latdecsec = parts.get('latdecsec', '')
longdecsec = parts.get('longdecsec', '')
if (latdecsec and longdecsec):
latmin += latdecsec
longmin += longdecsec
latsec = '00'
longsec = '00'
else:
latsec = parts.get('latsec', '') or '00'
longsec = parts.get('longsec', '') or '00'
return [latdir, latdeg, latmin, latsec, longdir, longdeg, longmin, longsec]
def _convert(latdir, latdeg, latmin, latsec,
longdir, longdeg, longmin, longsec):
"""
Convert normalized degrees, minutes, and seconds to decimal degrees.
Quantize the converted value based on the input precision and
return a 2-tuple of strings.
>>> _convert('S','50','30','30','W','50','30','30')
('-50.508333', '-50.508333')
>>> _convert('N','50','27','55','W','127','27','65')
('50.459167', '-127.460833')
"""
if (latsec != '00' or longsec != '00'):
precision = Decimal('0.000001')
elif (latmin != '00' or longmin != '00'):
precision = Decimal('0.001')
else:
precision = Decimal('1')
latitude = Decimal(latdeg)
latmin = Decimal(latmin)
latsec = Decimal(latsec)
longitude = Decimal(longdeg)
longmin = Decimal(longmin)
longsec = Decimal(longsec)
if latsec > 59 or longsec > 59:
# Assume that 'seconds' greater than 59 are actually a decimal
# fraction of minutes
latitude += (latmin +
(latsec / Decimal('100'))) / Decimal('60')
longitude += (longmin +
(longsec / Decimal('100'))) / Decimal('60')
else:
latitude += (latmin +
(latsec / Decimal('60'))) / Decimal('60')
longitude += (longmin +
(longsec / Decimal('60'))) / Decimal('60')
if latdir == 'S':
latitude *= Decimal('-1')
if longdir == 'W':
longitude *= Decimal('-1')
lat_str = str(latitude.quantize(precision))
long_str = str(longitude.quantize(precision))
return (lat_str, long_str)
def replace(string, sub_function=google_maps_link()):
"""
Replace detected coordinates with a map link, using the given substitution
function.
The substitution function will be passed a :class:`~.MapLink` instance, and
should return a string which will be substituted by :func:`re.sub` in place
of the detected coordinates.
>>> replace("58147N/07720W")
'<a href="http://maps.google.com/maps?q=58.235278%2C-77.333333+%2858147N%2F07720W%29&ll=58.235278%2C-77.333333&t=h" title="58147N/07720W (58.235278, -77.333333)">58147N/07720W</a>'
>>> replace("5814N/07720W", google_maps_link('satellite'))
'<a href="http://maps.google.com/maps?q=58.233%2C-77.333+%285814N%2F07720W%29&ll=58.233%2C-77.333&t=k" title="5814N/07720W (58.233, -77.333)">5814N/07720W</a>'
>>> from geolucidate.links.bing import bing_maps_link
>>> replace("58N/077W", bing_maps_link('map'))
'<a href="http://bing.com/maps/default.aspx?style=r&cp=58~-77&sp=Point.58_-77_58N%2F077W&v=2" title="58N/077W (58, -77)">58N/077W</a>'
"""
def do_replace(match):
original_string = match.group()
(latitude, longitude) = _convert(*_cleanup(match.groupdict()))
return sub_function(MapLink(original_string, latitude, longitude))
return parser_re.sub(do_replace, string)
def get_replacements(string, sub_function=google_maps_link()):
"""
Return a dict whose keys are instances of :class:`re.Match` and
whose values are the corresponding replacements. Use
:func:`get_replacements` when the replacement cannot be performed
through ordinary string substitution by :func:`re.sub`, as in
:func:`replace`.
>>> get_replacements("4630 NORTH 5705 WEST 58147N/07720W")
... #doctest: +ELLIPSIS
{<re.Match object...>: '<a href="..." title="...">4630 NORTH 5705 WEST</a>', <re.Match object...>: '<a href="..." title="...">58147N/07720W</a>'}
>>> test_string = "4630 NORTH 5705 WEST 58147N/07720W"
>>> replacements = get_replacements(test_string)
>>> offset = 0
>>> out = bytearray(test_string, encoding="ascii", errors="replace")
>>> for (match, link) in replacements.items():
... start = match.start() + offset
... end = match.end() + offset
... out[start:end] = bytearray(link, encoding="ascii", errors="replace")
... offset += (len(link) - len(match.group()))
>>> out.decode(encoding="ascii") == replace(test_string)
True
"""
substitutions = {}
matches = parser_re.finditer(string)
for match in matches:
(latitude, longitude) = _convert(*_cleanup(match.groupdict()))
substitutions[match] = sub_function(MapLink(match.group(),
latitude, longitude))
return substitutions
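# Hedged sketch (uses only the public API above, helper name is
# illustrative): applying the replacements from get_replacements() in
# reverse match order avoids the offset bookkeeping shown in the doctest,
# since earlier match positions stay valid after later substitutions.
def replace_via_matches(string, sub_function=google_maps_link()):
    out = string
    for match, link in sorted(get_replacements(string, sub_function).items(),
                              key=lambda item: item[0].start(),
                              reverse=True):
        out = out[:match.start()] + link + out[match.end():]
    return out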
| 35.235294
| 184
| 0.602337
| 723
| 5,990
| 4.923928
| 0.278008
| 0.017978
| 0.01573
| 0.014326
| 0.180337
| 0.164607
| 0.070787
| 0.070787
| 0.041573
| 0
| 0
| 0.092331
| 0.220701
| 5,990
| 169
| 185
| 35.443787
| 0.670308
| 0.478631
| 0
| 0.132353
| 0
| 0
| 0.051371
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073529
| false
| 0
| 0.058824
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
71feb582dac7753ba6da1c6d33e4df9c56ad4249
| 17,900
|
py
|
Python
|
lingvo/tasks/asr/encoder.py
|
j-luo93/lingvo
|
7398974078391362f0c1b027164a8f33f88cf86b
|
[
"Apache-2.0"
] | 4
|
2019-01-08T02:59:38.000Z
|
2022-02-18T11:31:37.000Z
|
lingvo/tasks/asr/encoder.py
|
j-luo93/lingvo
|
7398974078391362f0c1b027164a8f33f88cf86b
|
[
"Apache-2.0"
] | null | null | null |
lingvo/tasks/asr/encoder.py
|
j-luo93/lingvo
|
7398974078391362f0c1b027164a8f33f88cf86b
|
[
"Apache-2.0"
] | 1
|
2019-07-02T14:09:42.000Z
|
2019-07-02T14:09:42.000Z
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encoders for the speech model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import range
from six.moves import zip
import tensorflow as tf
from tensorflow.python.ops import inplace_ops
from lingvo.core import base_encoder
from lingvo.core import base_layer
from lingvo.core import layers
from lingvo.core import plot
from lingvo.core import py_utils
from lingvo.core import rnn_cell
from lingvo.core import rnn_layers
from lingvo.core import summary_utils
from lingvo.core import model_helper
ConvLSTMBlock = collections.namedtuple('ConvLSTMBlock', ('rnn', 'cnn'))
class AsrEncoder(base_encoder.BaseEncoder):
"""Speech encoder version 1."""
@classmethod
def Params(cls):
"""Configs for AsrEncoder."""
p = super(AsrEncoder, cls).Params()
p.Define('lstm_tpl', rnn_cell.LSTMCellSimple.Params(),
'Configs template for the RNN layer.')
p.Define('cnn_tpl', layers.ConvLayer.Params(),
'Configs template for the conv layer.')
p.Define('proj_tpl', layers.ProjectionLayer.Params(),
'Configs template for the projection layer.')
p.Define(
'highway_skip', False,
'If set, residual connections from different layers are gated. '
'Will only be used if residual_start is enabled.')
p.Define('highway_skip_tpl', layers.HighwaySkipLayer.Params(),
'Configs template for the highway skip layer.')
p.Define('conv_lstm_tpl', rnn_cell.ConvLSTMCell.Params(),
'Configs template for ConvLSTMCell.')
p.Define(
'after_conv_lstm_cnn_tpl', layers.ConvLayer.Params(),
        'Configs template for the cnn layer immediately following the'
        ' convlstm layer.')
p.Define('conv_filter_shapes', None, 'Filter shapes for each conv layer.')
p.Define('conv_filter_strides', None, 'Filter strides for each conv layer.')
p.Define('input_shape', [None, None, None, None],
             'Shape of the input. This should be a TensorShape with rank 4.')
p.Define('lstm_cell_size', 256, 'LSTM cell size for the RNN layer.')
p.Define('num_cnn_layers', 2, 'Number of conv layers to create.')
p.Define('num_conv_lstm_layers', 1, 'Number of conv lstm layers to create.')
    p.Define('num_lstm_layers', 3, 'Number of rnn layers to create.')
p.Define('project_lstm_output', True,
'Include projection layer after each encoder LSTM layer.')
p.Define('pad_steps', 6,
'Extra zero-padded timesteps to add to the input sequence. ')
p.Define(
'residual_start', 0, 'Start residual connections from this lstm layer. '
'Disabled if 0 or greater than num_lstm_layers.')
p.Define('residual_stride', 1,
'Number of lstm layers to skip per residual connection.')
p.Define(
'bidi_rnn_type', 'func', 'Options: func, native_cudnn. '
'func: BidirectionalFRNN, '
'native_cudnn: BidirectionalNativeCuDNNLSTM.')
# TODO(yonghui): Maybe move those configs to a separate file.
# Set some reasonable default values.
#
# NOTE(yonghui): The default config below assumes the following encoder
# architecture:
#
# cnn/batch-norm/relu ->
# cnn/batch-norm/relu ->
# bidirectional conv-lstm ->
# cnn/batch-norm/relu
# bidirectional lstm ->
# projection/batch-norm/relu ->
# bidirectional lstm ->
# projection/batch-norm/relu ->
# bidirectional lstm
#
# Default config for the rnn layer.
p.lstm_tpl.params_init = py_utils.WeightInit.Uniform(0.1)
# Default config for the convolution layer.
p.input_shape = [None, None, 80, 3]
p.conv_filter_shapes = [(3, 3, 3, 32), (3, 3, 32, 32)]
p.conv_filter_strides = [(2, 2), (2, 2)]
p.cnn_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1)
# TODO(yonghui): Disable variational noise logic.
# NOTE(yonghui): Fortunately, variational noise logic is currently not
# implemented for ConvLayer yet (as of sep 22, 2016).
# Default config for the projection layer.
p.proj_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1)
# TODO(yonghui): Disable variational noise logic.
# NOTE(yonghui): Fortunately, variational noise logic is currently not
# implemented for ProjectionLayer yet (as of sep 22, 2016).
p.conv_lstm_tpl.filter_shape = [1, 3] # height (time), width (frequency)
p.conv_lstm_tpl.inputs_shape = [None, None, None, None]
p.conv_lstm_tpl.cell_shape = [None, None, None, None]
p.conv_lstm_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1)
p.after_conv_lstm_cnn_tpl.filter_shape = [3, 3, None, None]
p.after_conv_lstm_cnn_tpl.params_init = (
py_utils.WeightInit.TruncatedGaussian(0.1))
p.after_conv_lstm_cnn_tpl.filter_stride = [1, 1]
return p
@base_layer.initializer
def __init__(self, params):
super(AsrEncoder, self).__init__(params)
p = self.params
assert p.packed_input is False, ('Packed inputs are not yet supported for '
'AsrEncoder.')
name = p.name
with tf.variable_scope(name):
# First create the conv layers.
assert p.num_cnn_layers == len(p.conv_filter_shapes)
assert p.num_cnn_layers == len(p.conv_filter_strides)
params_conv_layers = []
for i in range(p.num_cnn_layers):
conv_p = p.cnn_tpl.Copy()
conv_p.name = 'conv_L%d' % (i)
conv_p.filter_shape = p.conv_filter_shapes[i]
conv_p.filter_stride = p.conv_filter_strides[i]
conv_p.is_eval = p.is_eval
params_conv_layers.append(conv_p)
self.CreateChildren('conv', params_conv_layers)
conv_output_shape = tf.TensorShape(p.input_shape)
for i in range(p.num_cnn_layers):
conv_output_shape = self.conv[i].OutShape(conv_output_shape)
conv_output_shape = conv_output_shape.as_list()
assert len(conv_output_shape) == 4 # batch, height, width, channel.
params_conv_lstm_rnn = []
params_conv_lstm_cnn = []
for i in range(p.num_conv_lstm_layers):
# NOTE(yonghui): We assume that output from ConvLSTMBlock has the same
# shape as its input.
_, _, width, in_channel = conv_output_shape
f_conv_lstm_p = p.conv_lstm_tpl.Copy()
f_conv_lstm_p.name = 'f_conv_lstm_%d' % (i)
f_conv_lstm_p.inputs_shape = [None, 1, width, in_channel]
f_conv_lstm_p.cell_shape = [None, 1, width, in_channel]
b_conv_lstm_p = f_conv_lstm_p.Copy()
b_conv_lstm_p.name = 'b_conv_lstm_%d' % (i)
conv_lstm_rnn_p = self.CreateConvLstmLayerParams()
conv_lstm_rnn_p.name = 'conv_lstm_rnn'
conv_lstm_rnn_p.fwd = f_conv_lstm_p
conv_lstm_rnn_p.bak = b_conv_lstm_p
params_conv_lstm_rnn.append(conv_lstm_rnn_p)
cnn_p = p.after_conv_lstm_cnn_tpl.Copy()
cnn_p.name = 'conv_lstm_cnn_%d' % (i)
cnn_p.filter_shape[2] = 2 * in_channel
cnn_p.filter_shape[3] = in_channel
params_conv_lstm_cnn.append(cnn_p)
# TODO(yonghui): Refactor ConvLSTMBlock into a layer.
self.CreateChildren('conv_lstm_rnn', params_conv_lstm_rnn)
self.CreateChildren('conv_lstm_cnn', params_conv_lstm_cnn)
(self._first_lstm_input_dim,
self._first_lstm_input_dim_pad) = self.FirstLstmLayerInputDimAndPadding(
conv_output_shape, pad_to_multiple=16)
# Now create all the rnn layers and projection layers.
# TODO(yonghui): take care of device placement.
params_rnn_layers = []
params_proj_layers = []
params_highway_skip_layers = []
for i in range(p.num_lstm_layers):
if i == 0:
input_dim = self._first_lstm_input_dim
else:
input_dim = 2 * p.lstm_cell_size
forward_p = p.lstm_tpl.Copy()
forward_p.name = 'fwd_rnn_L%d' % (i)
forward_p.num_input_nodes = input_dim
forward_p.num_output_nodes = p.lstm_cell_size
backward_p = forward_p.Copy()
backward_p.name = 'bak_rnn_L%d' % (i)
rnn_p = self.CreateBidirectionalRNNParams(forward_p, backward_p)
rnn_p.name = 'brnn_L%d' % (i)
params_rnn_layers.append(rnn_p)
if p.project_lstm_output and (i < p.num_lstm_layers - 1):
proj_p = p.proj_tpl.Copy()
proj_p.input_dim = 2 * p.lstm_cell_size
proj_p.output_dim = 2 * p.lstm_cell_size
proj_p.name = 'proj_L%d' % (i)
proj_p.is_eval = p.is_eval
params_proj_layers.append(proj_p)
# add the skip layers
residual_index = i - p.residual_start + 1
if p.residual_start > 0 and residual_index >= 0 and p.highway_skip:
highway_skip = p.highway_skip_tpl.Copy()
highway_skip.name = 'enc_hwskip_%d' % len(params_highway_skip_layers)
highway_skip.input_dim = 2 * p.lstm_cell_size
params_highway_skip_layers.append(highway_skip)
self.CreateChildren('rnn', params_rnn_layers)
self.CreateChildren('proj', params_proj_layers)
self.CreateChildren('highway_skip', params_highway_skip_layers)
@property
def _use_functional(self):
return True
def CreateBidirectionalRNNParams(self, forward_p, backward_p):
return model_helper.CreateBidirectionalRNNParams(self.params, forward_p,
backward_p)
def CreateConvLstmLayerParams(self):
return rnn_layers.BidirectionalFRNN.Params()
def FirstLstmLayerInputDimAndPadding(self,
conv_output_shape,
pad_to_multiple=16):
lstm_input_shape = conv_output_shape
    # Make sure the lstm input dim is a multiple of 16 (alignment
    # requirement from FRNN).
first_lstm_input_dim_unpadded = lstm_input_shape[2] * lstm_input_shape[3]
if self._use_functional and (first_lstm_input_dim_unpadded % pad_to_multiple
!= 0):
first_lstm_input_dim = int(
(first_lstm_input_dim_unpadded + pad_to_multiple - 1) /
pad_to_multiple) * pad_to_multiple
else:
first_lstm_input_dim = first_lstm_input_dim_unpadded
first_lstm_input_dim_padding = (
first_lstm_input_dim - first_lstm_input_dim_unpadded)
return first_lstm_input_dim, first_lstm_input_dim_padding
@property
def supports_streaming(self):
return False
def zero_state(self, batch_size):
return py_utils.NestedMap()
def FProp(self, theta, batch, state0=None):
"""Encodes source as represented by 'inputs' and 'paddings'.
Args:
theta: A NestedMap object containing weights' values of this
layer and its children layers.
batch: A NestedMap with fields:
src_inputs - The inputs tensor. It is expected to be of shape [batch,
time, feature_dim, channels].
paddings - The paddings tensor. It is expected to be of shape [batch,
time].
state0: Recurrent input state. Not supported/ignored by this encoder.
Returns:
(outputs, out_paddings, state1) tuple. Outputs is of the shape
[time, batch, depth], and out_paddings is of the shape [time, batch]
"""
p = self.params
inputs, paddings = batch.src_inputs, batch.paddings
with tf.name_scope(p.name):
# Add a few extra padded timesteps at the end. This is for ensuring the
# correctness of the conv-layers at the edges.
if p.pad_steps > 0:
# inplace_update() is not supported by TPU for now. Since we have done
# padding on the input_generator, we may avoid this additional padding.
assert not py_utils.use_tpu()
inputs_pad = tf.zeros(
inplace_ops.inplace_update(tf.shape(inputs), 1, p.pad_steps),
inputs.dtype)
paddings_pad = tf.ones(
inplace_ops.inplace_update(tf.shape(paddings), 1, p.pad_steps),
paddings.dtype)
inputs = tf.concat([inputs, inputs_pad], 1, name='inputs')
paddings = tf.concat([paddings, paddings_pad], 1)
def ReshapeForPlot(tensor, padding, name):
"""Transposes and flattens channels to [batch, dim, seq_len] shape."""
# Flatten any dimensions beyond the third into the third.
batch_size = tf.shape(tensor)[0]
max_len = tf.shape(tensor)[1]
plot_tensor = tf.reshape(tensor, [batch_size, max_len, -1])
plot_tensor = tf.transpose(plot_tensor, [0, 2, 1], name=name)
return (plot_tensor, summary_utils.SequenceLength(padding))
plots = [
ReshapeForPlot(
tf.transpose(inputs, [0, 1, 3, 2]), paddings, 'inputs')
]
conv_out = inputs
out_padding = paddings
for i, conv_layer in enumerate(self.conv):
conv_out, out_padding = conv_layer.FProp(theta.conv[i], conv_out,
out_padding)
plots.append(
ReshapeForPlot(
tf.transpose(conv_out, [0, 1, 3, 2]), out_padding,
'conv_%d_out' % i))
def TransposeFirstTwoDims(t):
first_dim = tf.shape(t)[0]
second_dim = tf.shape(t)[1]
t_new = tf.transpose(
tf.reshape(t, [first_dim, second_dim, -1]), [1, 0, 2])
t_shape_new = tf.concat([[second_dim], [first_dim], tf.shape(t)[2:]], 0)
return tf.reshape(t_new, t_shape_new)
# Now the conv-lstm part.
conv_lstm_out = conv_out
conv_lstm_out_padding = out_padding
for i, (rnn, cnn) in enumerate(
zip(self.conv_lstm_rnn, self.conv_lstm_cnn)):
conv_lstm_in = conv_lstm_out
# Move time dimension to be the first.
conv_lstm_in = TransposeFirstTwoDims(conv_lstm_in)
conv_lstm_in = tf.expand_dims(conv_lstm_in, 2)
conv_lstm_in_padding = tf.expand_dims(
tf.transpose(conv_lstm_out_padding), 2)
lstm_out = rnn.FProp(theta.conv_lstm_rnn[i], conv_lstm_in,
conv_lstm_in_padding)
# Move time dimension to be the second.
cnn_in = TransposeFirstTwoDims(lstm_out)
cnn_in = tf.squeeze(cnn_in, 2)
cnn_in_padding = conv_lstm_out_padding
cnn_out, cnn_out_padding = cnn.FProp(theta.conv_lstm_cnn[i], cnn_in,
cnn_in_padding)
conv_lstm_out, conv_lstm_out_padding = cnn_out, cnn_out_padding
plots.append(
ReshapeForPlot(conv_lstm_out, conv_lstm_out_padding,
'conv_lstm_%d_out' % i))
# Need to do a reshape before starting the rnn layers.
conv_lstm_out = py_utils.HasRank(conv_lstm_out, 4)
conv_lstm_out_shape = tf.shape(conv_lstm_out)
new_shape = tf.concat([conv_lstm_out_shape[:2], [-1]], 0)
conv_lstm_out = tf.reshape(conv_lstm_out, new_shape)
if self._first_lstm_input_dim_pad:
conv_lstm_out = tf.pad(
conv_lstm_out,
[[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]])
conv_lstm_out = py_utils.HasShape(conv_lstm_out,
[-1, -1, self._first_lstm_input_dim])
# Transpose to move the time dimension to be the first.
rnn_in = tf.transpose(conv_lstm_out, [1, 0, 2])
rnn_padding = tf.expand_dims(tf.transpose(conv_lstm_out_padding), 2)
# rnn_in is of shape [time, batch, depth]
# rnn_padding is of shape [time, batch, 1]
# Now the rnn layers.
num_skips = 0
for i in range(p.num_lstm_layers):
rnn_out = self.rnn[i].FProp(theta.rnn[i], rnn_in, rnn_padding)
residual_index = i - p.residual_start + 1
if p.residual_start > 0 and residual_index >= 0:
if residual_index % p.residual_stride == 0:
residual_in = rnn_in
if residual_index % p.residual_stride == p.residual_stride - 1:
# Highway skip connection.
if p.highway_skip:
rnn_out = self.highway_skip[num_skips].FProp(
theta.highway_skip[num_skips], residual_in, rnn_out)
num_skips += 1
else:
# Residual skip connection.
rnn_out += py_utils.HasShape(residual_in, tf.shape(rnn_out))
if p.project_lstm_output and (i < p.num_lstm_layers - 1):
# Projection layers.
rnn_out = self.proj[i].FProp(theta.proj[i], rnn_out, rnn_padding)
if i == p.num_lstm_layers - 1:
rnn_out *= (1.0 - rnn_padding)
plots.append(
ReshapeForPlot(
tf.transpose(rnn_out, [1, 0, 2]),
tf.transpose(rnn_padding, [1, 0, 2]), 'rnn_%d_out' % i))
rnn_in = rnn_out
final_out = rnn_in
if self.cluster.add_summary:
fig = plot.MatplotlibFigureSummary(
'encoder_example', figsize=(8, len(plots) * 3.5))
# Order layers from bottom to top.
plots.reverse()
for tensor, seq_len in plots:
fig.AddSubplot(
[tensor, seq_len],
summary_utils.TrimPaddingAndPlotSequence,
title=tensor.name,
xlabel='Time')
fig.Finalize()
rnn_padding = tf.squeeze(rnn_padding, [2])
return final_out, rnn_padding, py_utils.NestedMap()
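# Hedged sketch (standalone, mirrors FirstLstmLayerInputDimAndPadding above):
# the first LSTM input dim is the flattened conv output (width * channels)
# rounded up to the next multiple of 16 for FRNN alignment.
def _pad_to_multiple(dim, multiple=16):
  # Same rounding as the method above: ceil(dim / multiple) * multiple.
  padded = ((dim + multiple - 1) // multiple) * multiple
  return padded, padded - dim

assert _pad_to_multiple(20 * 32) == (640, 0)  # 640 is already aligned
assert _pad_to_multiple(20 * 30) == (608, 8)  # 600 pads up to 608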
| 42.517815
| 80
| 0.655587
| 2,493
| 17,900
| 4.431207
| 0.155235
| 0.055762
| 0.021906
| 0.026161
| 0.314112
| 0.24776
| 0.178148
| 0.151625
| 0.133158
| 0.087082
| 0
| 0.011995
| 0.250168
| 17,900
| 420
| 81
| 42.619048
| 0.811056
| 0.188547
| 0
| 0.084746
| 0
| 0
| 0.108894
| 0.003616
| 0
| 0
| 0
| 0.002381
| 0.016949
| 1
| 0.037288
| false
| 0
| 0.057627
| 0.016949
| 0.132203
| 0.00339
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
71feffbb5e24e7f37afedbf05e8ccd4bc8d2a4ea
| 1,898
|
py
|
Python
|
pos_neg_graph/graph_ratio.py
|
Yudabin/Review_Project
|
b924199d6845defeb4cd243a99426070c014d8d8
|
[
"MIT"
] | null | null | null |
pos_neg_graph/graph_ratio.py
|
Yudabin/Review_Project
|
b924199d6845defeb4cd243a99426070c014d8d8
|
[
"MIT"
] | null | null | null |
pos_neg_graph/graph_ratio.py
|
Yudabin/Review_Project
|
b924199d6845defeb4cd243a99426070c014d8d8
|
[
"MIT"
] | 1
|
2020-11-10T00:54:45.000Z
|
2020-11-10T00:54:45.000Z
|
import matplotlib.font_manager as fm
import matplotlib.pyplot as plt
import numpy as np
font_location = './wordcloud_file/malgun.ttf' # For Windows
font_name = fm.FontProperties(fname=font_location).get_name()
plt.rc('font', family=font_name)
def percent_graph2(movie_review):
    b = movie_review
    labelss = sorted(b['score'].unique())  # set the labels (Korean label text was not rendering)
    c = b['score'].value_counts().sort_index()  # frequency of each label
    print(c)
    print(labelss)
    fig = plt.figure(figsize=(8, 8))  # create the figure
    fig.set_facecolor('white')  # set the figure background to white
    ax = fig.add_subplot()  # create the axes
    pie = ax.pie(c,  # draw the pie chart
                 startangle=90,  # start at 90 degrees
                 counterclock=False,  # draw clockwise
                 # autopct=lambda p: '{:.2f}%'.format(p),  # percentage labels
                 wedgeprops=dict(width=0.5),
                 colors=['yellowgreen', 'orange'],
                 labels=labelss,
                 textprops={'fontsize': 22}
                 )
    total = np.sum(c)  # total of the counts
    sum_pct = 0  # running percentage total
    for i, l in enumerate(labelss):
        ang1, ang2 = pie[0][i].theta1, pie[0][i].theta2  # start/end angles of the wedge
        r = pie[0][i].r  # wedge radius
        x = ((r + 0.5) / 2) * np.cos(np.pi / 180 * ((ang1 + ang2) / 2))  # x coordinate of the label
        y = ((r + 0.5) / 2) * np.sin(np.pi / 180 * ((ang1 + ang2) / 2))  # y coordinate of the label
        if i < len(labelss) - 1:
            sum_pct += float(f'{c[i] / total * 100:.2f}')  # accumulate the rounded percentage
            ax.text(x, y, f'{c[i] / total * 100:.2f}%', ha='center', va='center', size=22, color='white',
                    weight='bold')  # draw the percentage text
        else:  # force the total to 100 by giving the last slice the remainder
            ax.text(x, y, f'{100 - sum_pct:.2f}%', ha='center', va='center', size=22, color='white',
                    weight='bold')
    # pie.rc('font', family=font_name)
    # plt.legend(pie[0], labelss)  # show the legend
    plt.savefig('./static/images/pos_neg_ratio.png')  # output path
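# Hypothetical usage sketch: percent_graph2 expects a DataFrame with a
# 'score' column; two distinct values match the two wedge colors above.
# pandas is assumed to be installed, and the data here is illustrative only.
if __name__ == '__main__':
    import pandas as pd
    reviews = pd.DataFrame({'score': [0, 0, 1, 1, 1]})  # e.g. 0 = negative, 1 = positive
    percent_graph2(reviews)  # writes ./static/images/pos_neg_ratio.png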
| 42.177778
| 105
| 0.553741
| 284
| 1,898
| 3.630282
| 0.56338
| 0.015519
| 0.014549
| 0.031038
| 0.212415
| 0.14549
| 0.122211
| 0.085354
| 0.085354
| 0.085354
| 0
| 0.045812
| 0.263962
| 1,898
| 45
| 106
| 42.177778
| 0.692198
| 0.18019
| 0
| 0.052632
| 0
| 0
| 0.142102
| 0.039656
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0.078947
| 0
| 0.105263
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
71ffb4e7bdba22327963714071779d2e8b20d391
| 3,727
|
py
|
Python
|
my_area/views.py
|
Vincent-Juma/area_master
|
3ea1dd1039053fb4de6326deb967383d09d7145b
|
[
"MIT"
] | 1
|
2021-05-28T14:16:54.000Z
|
2021-05-28T14:16:54.000Z
|
my_area/views.py
|
Vincent-Juma/area_master
|
3ea1dd1039053fb4de6326deb967383d09d7145b
|
[
"MIT"
] | null | null | null |
my_area/views.py
|
Vincent-Juma/area_master
|
3ea1dd1039053fb4de6326deb967383d09d7145b
|
[
"MIT"
] | 1
|
2021-04-13T09:14:07.000Z
|
2021-04-13T09:14:07.000Z
|
from django.shortcuts import render
from .forms import *
from django.shortcuts import redirect,get_object_or_404
from django.contrib.auth.decorators import login_required
from . models import *
from django.views import generic
@login_required(login_url='/accounts/login/')
def home(request):
mylocs = Myloc.objects.all()
return render(request, 'home.html',{"mylocs":mylocs,})
@login_required(login_url='accounts/login/')
def add_profile(request):
current_user = request.user
profile = Profile.objects.filter(id = current_user.id)
if request.method == 'POST':
form = NewProfileForm(request.POST, request.FILES)
if form.is_valid():
caption = form.save(commit=False)
caption.user = current_user
caption.save()
return redirect('myprofile')
else:
form = NewProfileForm()
return render(request, 'edit.html', {"form":form})
@login_required(login_url='accounts/login/')
def my_profile(request):
current_user = request.user
    my_my_areas = Myloc.objects.filter(user=current_user)
    my_profile = Profile.objects.filter(user=current_user).first()
    return render(request, 'profile.html', {"my_my_areas": my_my_areas, "my_profile": my_profile})
@login_required(login_url='/accounts/login/')
def addmy_area(request):
current_user = request.user
if request.method == 'POST':
form = MylocForm(request.POST, request.FILES)
if form.is_valid():
image = form.save(commit=False)
image.user = current_user
image.save()
return redirect('home')
else:
form = MylocForm()
return render(request, 'addmy_area.html', {"form": form})
def myloc_details(request,myloc_id):
activities=Activity.objects.filter(myloc=myloc_id)
posts=Post.objects.filter(myloc=myloc_id)
myloc=Myloc.objects.get(pk=myloc_id)
return render(request,'details.html',{'myloc':myloc,'activities':activities,'posts':posts})
@login_required(login_url="/accounts/login/")
def new_activity(request,pk):
current_user = request.user
myloc = get_object_or_404(Myloc,pk=pk)
if request.method == 'POST':
activity_form = NewActivityForm(request.POST, request.FILES)
if activity_form.is_valid():
activity = activity_form.save(commit=False)
activity.user = current_user
activity.myloc=myloc
activity.save()
return redirect('detail', myloc_id=myloc.id)
else:
activity_form = NewActivityForm()
return render(request, 'new_activity.html', {"form": activity_form,'myloc':myloc})
@login_required(login_url="/accounts/login/")
def new_post(request,pk):
current_user = request.user
myloc = get_object_or_404(Myloc,pk=pk)
if request.method == 'POST':
post_form = NewPostForm(request.POST, request.FILES)
if post_form.is_valid():
post = post_form.save(commit=False)
post.user = current_user
post.myloc=myloc
post.save()
return redirect('detail', myloc_id=myloc.id)
else:
post_form = NewPostForm()
return render(request, 'new_post.html', {"form": post_form,'myloc':myloc})
@login_required(login_url='/accounts/login/')
def search_project(request):
if 'project_name' in request.GET and request.GET["project_name"]:
search_term = request.GET.get("project_name")
searched_project = Myloc.search_by_location(search_term)
message = f"{search_term}"
return render(request, "search.html",{"message":message,"project": searched_project})
else:
message = "No search history"
return render(request, 'search.html',{"message":message})
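# Hypothetical urls.py sketch matching the view names and the redirect
# targets used above ('home', 'myprofile', 'detail', ...); the URL patterns
# themselves are illustrative, not taken from the project.
#
# from django.urls import path
# from . import views
#
# urlpatterns = [
#     path('', views.home, name='home'),
#     path('profile/', views.my_profile, name='myprofile'),
#     path('profile/edit/', views.add_profile, name='edit'),
#     path('myloc/new/', views.addmy_area, name='addmy_area'),
#     path('myloc/<int:myloc_id>/', views.myloc_details, name='detail'),
#     path('myloc/<int:pk>/activity/new/', views.new_activity, name='new_activity'),
#     path('myloc/<int:pk>/post/new/', views.new_post, name='new_post'),
#     path('search/', views.search_project, name='search_project'),
# ]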
| 37.646465
| 96
| 0.676952
| 465
| 3,727
| 5.243011
| 0.172043
| 0.054143
| 0.070139
| 0.060295
| 0.407711
| 0.312961
| 0.283429
| 0.187449
| 0.140279
| 0.105824
| 0
| 0.003007
| 0.196941
| 3,727
| 98
| 97
| 38.030612
| 0.81156
| 0
| 0
| 0.306818
| 0
| 0
| 0.112721
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.068182
| 0
| 0.306818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9c01e95549f9ef77973f439bfde72aea99322f8e
| 20,840
|
py
|
Python
|
scripts/data/topple_dataset.py
|
davrempe/predicting-physical-dynamics
|
b0abb385a7ac491e25d1df0b9a9a943621fc2d37
|
[
"MIT"
] | 16
|
2020-02-29T06:44:16.000Z
|
2022-02-20T13:05:12.000Z
|
scripts/data/topple_dataset.py
|
davrempe/predicting-physical-dynamics
|
b0abb385a7ac491e25d1df0b9a9a943621fc2d37
|
[
"MIT"
] | 6
|
2020-02-13T08:09:28.000Z
|
2022-02-09T23:35:55.000Z
|
scripts/data/topple_dataset.py
|
davrempe/predicting-physical-dynamics
|
b0abb385a7ac491e25d1df0b9a9a943621fc2d37
|
[
"MIT"
] | 4
|
2020-04-22T09:46:55.000Z
|
2021-04-15T06:17:48.000Z
|
import numpy as np
import pickle
from os.path import exists, realpath
import sys
import math
from topple_data_loader import ToppleData, ToppleDataLoader
import transforms3d
class ToppleNormalizationInfo():
'''
Structure to hold all the normalization information for a dataset.
'''
def __init__(self):
# max element of any linear vel vector
self.max_lin_vel = None
# max element of any angular vel vector
self.max_ang_vel = None
# max distance between positions in two contiguous timesteps
self.max_pos = None
# max change in rotation around any axis between two contiguous timesteps (for euler rot)
self.max_rot = None
# max angle of rotation between two steps for axis-angle representation
self.max_delta_rot = None
# max 2-norm of applied impulse vector
self.force_vec_max = None
# max 2-norm of a point in an object point cloud (used for point cloud and force pos)
self.pc_max = None
# normalization values for shape-related stuff
self.density_offset = None
self.density_max = None
self.mass_offset = None
self.mass_max = None
self.inertia_offset = None
self.inertia_max = None
self.friction_offset = None
self.friction_max = None
def print_out(self):
print({'max_lin_vel' : self.max_lin_vel, 'max_ang_vel' : self.max_ang_vel, 'max_pos' : self.max_pos, \
'max_rot' : self.max_rot, 'max_delta_rot' : self.max_delta_rot, 'force_vec_max' : self.force_vec_max, 'pc_max' : self.pc_max, \
'density_off' : self.density_offset, 'density_max' : self.density_max, 'mass_off' : self.mass_offset, \
'mass_max' : self.mass_max, 'inertia_off' : self.inertia_offset, 'inertia_max' : self.inertia_max, \
'friction_off' : self.friction_offset, 'friction_max' : self.friction_max
})
def save(self, pkl_file):
''' Saves normalization info object to a specified .pkl file. '''
with open(pkl_file, 'wb') as f:
pickle.dump(self, f)
def load_from(self, pkl_file):
''' Load normalization info into this object from a specified .pkl file. '''
with open(pkl_file, 'rb') as f:
norm_info = pickle.load(f)
self.copy_from(norm_info)
def copy_from(self, norm_info):
'''
Takes values from the given normalization info object and copies them to this one
'''
self.max_lin_vel = norm_info.max_lin_vel
self.max_ang_vel = norm_info.max_ang_vel
self.max_pos = norm_info.max_pos
self.max_rot = norm_info.max_rot
        try:
            self.max_delta_rot = norm_info.max_delta_rot
        except AttributeError:
            # older versions of the data don't have max_delta_rot
            pass
self.force_vec_max = norm_info.force_vec_max
self.pc_max = norm_info.pc_max
self.density_offset = norm_info.density_offset
self.density_max = norm_info.density_max
self.mass_offset = norm_info.mass_offset
self.mass_max = norm_info.mass_max
self.inertia_offset = norm_info.inertia_offset
self.inertia_max = norm_info.inertia_max
        try:
            self.friction_offset = norm_info.friction_offset
            self.friction_max = norm_info.friction_max
        except AttributeError:
            # older versions of the data don't have friction info
            pass
class ToppleBatch(object):
'''
Structure to hold a single batch of data.
'''
def __init__(self, size, seq_len, num_pts):
self.size = size
self.num_steps = seq_len
self.num_pts = num_pts
self.point_cloud = np.zeros((self.size, self.num_pts, 3))
self.lin_vel = np.zeros((self.size, self.num_steps, 3))
self.ang_vel = np.zeros((self.size, self.num_steps, 3))
self.pos = np.zeros((self.size, self.num_steps, 3))
        # cumulative euler angles
self.rot = np.zeros((self.size, self.num_steps, 3))
# change in rotation in quaternion rep (w, x, y, z)
self.delta_quat = np.zeros((self.size, self.num_steps, 4))
# change in rotation between steps in axis-angle rep (scaled 3 vec)
self.delta_rot = np.zeros((self.size, self.num_steps, 3))
# change in rotation between steps in split axis-angle rep (4-vec)
self.delta_rot_split = np.zeros((self.size, self.num_steps, 4))
# 0 if before topple idx, 1 if after
self.topple_label = np.zeros((self.size, self.num_steps), dtype=int)
# other meta-data not directly used in network
self.toppled = []
self.shape_name = []
self.body_friction = np.zeros((self.size))
self.mass = np.zeros((self.size))
self.scale = np.zeros((self.size, 3))
self.rot_euler = np.zeros((self.size, self.num_steps, 3))
class ToppleDataset(object):
'''
Loads toppling data and provides batches for training and model evaluation.
'''
def __init__(self, roots, norm_info_file, batch_size=32, num_steps=15, shuffle=False, num_pts=None, perturb_pts=0.0):
'''
- roots : list of directories containing data to load for this dataset
- norm_info_file : .pkl file containing normalization information
- batch_size : number of sequences to return in each batch
- num_steps : number of timesteps to return in each sequence
- shuffle : randomly shuffles the returned sequence ordering
- num_pts : the number of points to use in the returned point cloud. If None uses all points in the data.
        - perturb_pts : the stdev to randomly perturb point clouds with. If None no perturbation is performed.
        '''
# settings
self.batch_size = batch_size
self.steps_per_seq = num_steps
self.shuffle = shuffle
self.perturb_std = perturb_pts
self.num_pts = num_pts
# load in data
for root in roots:
if not exists(root):
print('Could not find dataset at ' + root)
return
data_loader = ToppleDataLoader()
self.data = data_loader.load_data(roots)
if num_pts is None:
# use all the points in the point cloud
self.num_pts = self.data.point_cloud.shape[1]
# load in normalization info
if not exists(norm_info_file):
print('Could not find normalization info at ' + norm_info_file)
return
self.norm_info = ToppleNormalizationInfo()
self.norm_info.load_from(norm_info_file)
print('Loaded normalization info!')
# see if we have axis-angle info (for backwards compat)
self.use_aa = False
self.use_aa_split = False
self.use_topple_idx = False
self.use_delta_quat = False
if len(self.data.delta_rot) > 0:
self.use_aa = True
if len(self.data.delta_rot_split) > 0:
self.use_aa_split = True
if len(self.data.topple_idx) > 0:
self.use_topple_idx = True
if len(self.data.body_friction) > 0:
self.use_body_friction = True
if len(self.data.delta_quat) > 0:
self.use_delta_quat = True
# normalize the data
print('Normalizing data...')
self.normalize_data(self.data, self.norm_info)
print('Finished normalizing!')
        # order to iterate through data when returning batches (in order by default);
        # a list so that np.random.shuffle can shuffle it in place
        self.iter_inds = list(range(0, self.data.size))
# prepare to iterate through
self.reset()
def normalize_data(self, data, norm_info):
'''
Normalizes (in place) the given ToppleData using the ToppleNormalizationInfo.
'''
# point clouds -> [-1, 1]
data.point_cloud /= norm_info.pc_max
# force pos -> [-1, 1]
data.force_pos /= norm_info.pc_max
# force vec -> [-1, 1]
data.force_vec /= norm_info.force_vec_max
# density -> [0, 1]
data.density = (data.density - norm_info.density_offset) / norm_info.density_max
# mass -> [0, 1]
data.mass = (data.mass - norm_info.mass_offset) / norm_info.mass_max
# inertia -> [0, 1]
data.inertia = (data.inertia - norm_info.inertia_offset) / norm_info.inertia_max
# friction -> [0, 1]
if norm_info.friction_offset is not None:
data.body_friction = (data.body_friction - norm_info.friction_offset) / norm_info.friction_max
# now time sequence data
# velocities -> [-1, 1]
for i, lin_vel_steps in enumerate(data.lin_vel):
data.lin_vel[i] = [(x / norm_info.max_lin_vel) for x in lin_vel_steps]
for i, ang_vel_steps in enumerate(data.ang_vel):
data.ang_vel[i] = [(x / norm_info.max_ang_vel) for x in ang_vel_steps]
# delta position -> [-1, 1]
for i, pos_steps in enumerate(data.pos):
data.pos[i] = [(x / norm_info.max_pos) for x in pos_steps]
# delta rotation -> [-1, 1]
for i, rot_steps in enumerate(data.total_rot):
data.total_rot[i] = [(x / norm_info.max_rot) for x in rot_steps]
# delta rot axis-angle -> [-1, 1] norm
if self.use_aa:
for i, delta_rot_steps in enumerate(data.delta_rot):
data.delta_rot[i] = [(x / norm_info.max_delta_rot) for x in delta_rot_steps]
        # make the axes unit length and normalize the angle -> [-1, 1]
if self.use_aa_split:
for i, delta_rot_split_steps in enumerate(data.delta_rot_split):
data.delta_rot_split[i] = [np.append(x[:3] / np.linalg.norm(x[:3]), x[3] / norm_info.max_delta_rot) for x in delta_rot_split_steps]
def reset(self):
'''
Prepares to iterate through dataset.
'''
if self.shuffle:
np.random.shuffle(self.iter_inds)
        # we consider an epoch as returning one sequence from every single simulation
        # (though if the sequence length is shorter than the sim length, the dataset
        # contains far more unique sequences than one epoch covers)
self.num_batches = (self.data.size + self.batch_size - 1) // self.batch_size
self.batch_idx = 0
def has_next_batch(self):
'''
Returns false if done with the current "epoch" (seen each sim once).
'''
return self.batch_idx < self.num_batches
def next_batch(self, random_window=True, focus_toppling=False):
'''
        Returns the next batch of data. If random_window=True, a random window of the
        correct length is chosen (otherwise the window starts at step 0). If
        focus_toppling=True, the window is chosen to include the part of the
        sequence where toppling occurs.
'''
# size is either batch_size, or shorter if we're at the end of the data
start_idx = self.batch_idx * self.batch_size
end_idx = min((self.batch_idx + 1) * self.batch_size, self.data.size)
batch_size = end_idx - start_idx
# get batch data
batch = ToppleBatch(self.batch_size, self.steps_per_seq, self.num_pts)
for i in range(batch_size):
pc, lin_vel, ang_vel, pos, rot, delta_quat, delta_rot, delta_rot_split, topple_label, meta_info = \
self.get_seq(self.iter_inds[start_idx + i], self.steps_per_seq, random_window, focus_toppling)
batch.point_cloud[i] = pc
batch.lin_vel[i] = lin_vel
batch.ang_vel[i] = ang_vel
batch.pos[i] = pos
batch.rot[i] = rot
if self.use_delta_quat:
batch.delta_quat[i] = delta_quat
if self.use_aa:
batch.delta_rot[i] = delta_rot
if self.use_aa_split:
batch.delta_rot_split[i] = delta_rot_split
if self.use_topple_idx:
batch.topple_label[i] = topple_label
batch.toppled.append(meta_info[0])
batch.shape_name.append(meta_info[1])
batch.scale[i] = meta_info[2]
batch.rot_euler[i] = meta_info[3]
if self.use_body_friction:
batch.body_friction[i] = meta_info[4]
batch.mass[i] = meta_info[5]
if batch_size != self.batch_size:
# need to pad the end with repeat of data
for i in range(self.batch_size - batch_size):
batch.point_cloud[batch_size + i] = batch.point_cloud[i]
batch.lin_vel[batch_size + i] = batch.lin_vel[i]
batch.ang_vel[batch_size + i] = batch.ang_vel[i]
batch.pos[batch_size + i] = batch.pos[i]
batch.rot[batch_size + i] = batch.rot[i]
if self.use_delta_quat:
batch.delta_quat[batch_size + i] = batch.delta_quat[i]
batch.toppled.append(batch.toppled[i])
batch.shape_name.append(batch.shape_name[i])
batch.scale[batch_size + i] = batch.scale[i]
batch.rot_euler[batch_size + i] = batch.rot_euler[i]
batch.mass[batch_size + i] = batch.mass[i]
if self.use_aa:
batch.delta_rot[batch_size + i] = batch.delta_rot[i]
if self.use_aa_split:
batch.delta_rot_split[batch_size + i] = batch.delta_rot_split[i]
if self.use_topple_idx:
batch.topple_label[batch_size + i] = batch.topple_label[i]
if self.use_body_friction:
batch.body_friction[batch_size + i] = batch.body_friction[i]
self.batch_idx += 1
return batch
def get_seq(self, idx, num_steps, random_window=True, focus_toppling=False):
'''
Returns a random contiguous sequence from the simulation at the given idx and length num_steps.
If num_steps > sim_length the final (sim_length-num_steps) steps are padded with the value at
sim[sim_length].
'''
# get the normalized canonical point cloud for this simulation
pc = np.copy(self.data.point_cloud[self.data.shape_idx[idx]])
scale = self.data.scale[idx]
# scale accordingly
pc *= np.reshape(scale, (1, -1))
# randomly perturb point cloud
pc += np.random.normal(0.0, self.perturb_std, pc.shape)
# randomly draw a subset of points if desired
if self.num_pts < pc.shape[0]:
pc_inds = np.random.choice(pc.shape[0], self.num_pts, replace=False)
pc = pc[pc_inds, :]
# randomly choose a size num_steps sequence from the simulation to return time-series data
total_steps = len(self.data.lin_vel[idx])
max_start_step = total_steps - num_steps
start_step = 0
if max_start_step < 0:
# simulation is shorter than desired sequence length
pad_len = abs(max_start_step)
lin_vel_list = self.data.lin_vel[idx]
lin_vel_out = np.array(lin_vel_list + [lin_vel_list[-1]]*pad_len)
ang_vel_list = self.data.ang_vel[idx]
ang_vel_out = np.array(ang_vel_list + [ang_vel_list[-1]]*pad_len)
pos_list = self.data.pos[idx]
pos_out = np.array(pos_list + [pos_list[-1]]*pad_len)
rot_list = self.data.total_rot[idx]
rot_out = np.array(rot_list + [rot_list[-1]]*pad_len)
if self.use_delta_quat:
delta_quat_list = self.data.delta_quat[idx]
delta_quat_out = np.array(delta_quat_list + [delta_quat_list[-1]]*pad_len)
euler_rot_list = self.data.rot_euler[idx]
euler_rot_out = np.array(euler_rot_list + [euler_rot_list[-1]]*pad_len)
if self.use_aa:
delta_rot_list = self.data.delta_rot[idx]
delta_rot_out = np.array(delta_rot_list + [delta_rot_list[-1]]*pad_len)
if self.use_aa_split:
delta_rot_split_list = self.data.delta_rot_split[idx]
delta_rot_split_out = np.array(delta_rot_split_list + [delta_rot_split_list[-1]]*pad_len)
if self.use_topple_idx:
topple_label_out = np.zeros((total_steps + pad_len), dtype=int)
seq_topple_idx = self.data.topple_idx[idx]
if seq_topple_idx > 0:
topple_label_out[seq_topple_idx:] = 1
else:
start_step = 0
if random_window:
if focus_toppling and self.data.toppled[idx]:
# choose window around the index where it topples
topple_idx = self.data.topple_idx[idx]
min_idx = max([topple_idx - num_steps + 1, 0])
if min_idx >= max_start_step:
# just pick the max index
start_step = max_start_step
else:
# our window is guaranteed to see some part of toppling
start_step = np.random.randint(min_idx, max_start_step+1)
else:
start_step = np.random.randint(0, max_start_step+1)
end_step = start_step + num_steps
# print('Range: %d, %d' % (start_step, end_step))
lin_vel_out = np.array(self.data.lin_vel[idx][start_step:end_step])
ang_vel_out = np.array(self.data.ang_vel[idx][start_step:end_step])
pos_out = np.array(self.data.pos[idx][start_step:end_step])
rot_out = np.array(self.data.total_rot[idx][start_step:end_step])
if self.use_delta_quat:
delta_quat_out = np.array(self.data.delta_quat[idx][start_step:end_step])
euler_rot_out = np.array(self.data.rot_euler[idx][start_step:end_step])
if self.use_aa:
delta_rot_out = np.array(self.data.delta_rot[idx][start_step:end_step])
if self.use_aa_split:
delta_rot_split_out = np.array(self.data.delta_rot_split[idx][start_step:end_step])
if self.use_topple_idx:
topple_label_out = np.zeros((num_steps), dtype=int)
seq_topple_idx = self.data.topple_idx[idx]
if seq_topple_idx > 0:
if seq_topple_idx <= start_step:
topple_label_out[:] = 1
elif seq_topple_idx < end_step:
topple_label_out[seq_topple_idx-start_step:] = 1
# rotate point cloud to align with first frame of sequence
init_rot = self.data.rot_euler[idx][start_step]
xrot, yrot, zrot = np.radians(init_rot)
R = transforms3d.euler.euler2mat(zrot, xrot, yrot, axes='szxy') # unity applies euler angles in z, x, y ordering
pc = np.dot(pc, R.T)
toppled = self.data.toppled[idx]
shape_name = self.data.shape_name[idx]
mass = self.data.mass[idx]
body_fric = -1.0
if self.use_body_friction:
body_fric = self.data.body_friction[idx]
meta_info = (toppled, shape_name, scale, euler_rot_out, body_fric, mass)
if not self.use_aa:
delta_rot_out = None
if not self.use_aa_split:
delta_rot_split_out = None
if not self.use_topple_idx:
topple_label_out = None
if not self.use_delta_quat:
delta_quat_out = None
return pc, lin_vel_out, ang_vel_out, pos_out, rot_out, delta_quat_out, delta_rot_out, delta_rot_split_out, topple_label_out, meta_info
def get_norm_info(self):
return self.norm_info
if __name__ == '__main__':
# norm_info = ToppleNormalizationInfo()
# norm_info.load_from('../../data/sim/normalization_info/cube_train.pkl')
# norm_info.print_out()
topple_data = ToppleDataset(roots=['./data/sim/Cube/Cube30k_ObjSplit/Cube30kVal'], norm_info_file='./data/sim/normalization_info/cube_30k.pkl', \
batch_size=5, num_steps=10, shuffle=True, num_pts=None, perturb_pts=0.01)
count = 0
while topple_data.has_next_batch():
batch = topple_data.next_batch(random_window=True, focus_toppling=False)
count += 1
# print(batch.lin_vel[0])
# print(batch.toppled[0])
# print(batch.delta_rot_split[0])
# print(batch.delta_rot[0])
# print(batch.topple_label[0])
# print(batch.pos)
# print(batch.body_friction)
# print(batch.delta_quat[0])
# print(np.degrees(2*np.arccos(batch.delta_quat[0, :, 0])))
print('Total num batches: ' + str(count))
topple_data.reset()
count = 0
while topple_data.has_next_batch():
batch = topple_data.next_batch()
count += 1
print(batch.size)
print('Total num batches: ' + str(count))
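# Hedged sketch (uses only the API above): a normalization-info file is just
# a pickled ToppleNormalizationInfo, so producing one for a new dataset only
# requires filling the fields and calling save(). The values below are
# placeholders; real ones would come from dataset statistics.
def _write_example_norm_info(pkl_path):
    info = ToppleNormalizationInfo()
    info.max_lin_vel = 1.0   # placeholder: max linear-velocity element
    info.max_ang_vel = 1.0   # placeholder: max angular-velocity element
    info.max_pos = 1.0
    info.max_rot = 1.0
    info.max_delta_rot = 1.0
    info.force_vec_max = 1.0
    info.pc_max = 1.0
    info.density_offset, info.density_max = 0.0, 1.0
    info.mass_offset, info.mass_max = 0.0, 1.0
    info.inertia_offset, info.inertia_max = 0.0, 1.0
    info.friction_offset, info.friction_max = 0.0, 1.0
    info.save(pkl_path)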
| 45.010799
| 149
| 0.613964
| 2,952
| 20,840
| 4.080285
| 0.115515
| 0.033873
| 0.022665
| 0.016189
| 0.316231
| 0.19917
| 0.134745
| 0.102864
| 0.061768
| 0.044168
| 0
| 0.008406
| 0.292131
| 20,840
| 462
| 150
| 45.108225
| 0.808094
| 0.204894
| 0
| 0.155844
| 0
| 0
| 0.025924
| 0.005247
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042208
| false
| 0.006494
| 0.022727
| 0.003247
| 0.094156
| 0.032468
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9c029add7c4d9a1a7902d4f882039de120a387b3
| 13,193
|
py
|
Python
|
scripts/sct_apply_transfo.py
|
YangHee-Min/spinalcordtoolbox
|
38ca15aa99b03ca99b7885ddc98adf2755adc43d
|
[
"MIT"
] | null | null | null |
scripts/sct_apply_transfo.py
|
YangHee-Min/spinalcordtoolbox
|
38ca15aa99b03ca99b7885ddc98adf2755adc43d
|
[
"MIT"
] | null | null | null |
scripts/sct_apply_transfo.py
|
YangHee-Min/spinalcordtoolbox
|
38ca15aa99b03ca99b7885ddc98adf2755adc43d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#########################################################################################
#
# Apply transformations. This function is a wrapper for sct_WarpImageMultiTransform
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Julien Cohen-Adad, Olivier Comtois
# Modified: 2014-07-20
#
# About the license: see the file LICENSE.TXT
#########################################################################################
# TODO: display message at the end
# TODO: interpolation methods
from __future__ import division, absolute_import
import sys, io, os, time, functools
from msct_parser import Parser
import sct_utils as sct
import sct_convert
import sct_image
import spinalcordtoolbox.image as msct_image
from sct_crop_image import ImageCropper
class Param:
def __init__(self):
self.verbose = '1'
self.remove_temp_files = '1'
# PARSER
# ==========================================================================================
def get_parser():
# parser initialisation
parser = Parser(__file__)
parser.usage.set_description('Apply transformations. This function is a wrapper for antsApplyTransforms (ANTs).')
parser.add_option(name="-i",
type_value="file",
description="input image",
mandatory=True,
example="t2.nii.gz")
parser.add_option(name="-d",
type_value="file",
description="destination image",
mandatory=True,
example="out.nii.gz")
parser.add_option(name="-w",
type_value=[[','], "file"],
description="Transformation, which can be a warping field (nifti image) or an affine transformation matrix (text file).",
mandatory=True,
example="warp1.nii.gz,warp2.nii.gz")
parser.add_option(name="-crop",
type_value="multiple_choice",
description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
mandatory=False,
default_value='0',
example=['0', '1', '2'])
parser.add_option(name="-c",
type_value=None,
description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
mandatory=False,
deprecated_by='-crop')
parser.add_option(name="-o",
type_value="file_output",
description="registered source.",
mandatory=False,
default_value='',
example="dest.nii.gz")
parser.add_option(name="-x",
type_value="multiple_choice",
description="interpolation method",
mandatory=False,
default_value='spline',
example=['nn', 'linear', 'spline'])
parser.add_option(name="-r",
type_value="multiple_choice",
description="""Remove temporary files.""",
mandatory=False,
default_value='1',
example=['0', '1'])
parser.add_option(name="-v",
type_value="multiple_choice",
description="""Verbose.""",
mandatory=False,
default_value='1',
example=['0', '1', '2'])
return parser
class Transform:
def __init__(self, input_filename, warp, fname_dest, output_filename='', verbose=0, crop=0, interp='spline', remove_temp_files=1, debug=0):
self.input_filename = input_filename
if isinstance(warp, str):
self.warp_input = list([warp])
else:
self.warp_input = warp
self.fname_dest = fname_dest
self.output_filename = output_filename
self.interp = interp
self.crop = crop
self.verbose = verbose
self.remove_temp_files = remove_temp_files
self.debug = debug
def apply(self):
# Initialization
fname_src = self.input_filename # source image (moving)
fname_warp_list = self.warp_input # list of warping fields
fname_out = self.output_filename # output
fname_dest = self.fname_dest # destination image (fix)
verbose = self.verbose
remove_temp_files = self.remove_temp_files
crop_reference = self.crop # if = 1, put 0 everywhere around warping field, if = 2, real crop
interp = sct.get_interpolation('isct_antsApplyTransforms', self.interp)
# Parse list of warping fields
sct.printv('\nParse list of warping fields...', verbose)
use_inverse = []
fname_warp_list_invert = []
# fname_warp_list = fname_warp_list.replace(' ', '') # remove spaces
# fname_warp_list = fname_warp_list.split(",") # parse with comma
for idx_warp, path_warp in enumerate(fname_warp_list):
# Check if inverse matrix is specified with '-' at the beginning of file name
if path_warp.startswith("-"):
use_inverse.append('-i')
fname_warp_list[idx_warp] = path_warp[1:] # remove '-'
fname_warp_list_invert += [[use_inverse[idx_warp], fname_warp_list[idx_warp]]]
else:
use_inverse.append('')
fname_warp_list_invert += [[path_warp]]
path_warp = fname_warp_list[idx_warp]
if path_warp.endswith((".nii", ".nii.gz")) \
and msct_image.Image(fname_warp_list[idx_warp]).header.get_intent()[0] != 'vector':
raise ValueError("Displacement field in {} is invalid: should be encoded" \
" in a 5D file with vector intent code" \
" (see https://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h" \
.format(path_warp))
# need to check if last warping field is an affine transfo
isLastAffine = False
path_fname, file_fname, ext_fname = sct.extract_fname(fname_warp_list_invert[-1][-1])
if ext_fname in ['.txt', '.mat']:
isLastAffine = True
# check if destination file is 3d
if not sct.check_if_3d(fname_dest):
sct.printv('ERROR: Destination data must be 3d')
# N.B. Here we take the inverse of the warp list, because sct_WarpImageMultiTransform concatenates in the reverse order
fname_warp_list_invert.reverse()
fname_warp_list_invert = functools.reduce(lambda x,y: x+y, fname_warp_list_invert)
# Extract path, file and extension
path_src, file_src, ext_src = sct.extract_fname(fname_src)
path_dest, file_dest, ext_dest = sct.extract_fname(fname_dest)
# Get output folder and file name
if fname_out == '':
path_out = '' # output in user's current directory
file_out = file_src + '_reg'
ext_out = ext_src
fname_out = os.path.join(path_out, file_out + ext_out)
# Get dimensions of data
sct.printv('\nGet dimensions of data...', verbose)
img_src = msct_image.Image(fname_src)
nx, ny, nz, nt, px, py, pz, pt = img_src.dim
# nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_src)
sct.printv(' ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz) + ' x ' + str(nt), verbose)
# if 3d
if nt == 1:
# Apply transformation
sct.printv('\nApply transformation...', verbose)
if nz in [0, 1]:
dim = '2'
else:
dim = '3'
sct.run(['isct_antsApplyTransforms',
'-d', dim,
'-i', fname_src,
'-o', fname_out,
'-t',
] + fname_warp_list_invert + [
'-r', fname_dest,
] + interp, verbose=verbose, is_sct_binary=True)
# if 4d, loop across the T dimension
else:
path_tmp = sct.tmp_create(basename="apply_transfo", verbose=verbose)
# convert to nifti into temp folder
sct.printv('\nCopying input data to tmp folder and convert to nii...', verbose)
img_src.save(os.path.join(path_tmp, "data.nii"))
sct.copy(fname_dest, os.path.join(path_tmp, file_dest + ext_dest))
fname_warp_list_tmp = []
for fname_warp in fname_warp_list:
path_warp, file_warp, ext_warp = sct.extract_fname(fname_warp)
sct.copy(fname_warp, os.path.join(path_tmp, file_warp + ext_warp))
fname_warp_list_tmp.append(file_warp + ext_warp)
fname_warp_list_invert_tmp = fname_warp_list_tmp[::-1]
curdir = os.getcwd()
os.chdir(path_tmp)
# split along T dimension
sct.printv('\nSplit along T dimension...', verbose)
im_dat = msct_image.Image('data.nii')
im_header = im_dat.hdr
data_split_list = sct_image.split_data(im_dat, 3)
for im in data_split_list:
im.save()
# apply transfo
sct.printv('\nApply transformation to each 3D volume...', verbose)
for it in range(nt):
file_data_split = 'data_T' + str(it).zfill(4) + '.nii'
file_data_split_reg = 'data_reg_T' + str(it).zfill(4) + '.nii'
status, output = sct.run(['isct_antsApplyTransforms',
'-d', '3',
'-i', file_data_split,
'-o', file_data_split_reg,
'-t',
] + fname_warp_list_invert_tmp + [
'-r', file_dest + ext_dest,
] + interp, verbose, is_sct_binary=True)
# Merge files back
sct.printv('\nMerge file back...', verbose)
import glob
path_out, name_out, ext_out = sct.extract_fname(fname_out)
# im_list = [Image(file_name) for file_name in glob.glob('data_reg_T*.nii')]
# concat_data use to take a list of image in input, now takes a list of file names to open the files one by one (see issue #715)
fname_list = glob.glob('data_reg_T*.nii')
fname_list.sort()
im_out = sct_image.concat_data(fname_list, 3, im_header['pixdim'])
im_out.save(name_out + ext_out)
os.chdir(curdir)
sct.generate_output_file(os.path.join(path_tmp, name_out + ext_out), fname_out)
# Delete temporary folder if specified
if int(remove_temp_files):
sct.printv('\nRemove temporary files...', verbose)
sct.rmtree(path_tmp, verbose=verbose)
# 2. crop the resulting image using dimensions from the warping field
warping_field = fname_warp_list_invert[-1]
# if last warping field is an affine transfo, we need to compute the space of the concatenate warping field:
if isLastAffine:
sct.printv('WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation...', verbose, 'warning')
elif crop_reference == 1:
ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field, background=0).crop()
# sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field+' -b 0')
elif crop_reference == 2:
ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field).crop()
# sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field)
sct.display_viewer_syntax([fname_dest, fname_out], verbose=verbose)
# MAIN
# ==========================================================================================
def main(args=None):
# check user arguments
if not args:
args = sys.argv[1:]
# Get parser info
parser = get_parser()
arguments = parser.parse(args)
input_filename = arguments["-i"]
fname_dest = arguments["-d"]
warp_filename = arguments["-w"]
transform = Transform(input_filename=input_filename, fname_dest=fname_dest, warp=warp_filename)
if "-crop" in arguments:
transform.crop = arguments["-crop"]
if "-o" in arguments:
transform.output_filename = arguments["-o"]
if "-x" in arguments:
transform.interp = arguments["-x"]
if "-r" in arguments:
transform.remove_temp_files = int(arguments["-r"])
transform.verbose = int(arguments.get('-v'))
sct.init_sct(log_level=transform.verbose, update=True) # Update log level
transform.apply()
# START PROGRAM
# ==========================================================================================
if __name__ == "__main__":
sct.init_sct()
    # initialize parameters
param = Param()
# call main function
main()
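# Hedged usage sketch: Transform can also be driven programmatically,
# mirroring main() above. The file names are illustrative and would need to
# exist on disk before calling apply().
#
# transform = Transform(input_filename='t2.nii.gz', fname_dest='dest.nii.gz',
#                       warp=['warp1.nii.gz', 'warp2.nii.gz'], interp='linear')
# transform.apply()
# # equivalent CLI (flags taken from get_parser() above):
# #   sct_apply_transfo.py -i t2.nii.gz -d dest.nii.gz -w warp1.nii.gz,warp2.nii.gz -x linear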
| 42.973941
| 175
| 0.560373
| 1,537
| 13,193
| 4.582954
| 0.214053
| 0.035775
| 0.046139
| 0.029671
| 0.20812
| 0.146508
| 0.103634
| 0.095684
| 0.058206
| 0.058206
| 0
| 0.008201
| 0.297582
| 13,193
| 306
| 176
| 43.114379
| 0.751915
| 0.179565
| 0
| 0.125581
| 0
| 0.013953
| 0.14086
| 0.009164
| 0
| 0
| 0
| 0.003268
| 0
| 1
| 0.023256
| false
| 0
| 0.04186
| 0
| 0.07907
| 0.051163
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9c04726d3c874dda1b944aacaee1f8285db82b6a
| 3,132
|
py
|
Python
|
tests/plugins/test_plugin_base.py
|
vurankar/mongo-connector
|
202aa28743855643fddd77d3e66bf1a640df3ed6
|
[
"Apache-2.0"
] | 1
|
2019-08-24T21:06:00.000Z
|
2019-08-24T21:06:00.000Z
|
tests/plugins/test_plugin_base.py
|
vurankar/mongo-connector
|
202aa28743855643fddd77d3e66bf1a640df3ed6
|
[
"Apache-2.0"
] | 13
|
2017-08-07T04:36:25.000Z
|
2021-02-08T17:37:27.000Z
|
tests/plugins/test_plugin_base.py
|
vurankar/mongo-connector
|
202aa28743855643fddd77d3e66bf1a640df3ed6
|
[
"Apache-2.0"
] | 4
|
2018-10-22T17:30:46.000Z
|
2020-07-07T21:24:48.000Z
|
# Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests methods in plugin_base.py
"""
import copy
import sys
sys.path[0:0] = [""]
from mongo_connector.plugins.plugin_base import PluginBase
from tests import unittest
from tests.plugins.helpers import (BAD_PLUGIN_CONFIGS, get_test_namespace)
class TestPluginBase(unittest.TestCase):
""" Tests the utils
"""
def setUp(self):
"""Initialize test instance.
"""
self.namespace = get_test_namespace()
def test_name(self):
"""Test name.
"""
configs = self.namespace.plugins[0]
for cfg in configs:
obj = PluginBase(cfg)
self.assertEqual(cfg['pluginName'], obj.name())
for cfg in BAD_PLUGIN_CONFIGS:
obj = PluginBase(cfg)
self.assertEqual(obj.name().index('generated'), 0)
def test_info(self):
"""Test info.
"""
configs = self.namespace.plugins[0]
for cfg in configs:
obj = PluginBase(cfg)
self.assertEqual(cfg['config'], obj.info())
for cfg in BAD_PLUGIN_CONFIGS:
obj = PluginBase(cfg)
self.assertEqual(obj.info(), {})
def _test_not_implemented_method_by_name(self, name):
"""Test not implemented method by name.
"""
configs = copy.deepcopy(self.namespace.plugins)
configs.extend(BAD_PLUGIN_CONFIGS)
for cfg in configs:
obj = PluginBase(cfg)
try:
method = getattr(obj, name)
if not method or not callable(method):
raise KeyError
method()
            except NotImplementedError:
                pass
return True
def test_invoke(self):
"""Test invoke.
"""
flag = self._test_not_implemented_method_by_name('invoke')
self.assertEqual(flag, True)
def test_bulk_invoke(self):
"""Test bulk_invoke.
"""
# Bulk invoke is really implemented but it calls invoke in loop
# which returns an not implemented exception.
flag = self._test_not_implemented_method_by_name('bulk_invoke')
self.assertEqual(flag, True)
def test_commit(self):
"""Test commit.
"""
flag = self._test_not_implemented_method_by_name('commit')
self.assertEqual(flag, True)
def test_stop(self):
"""Test stop.
"""
flag = self._test_not_implemented_method_by_name('stop')
self.assertEqual(flag, True)
if __name__ == '__main__':
unittest.main()
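# Hedged sketch of a concrete plugin, for context: PluginBase subclasses are
# expected to override the methods the tests above probe (invoke, commit,
# stop, ...). This trivial subclass is illustrative only; note the tests call
# these methods with no arguments.
#
# class EchoPlugin(PluginBase):
#     def invoke(self):
#         return self.info()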
| 27
| 74
| 0.623244
| 379
| 3,132
| 4.986807
| 0.345646
| 0.042328
| 0.057143
| 0.07619
| 0.325397
| 0.325397
| 0.277778
| 0.22328
| 0.142857
| 0.142857
| 0
| 0.007542
| 0.280332
| 3,132
| 115
| 75
| 27.234783
| 0.830967
| 0.2947
| 0
| 0.307692
| 0
| 0
| 0.028011
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.153846
| false
| 0.019231
| 0.096154
| 0
| 0.288462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|