hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f55aac1947adabbf36a8c37e2b70b19c3a004a2f
| 12,814
|
py
|
Python
|
qaeval/tests/metric_test.py
|
danieldeutsch/qaeval
|
dd7273183dd1b2c9995115310ef041daa953ca81
|
[
"Apache-2.0"
] | 6
|
2020-10-30T17:34:05.000Z
|
2021-10-10T09:27:49.000Z
|
qaeval/tests/metric_test.py
|
danieldeutsch/qaeval
|
dd7273183dd1b2c9995115310ef041daa953ca81
|
[
"Apache-2.0"
] | 1
|
2020-10-02T03:20:09.000Z
|
2021-05-07T15:32:27.000Z
|
qaeval/tests/metric_test.py
|
danieldeutsch/qaeval
|
dd7273183dd1b2c9995115310ef041daa953ca81
|
[
"Apache-2.0"
] | null | null | null |
import json
import os
import pytest
import unittest
from typing import List
from qaeval import QAEval, FIXTURES_ROOT
@pytest.mark.skipif(
"GENERATION_MODEL" not in os.environ,
reason="`GENERATION_MODEL` environment variable not set",
)
@pytest.mark.skipif(
"ANSWERING_MODEL" not in os.environ,
reason="`ANSWERING_MODEL` environment variable not set",
)
class TestQAEval(unittest.TestCase):
def setUp(self) -> None:
self.summaries = []
self.references_list = []
with open(f"{FIXTURES_ROOT}/multiling2011.jsonl", "r") as f:
for line in f:
data = json.loads(line)
summary = data["summary"]["text"]
references = [reference["text"] for reference in data["references"]]
self.summaries.append(summary)
self.references_list.append(references)
def _check_output(self, metric: QAEval, expected_output: List) -> None:
actual_output = metric.score_batch(self.summaries, self.references_list)
assert len(expected_output) == len(actual_output)
for expected, actual in zip(expected_output, actual_output):
assert len(expected) == len(actual) == 1
expected = expected["qa-eval"]
actual = actual["qa-eval"]
assert len(expected) == len(actual)
for metric in expected.keys():
assert expected[metric] == pytest.approx(actual[metric], abs=1e-5)
def test_qaeval(self):
# This is a regression test, not necessarily a test for correctness
metric = QAEval(
generation_model_path=os.environ["GENERATION_MODEL"],
answering_model_dir=os.environ["ANSWERING_MODEL"],
)
expected_output = [
{
"qa-eval": {
"is_answered": 0.2171952736318408,
"em": 0.03078358208955224,
"f1": 0.05688114487088367,
}
},
{
"qa-eval": {
"is_answered": 0.2706778606965174,
"em": 0.08286691542288557,
"f1": 0.11367400349443259,
}
},
{
"qa-eval": {
"is_answered": 0.4552238805970149,
"em": 0.05223880597014925,
"f1": 0.10360696517412935,
}
},
{
"qa-eval": {
"is_answered": 0.2671408582089552,
"em": 0.04582555970149253,
"f1": 0.05402803689883914,
}
},
{
"qa-eval": {
"is_answered": 0.17126063232225966,
"em": 0.025276841598459315,
"f1": 0.04173576561636263,
}
},
{
"qa-eval": {
"is_answered": 0.3291829383548209,
"em": 0.029159756771697066,
"f1": 0.0543755246092705,
}
},
{
"qa-eval": {
"is_answered": 0.34836235489220563,
"em": 0.05223880597014925,
"f1": 0.09381412591922542,
}
},
{
"qa-eval": {
"is_answered": 0.4337987481945113,
"em": 0.04537794896485315,
"f1": 0.12145356515842792,
}
},
{
"qa-eval": {
"is_answered": 0.44427039821776665,
"em": 0.06434837092731831,
"f1": 0.10272833079850623,
}
},
{
"qa-eval": {
"is_answered": 0.40391255917571706,
"em": 0.09642160957950431,
"f1": 0.13482779720666102,
}
},
{
"qa-eval": {
"is_answered": 0.5345864661654135,
"em": 0.12349624060150374,
"f1": 0.16393273976257167,
}
},
{
"qa-eval": {
"is_answered": 0.5204365079365079,
"em": 0.12678571428571428,
"f1": 0.16151234567901235,
}
},
]
self._check_output(metric, expected_output)
@pytest.mark.skipif(
"LERC_MODEL" not in os.environ,
reason="`LERC_MODEL` environment variable not set",
)
@pytest.mark.skipif(
"LERC_PRETRAINED_MODEL" not in os.environ,
reason="`LERC_PRETRAINED_MODEL` environment variable not set",
)
def test_qaeval_with_lerc(self):
# This is a regression test, not necessarily a test for correctness
metric = QAEval(
generation_model_path=os.environ["GENERATION_MODEL"],
answering_model_dir=os.environ["ANSWERING_MODEL"],
use_lerc=True,
lerc_model_path=os.environ["LERC_MODEL"],
lerc_pretrained_model_path=os.environ["LERC_PRETRAINED_MODEL"],
)
expected_output = [
{
"qa-eval": {
"is_answered": 0.2171952736318408,
"em": 0.03078358208955224,
"f1": 0.05688114487088367,
"lerc": 0.5280342313984585,
}
},
{
"qa-eval": {
"is_answered": 0.2706778606965174,
"em": 0.08286691542288557,
"f1": 0.11367400349443259,
"lerc": 0.8588525844061404,
}
},
{
"qa-eval": {
"is_answered": 0.4552238805970149,
"em": 0.05223880597014925,
"f1": 0.10360696517412935,
"lerc": 1.2307390170310861,
}
},
{
"qa-eval": {
"is_answered": 0.2671408582089552,
"em": 0.04582555970149253,
"f1": 0.05402803689883914,
"lerc": 0.6782244059549116,
}
},
{
"qa-eval": {
"is_answered": 0.17126063232225966,
"em": 0.025276841598459315,
"f1": 0.04173576561636263,
"lerc": 0.40871678001285994,
}
},
{
"qa-eval": {
"is_answered": 0.3291829383548209,
"em": 0.029159756771697066,
"f1": 0.0543755246092705,
"lerc": 0.6477515654560587,
}
},
{
"qa-eval": {
"is_answered": 0.34836235489220563,
"em": 0.05223880597014925,
"f1": 0.09381412591922542,
"lerc": 0.947292007320556,
}
},
{
"qa-eval": {
"is_answered": 0.4337987481945113,
"em": 0.04537794896485315,
"f1": 0.12145356515842792,
"lerc": 1.2629075305115793,
}
},
{
"qa-eval": {
"is_answered": 0.44427039821776665,
"em": 0.06434837092731831,
"f1": 0.10272833079850623,
"lerc": 1.1977039740821571,
}
},
{
"qa-eval": {
"is_answered": 0.40391255917571706,
"em": 0.09642160957950431,
"f1": 0.13482779720666102,
"lerc": 1.2360802221434326,
}
},
{
"qa-eval": {
"is_answered": 0.5345864661654135,
"em": 0.12349624060150374,
"f1": 0.16393273976257167,
"lerc": 1.5575424717221045,
}
},
{
"qa-eval": {
"is_answered": 0.5204365079365079,
"em": 0.12678571428571428,
"f1": 0.16151234567901235,
"lerc": 1.4713040575976408,
}
},
]
self._check_output(metric, expected_output)
@pytest.mark.skipif(
"LERC_MODEL" not in os.environ,
reason="`LERC_MODEL` environment variable not set",
)
@pytest.mark.skipif(
"LERC_PRETRAINED_MODEL" not in os.environ,
reason="`LERC_PRETRAINED_MODEL` environment variable not set",
)
def test_return_qa_pairs(self):
metric = QAEval(
generation_model_path=os.environ["GENERATION_MODEL"],
answering_model_dir=os.environ["ANSWERING_MODEL"],
use_lerc=True,
lerc_model_path=os.environ["LERC_MODEL"],
lerc_pretrained_model_path=os.environ["LERC_PRETRAINED_MODEL"],
)
summaries = [
"Dan walked to the bakery this morning.",
"He bought some scones today",
]
references_list = [
["Dan went to buy scones earlier this morning."],
["Dan went to buy scones earlier this morning."],
]
results_list = metric.score_batch(summaries, references_list, return_qa_pairs=True)
assert len(results_list) == 2
metrics, qa_pairs_list = results_list[0]
assert metrics["qa-eval"]["is_answered"] == 1.0
assert metrics["qa-eval"]["em"] == 0.5
assert metrics["qa-eval"]["f1"] == 0.5
self.assertAlmostEqual(metrics["qa-eval"]["lerc"], 3.171376943588257, places=4)
assert len(qa_pairs_list) == 1
qa_pairs = qa_pairs_list[0]
assert len(qa_pairs) == 2
assert (
qa_pairs[0]["question"]["question"]
== "Who went to buy scones earlier this morning?"
)
assert qa_pairs[0]["prediction"]["prediction"] == "Dan"
assert qa_pairs[0]["prediction"]["start"] == 0
assert qa_pairs[0]["prediction"]["end"] == 3
assert qa_pairs[0]["prediction"]["is_answered"] == 1.0
assert qa_pairs[0]["prediction"]["em"] == 1.0
assert qa_pairs[0]["prediction"]["f1"] == 1.0
self.assertAlmostEqual(
qa_pairs[0]["prediction"]["lerc"], 5.035197734832764, places=4
)
assert (
qa_pairs[1]["question"]["question"]
== "What did Dan go to buy earlier this morning?"
)
assert qa_pairs[1]["prediction"]["prediction"] == "bakery"
assert qa_pairs[1]["prediction"]["start"] == 18
assert qa_pairs[1]["prediction"]["end"] == 24
assert qa_pairs[1]["prediction"]["is_answered"] == 1.0
assert qa_pairs[1]["prediction"]["em"] == 0.0
assert qa_pairs[1]["prediction"]["f1"] == 0.0
self.assertAlmostEqual(
qa_pairs[1]["prediction"]["lerc"], 1.30755615234375, places=4
)
metrics, qa_pairs_list = results_list[1]
assert metrics["qa-eval"]["is_answered"] == 0.5
assert metrics["qa-eval"]["em"] == 0.5
assert metrics["qa-eval"]["f1"] == 0.5
self.assertAlmostEqual(metrics["qa-eval"]["lerc"], 2.492440700531006, places=4)
assert len(qa_pairs_list) == 1
qa_pairs = qa_pairs_list[0]
assert len(qa_pairs) == 2
assert (
qa_pairs[0]["question"]["question"]
== "Who went to buy scones earlier this morning?"
)
assert qa_pairs[0]["prediction"]["prediction"] == "He"
assert qa_pairs[0]["prediction"]["start"] == 0
assert qa_pairs[0]["prediction"]["end"] == 2
assert qa_pairs[0]["prediction"]["is_answered"] == 0.0
assert qa_pairs[0]["prediction"]["em"] == 0.0
assert qa_pairs[0]["prediction"]["f1"] == 0.0
assert qa_pairs[0]["prediction"]["lerc"] == 0.0
assert (
qa_pairs[1]["question"]["question"]
== "What did Dan go to buy earlier this morning?"
)
assert qa_pairs[1]["prediction"]["prediction"] == "scones"
assert qa_pairs[1]["prediction"]["start"] == 15
assert qa_pairs[1]["prediction"]["end"] == 21
assert qa_pairs[1]["prediction"]["is_answered"] == 1.0
assert qa_pairs[1]["prediction"]["em"] == 1.0
assert qa_pairs[1]["prediction"]["f1"] == 1.0
self.assertAlmostEqual(
qa_pairs[1]["prediction"]["lerc"], 4.984881401062012, places=4
)
| 36.927954
| 91
| 0.478851
| 1,147
| 12,814
| 5.203139
| 0.147341
| 0.051609
| 0.06317
| 0.069705
| 0.787031
| 0.759048
| 0.711796
| 0.677782
| 0.628184
| 0.628184
| 0
| 0.220119
| 0.398002
| 12,814
| 346
| 92
| 37.034682
| 0.553539
| 0.010223
| 0
| 0.479042
| 0
| 0
| 0.166404
| 0.013013
| 0
| 0
| 0
| 0
| 0.146707
| 1
| 0.01497
| false
| 0
| 0.017964
| 0
| 0.035928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
191e8cdae13f3bdbc7895835e5af41831529e667
| 596
|
py
|
Python
|
executor/meta/connection.py
|
asyre/bachelor_degree
|
274415a32dc642ecde53935d6b9c8bb23d21cf29
|
[
"MIT"
] | null | null | null |
executor/meta/connection.py
|
asyre/bachelor_degree
|
274415a32dc642ecde53935d6b9c8bb23d21cf29
|
[
"MIT"
] | null | null | null |
executor/meta/connection.py
|
asyre/bachelor_degree
|
274415a32dc642ecde53935d6b9c8bb23d21cf29
|
[
"MIT"
] | null | null | null |
from typing import Any, Optional
from executor.meta.env_var import must_extract_env_var_if_present
from executor.meta.meta import set_meta_information, has_meta_information, get_meta_information
__path_key = "connection"
def has_connection(obj: Any) -> bool:
return has_meta_information(obj, __path_key)
def get_connection(obj: Any) -> Optional[str]:
return get_meta_information(obj, __path_key)
def connection(path: str):
def __decorator(func):
set_meta_information(func, __path_key, must_extract_env_var_if_present(path))
return func
return __decorator
| 25.913043
| 95
| 0.781879
| 85
| 596
| 5
| 0.317647
| 0.211765
| 0.075294
| 0.08
| 0.254118
| 0.254118
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145973
| 596
| 22
| 96
| 27.090909
| 0.834971
| 0
| 0
| 0
| 0
| 0
| 0.016779
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.230769
| 0.153846
| 0.846154
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
1921d9d8af104e0d92c67a4f222b0660c752e1f0
| 136
|
py
|
Python
|
reservoirpy/nodes/readouts/__init__.py
|
neuronalX/reservoirpy
|
b150f097451edfa71dbfaee474149b628cd786d4
|
[
"MIT"
] | 18
|
2019-03-01T17:15:17.000Z
|
2020-12-08T13:12:49.000Z
|
reservoirpy/nodes/readouts/__init__.py
|
neuronalX/reservoirpy
|
b150f097451edfa71dbfaee474149b628cd786d4
|
[
"MIT"
] | 4
|
2019-02-19T09:25:50.000Z
|
2020-06-04T16:01:54.000Z
|
reservoirpy/nodes/readouts/__init__.py
|
neuronalX/reservoirpy
|
b150f097451edfa71dbfaee474149b628cd786d4
|
[
"MIT"
] | 12
|
2019-02-08T08:03:42.000Z
|
2020-12-16T09:35:47.000Z
|
from .force import FORCE
from .lms import LMS
from .ridge import Ridge
from .rls import RLS
__all__ = ["FORCE", "RLS", "LMS", "Ridge"]
| 19.428571
| 42
| 0.698529
| 21
| 136
| 4.333333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169118
| 136
| 6
| 43
| 22.666667
| 0.80531
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
193894d72c14a522120ef4c991010becb2b1ddd7
| 39
|
py
|
Python
|
aoc_cqkh42/year_2020/tests/__init__.py
|
cqkh42/advent-of-code
|
bcf31cf8973a5b6d67492c412dce10df742e04d1
|
[
"MIT"
] | null | null | null |
aoc_cqkh42/year_2020/tests/__init__.py
|
cqkh42/advent-of-code
|
bcf31cf8973a5b6d67492c412dce10df742e04d1
|
[
"MIT"
] | null | null | null |
aoc_cqkh42/year_2020/tests/__init__.py
|
cqkh42/advent-of-code
|
bcf31cf8973a5b6d67492c412dce10df742e04d1
|
[
"MIT"
] | null | null | null |
"""
Tests for 2020's Advent of Code
"""
| 13
| 31
| 0.641026
| 7
| 39
| 3.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0.179487
| 39
| 3
| 32
| 13
| 0.65625
| 0.794872
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1943501e07b9c9d14fca6f371166648b42dd5dc4
| 25
|
py
|
Python
|
hashdist/spec/tests/test_stack/use_alternate_package/origdirectory/origdirectory.py
|
krafczyk/hashdist
|
a322a66d4bcd4b989a6a163cd2569f3e71995f60
|
[
"BSD-3-Clause"
] | 67
|
2015-01-21T14:16:20.000Z
|
2022-03-31T23:21:09.000Z
|
hashdist/spec/tests/test_stack/use_alternate_package/origdirectory/origdirectory.py
|
dannygriffin000/hashdist
|
a322a66d4bcd4b989a6a163cd2569f3e71995f60
|
[
"BSD-3-Clause"
] | 77
|
2015-01-01T00:38:55.000Z
|
2020-06-15T22:04:42.000Z
|
hashdist/spec/tests/test_stack/use_alternate_package/origdirectory/origdirectory.py
|
dannygriffin000/hashdist
|
a322a66d4bcd4b989a6a163cd2569f3e71995f60
|
[
"BSD-3-Clause"
] | 20
|
2015-01-22T16:17:49.000Z
|
2021-02-11T21:35:25.000Z
|
"""
Example hook file
"""
| 8.333333
| 17
| 0.6
| 3
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 3
| 18
| 8.333333
| 0.714286
| 0.68
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
19711682f8618a84671f9ce2817039a4f5ef7694
| 84
|
py
|
Python
|
mdx_include/__init__.py
|
scutojr/mdx_include
|
15f560413b82e498d856e6b8d7e35ed5eb82e196
|
[
"BSD-3-Clause"
] | 38
|
2018-10-26T15:36:50.000Z
|
2022-02-28T22:01:41.000Z
|
mdx_include/__init__.py
|
scutojr/mdx_include
|
15f560413b82e498d856e6b8d7e35ed5eb82e196
|
[
"BSD-3-Clause"
] | 6
|
2018-09-24T16:35:19.000Z
|
2021-03-23T14:48:53.000Z
|
mdx_include/__init__.py
|
scutojr/mdx_include
|
15f560413b82e498d856e6b8d7e35ed5eb82e196
|
[
"BSD-3-Clause"
] | 3
|
2020-09-10T12:44:46.000Z
|
2021-11-28T12:12:50.000Z
|
name = "mdx_include"
from .mdx_include import makeExtension
assert makeExtension
| 12
| 38
| 0.809524
| 10
| 84
| 6.6
| 0.7
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 84
| 6
| 39
| 14
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
198c7fb4db867cd0eb0639589cf0bd60a4e2295a
| 230
|
py
|
Python
|
02-desafio.py
|
SweydAbdul/EstudosPythonCeV
|
5eb61d4e1d47b99d57de776c835aa9f3c2bcee3b
|
[
"MIT"
] | null | null | null |
02-desafio.py
|
SweydAbdul/EstudosPythonCeV
|
5eb61d4e1d47b99d57de776c835aa9f3c2bcee3b
|
[
"MIT"
] | null | null | null |
02-desafio.py
|
SweydAbdul/EstudosPythonCeV
|
5eb61d4e1d47b99d57de776c835aa9f3c2bcee3b
|
[
"MIT"
] | null | null | null |
print('===== \033[31mDESAFIO 02\033[m =====')
dia = input('Dia = ')
mes = input('Mes = ')
ano = input('Ano = ')
print('Voce nasceu no dia \033[33m{}\033[m de \033[33m{}\033[m de \033[33m{}\033[m. Correto?'.format(dia, mes, ano))
| 38.333333
| 116
| 0.578261
| 39
| 230
| 3.410256
| 0.410256
| 0.120301
| 0.203008
| 0.225564
| 0.255639
| 0.255639
| 0.255639
| 0.255639
| 0.255639
| 0
| 0
| 0.171717
| 0.13913
| 230
| 5
| 117
| 46
| 0.5
| 0
| 0
| 0
| 0
| 0.2
| 0.604348
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
199a7e9742a6f71b9d6bc7cb413027e1d02290ee
| 627
|
py
|
Python
|
test_array_calculus.py
|
chapman-phys220-2018f/cw09-betterthanfrank
|
03c173c462b2c0fe880d25b7129ad821700ae8bf
|
[
"MIT"
] | null | null | null |
test_array_calculus.py
|
chapman-phys220-2018f/cw09-betterthanfrank
|
03c173c462b2c0fe880d25b7129ad821700ae8bf
|
[
"MIT"
] | null | null | null |
test_array_calculus.py
|
chapman-phys220-2018f/cw09-betterthanfrank
|
03c173c462b2c0fe880d25b7129ad821700ae8bf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###
# Name:Gabriella Nutt & Amelia & Gwenyth
#Student ID: 2307512
#Email: nutt@chapman.edu
#Course: PHYS220/MATH220/CPSC220 Fall 2018
#Assignment: CW09
###
import array_calculus as ac
import numpy as np
def test_gradient():
"""
Tests that the gradient returns the correct values.
"""
x = np.arange(0, 10,1)
D = ac.gradient(x)
D = D[:5]
assert np.array_equal(D, np.array([[-1.,1.,0.,0.,0.,0.,0.,0.,0.,0.],[-0.5,0.,0.5,0.,0.,0.,0.,0.,0.,0.], [0.,-0.5,0.,0.5,0.,0.,0.,0.,0.,0.], [0.,0.,-0.5,0.,0.5,0.,0.,0.,0.,0.], [0.,0.,0.,-0.5,0.,0.5,0.,0.,0.,0.]]))
| 28.5
| 217
| 0.559809
| 125
| 627
| 2.784
| 0.408
| 0.224138
| 0.258621
| 0.287356
| 0.16092
| 0.16092
| 0.16092
| 0.16092
| 0.16092
| 0.16092
| 0
| 0.162921
| 0.148325
| 627
| 22
| 217
| 28.5
| 0.488764
| 0.371611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.142857
| false
| 0
| 0.285714
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
19b1223621446caa4d7c133cc6b8dc334714a4bd
| 101
|
py
|
Python
|
data_base/connec_database.py
|
Markynice/python_testing
|
43bac522d9339cac98221cf0be48de9277ca1624
|
[
"Apache-2.0"
] | null | null | null |
data_base/connec_database.py
|
Markynice/python_testing
|
43bac522d9339cac98221cf0be48de9277ca1624
|
[
"Apache-2.0"
] | null | null | null |
data_base/connec_database.py
|
Markynice/python_testing
|
43bac522d9339cac98221cf0be48de9277ca1624
|
[
"Apache-2.0"
] | null | null | null |
import mysql
cnx = mysql.connector.connect(user='', password='', host='', database='')
cnx.close()
| 16.833333
| 73
| 0.663366
| 12
| 101
| 5.583333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108911
| 101
| 5
| 74
| 20.2
| 0.744444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
5fdb583fa10d5de1a068dba3bc492b9a01521f09
| 273
|
py
|
Python
|
derm_ita/__init__.py
|
acorbin3/derm_ita
|
af89e9ef14f3708998a3d4282807ac02b9638e22
|
[
"MIT"
] | null | null | null |
derm_ita/__init__.py
|
acorbin3/derm_ita
|
af89e9ef14f3708998a3d4282807ac02b9638e22
|
[
"MIT"
] | null | null | null |
derm_ita/__init__.py
|
acorbin3/derm_ita
|
af89e9ef14f3708998a3d4282807ac02b9638e22
|
[
"MIT"
] | null | null | null |
from derm_ita.derm_ita import get_ita, get_cropped_center_ita, get_random_patches_ita, get_structured_patches_ita
from derm_ita.skin_tone_classification import get_del_bino_type, get_groh_ita_category, get_kinyanjui_type,\
get_kinyanjui_groh_type,get_fitzpatrick_type
| 54.6
| 113
| 0.893773
| 45
| 273
| 4.8
| 0.444444
| 0.097222
| 0.101852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069597
| 273
| 4
| 114
| 68.25
| 0.850394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
27162024581c5e6dc5867d6a4d60e9639fe5da66
| 94
|
py
|
Python
|
streamflow_app/apps.py
|
deephealthproject/backend
|
4bf6899c1308cbd42231ff9e29fe68b3ccb881e5
|
[
"MIT"
] | 2
|
2021-09-21T19:04:26.000Z
|
2021-12-31T05:21:16.000Z
|
streamflow_app/apps.py
|
deephealthproject/backend
|
4bf6899c1308cbd42231ff9e29fe68b3ccb881e5
|
[
"MIT"
] | null | null | null |
streamflow_app/apps.py
|
deephealthproject/backend
|
4bf6899c1308cbd42231ff9e29fe68b3ccb881e5
|
[
"MIT"
] | 2
|
2020-03-20T14:05:48.000Z
|
2020-06-16T16:15:47.000Z
|
from django.apps import AppConfig
class SFAppConfig(AppConfig):
name = 'streamflow_app'
| 15.666667
| 33
| 0.765957
| 11
| 94
| 6.454545
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 94
| 5
| 34
| 18.8
| 0.898734
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
2723fd68fbf139f16aca593966ae5bdd3cc6b8c4
| 561
|
py
|
Python
|
exe1.py
|
NearHuscarl/LearnPythonTheHardWay
|
51ee6447f5b6892c3b3b886c4afa196ff15e814b
|
[
"Zed"
] | null | null | null |
exe1.py
|
NearHuscarl/LearnPythonTheHardWay
|
51ee6447f5b6892c3b3b886c4afa196ff15e814b
|
[
"Zed"
] | null | null | null |
exe1.py
|
NearHuscarl/LearnPythonTheHardWay
|
51ee6447f5b6892c3b3b886c4afa196ff15e814b
|
[
"Zed"
] | null | null | null |
print("Hello World!")
print ("Hello Again")
print("I like typing this.")
print("This is fun.")
print('Yay! Printing.')
print("I'd much rather you 'not'.")
print('I "said" do not touch this.')
print('this command will print one line')
print('this\n command\n will\n print\n multiple\n lines')
# print('nevermind')
# --Output--
# Hello World!
# Hello Again
# I like typing this.
# This is fun.
# Yay! Printing.
# I'd much rather you 'not'.
# I "said" do not touch this.
# this command will print one line
# this
# command
# will
# print
# multiple
# lines
| 20.035714
| 57
| 0.666667
| 91
| 561
| 4.10989
| 0.318681
| 0.048128
| 0.120321
| 0.160428
| 0.342246
| 0.342246
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174688
| 561
| 27
| 58
| 20.777778
| 0.807775
| 0.417112
| 0
| 0
| 0
| 0
| 0.648387
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
273444f9a1b7083c5142e2bf60b1e1380606e106
| 91
|
py
|
Python
|
examples/apps/ai_spleen_seg_app/__main__.py
|
jlvahldiek/monai-deploy-app-sdk
|
050aeabec581067a11566f59a2970b075d36ae7c
|
[
"Apache-2.0"
] | 28
|
2021-09-17T18:16:42.000Z
|
2022-03-31T16:32:36.000Z
|
examples/apps/ai_spleen_seg_app/__main__.py
|
jlvahldiek/monai-deploy-app-sdk
|
050aeabec581067a11566f59a2970b075d36ae7c
|
[
"Apache-2.0"
] | 109
|
2021-09-17T18:34:31.000Z
|
2022-03-31T21:04:35.000Z
|
examples/apps/ai_spleen_seg_app/__main__.py
|
jlvahldiek/monai-deploy-app-sdk
|
050aeabec581067a11566f59a2970b075d36ae7c
|
[
"Apache-2.0"
] | 11
|
2021-09-17T20:23:31.000Z
|
2022-03-29T08:55:19.000Z
|
from app import AISpleenSegApp
if __name__ == "__main__":
AISpleenSegApp(do_run=True)
| 18.2
| 31
| 0.758242
| 11
| 91
| 5.454545
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 91
| 4
| 32
| 22.75
| 0.779221
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
27469245bbb862daef90cd4e4c9aa369efdb40c1
| 42
|
py
|
Python
|
2010/scrapper2010.py
|
madewulf/PopulationPyramid.net
|
62316ac5030550c45d7a6c8d3e8e66f5c405dd68
|
[
"MIT"
] | 29
|
2015-02-25T11:01:44.000Z
|
2022-03-09T04:05:54.000Z
|
2010/scrapper2010.py
|
madewulf/PopulationPyramid.net
|
62316ac5030550c45d7a6c8d3e8e66f5c405dd68
|
[
"MIT"
] | 3
|
2018-02-10T12:29:35.000Z
|
2018-02-16T08:19:46.000Z
|
2010/scrapper2010.py
|
madewulf/PopulationPyramid.net
|
62316ac5030550c45d7a6c8d3e8e66f5c405dd68
|
[
"MIT"
] | 14
|
2015-02-27T08:56:37.000Z
|
2020-08-17T02:11:24.000Z
|
__author__ = 'madewulf'
import requests
| 8.4
| 23
| 0.761905
| 4
| 42
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 4
| 24
| 10.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
275a964a8b850452d888acde494e6eaa07baead9
| 115
|
py
|
Python
|
examples/django/mysite/myapp/urls.py
|
MartinSahlen/python-cloud-fn
|
97f74f375485972e999d2ec6d8feabf61604a78e
|
[
"MIT"
] | 230
|
2017-05-02T10:25:13.000Z
|
2021-12-25T18:02:26.000Z
|
examples/django/mysite/myapp/urls.py
|
MartinSahlen/python-cloud-fn
|
97f74f375485972e999d2ec6d8feabf61604a78e
|
[
"MIT"
] | 50
|
2017-05-22T07:39:56.000Z
|
2021-06-10T18:43:37.000Z
|
examples/django/mysite/myapp/urls.py
|
MartinSahlen/python-cloud-fn
|
97f74f375485972e999d2ec6d8feabf61604a78e
|
[
"MIT"
] | 38
|
2017-05-28T21:53:17.000Z
|
2021-06-03T12:18:22.000Z
|
from django.conf.urls import url
from .views import data_view
urlpatterns = [
url(r'^data', data_view),
]
| 16.428571
| 32
| 0.686957
| 17
| 115
| 4.529412
| 0.647059
| 0.207792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 115
| 6
| 33
| 19.166667
| 0.836957
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
2760de454f04a6a74bc68ced51e7b7b8c4ea7eb1
| 216
|
py
|
Python
|
postprocessedtracer/__init__.py
|
kahoooo/PostProcessedTracer
|
0c1078c8ccbe4ea1dd1b5a0db788fd8df4a82824
|
[
"MIT"
] | null | null | null |
postprocessedtracer/__init__.py
|
kahoooo/PostProcessedTracer
|
0c1078c8ccbe4ea1dd1b5a0db788fd8df4a82824
|
[
"MIT"
] | null | null | null |
postprocessedtracer/__init__.py
|
kahoooo/PostProcessedTracer
|
0c1078c8ccbe4ea1dd1b5a0db788fd8df4a82824
|
[
"MIT"
] | null | null | null |
from postprocessedtracer.frame import Frame
from postprocessedtracer.integrator import VanLeer2
from postprocessedtracer.particles import Particles
__all__ = ['Frame',
'Particles',
'VanLeer2']
| 27
| 51
| 0.75
| 19
| 216
| 8.315789
| 0.421053
| 0.436709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.185185
| 216
| 7
| 52
| 30.857143
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0.101852
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
2769e6fadd9db4aa5c4ee67a59b497853d727353
| 26
|
py
|
Python
|
__init__.py
|
d0cwoo/media_player.nuvo
|
0062c700e16247288c8494dd2079e9bb89a9cca0
|
[
"MIT"
] | null | null | null |
__init__.py
|
d0cwoo/media_player.nuvo
|
0062c700e16247288c8494dd2079e9bb89a9cca0
|
[
"MIT"
] | null | null | null |
__init__.py
|
d0cwoo/media_player.nuvo
|
0062c700e16247288c8494dd2079e9bb89a9cca0
|
[
"MIT"
] | null | null | null |
"""The Nuvo component."""
| 13
| 25
| 0.615385
| 3
| 26
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.695652
| 0.730769
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
27743fb17f0e17d88e7d0e81059e2fd6a12fff57
| 10,361
|
py
|
Python
|
test/compare/test.py
|
x5g/CubeSolver
|
451ae8f580038d3840cd9279cbdbcd2c13f5045d
|
[
"MIT"
] | 1
|
2021-05-08T02:17:39.000Z
|
2021-05-08T02:17:39.000Z
|
test/compare/test.py
|
x5g/CubeSolver
|
451ae8f580038d3840cd9279cbdbcd2c13f5045d
|
[
"MIT"
] | 7
|
2020-06-06T01:32:09.000Z
|
2022-02-10T09:25:40.000Z
|
test/compare/test.py
|
x5g/CubeSolver
|
451ae8f580038d3840cd9279cbdbcd2c13f5045d
|
[
"MIT"
] | 1
|
2020-03-17T21:39:20.000Z
|
2020-03-17T21:39:20.000Z
|
import numpy as np
from brokenaxes import brokenaxes
from selenium import webdriver
import time
import xlrd
import kociemba
def remove_same(arr): # Delete array continuous repeating elements
i=0
while(i<(len(arr) - 1)):
if (arr[i] == arr[i + 1] or arr[i]=="X" or arr[i]=="X'" or arr[i]=="Y" or arr[i]=="Y'" or arr[i]=="Z" or arr[i]=="Z'" or arr[i]=="X2" or arr[i]=="Y2" or arr[i]=="Z2"):
arr.pop(i)
i=i-1
i=i+1
'''
# 获取层先法和CFOP 其中CFOP可以通过CHAOS-master
browser = webdriver.Chrome()
browser.get('http://159.226.5.97:9006') # 发起http请求
time.sleep(2)
autoelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[3]/div/ul/li[3]/div/div/input')
autoelem.click()
time.sleep(1)
xl = xlrd.open_workbook('result.xls')
sheet = xl.sheet_by_name('Comparision')
for num in range(1000):
# input = "D U' B' U' B D' B' D' L' L' U B F' D U' B R' U' D' D' U' F' U L'"
input = sheet.row(num+1)[0].value
#print(input)
recoveryelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[2]/div/ul/li[5]')
recoveryelem.click()
time.sleep(1)
inputelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[4]/div/ul/li[2]/div/div/input')
inputelem.send_keys(input)
time.sleep(1)
executeelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[4]/div/ul/li[6]')
executeelem.click()
time.sleep(5)
executedelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[4]/div/ul/li[3]/div/div/input')
executedelem.clear()
time.sleep(1)
solveelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[3]/div/ul/li[4]')
solveelem.click()
# time.sleep(20)
while str(inputelem.get_attribute('value')) == "":
pass
time.sleep(1)
output = inputelem.get_attribute('value')
print(output)
# output_arr = output.split(' ')
# remove_same(output_arr)
# print(len(output_arr))
# stateelem = browser.find_element_by_xpath('/html/body/div[2]/div/ul/li[2]/div/ul/li[3]/div/div/input')
# print(stateelem.get_attribute('value'))
'''
'''
# 获取CubeSolver(20,2)和CubeSolver(18,5)
browser = webdriver.Chrome()
# browser.get('https://czx.ac.cn/cubesolvertest') # 发起http请求
browser.get('http://127.0.0.1:8000/cubesolvertest')
time.sleep(2)
for num in range(1000):
# input = "D U' B' U' B D' B' D' L' L' U B F' D U' B R' U' D' D' U' F' U L'"
input = sheet.row(num+1)[4].value
inputelem = browser.find_element_by_id('solinput')
inputelem.clear()
inputelem.send_keys(input)
rotationelem = browser.find_element_by_id('Rotation')
rotationelem.click()
time.sleep(2)
solveelem = browser.find_element_by_id('sbtn')
solveelem.click()
time.sleep(6)
outputelem = browser.find_element_by_id('soloutput')
output = outputelem.get_attribute('value')
print(output)
homeelem = browser.find_element_by_xpath('/html/body/div[2]/center/table/tbody/tr/td[1]/table[2]/tbody/tr[2]/td[1]/button')
homeelem.click()
time.sleep(1)
'''
'''
# 计算步数 去重
xl = xlrd.open_workbook (r'result.xls')
sheet = xl.sheet_by_name('Comparision')
for num in range(1000):
input = sheet.row(num + 1)[13].value
output_arr = input.split(' ')
remove_same(output_arr)
print(len(output_arr))
'''
'''
# 计算步数 不去重
xl = xlrd.open_workbook('result.xls')
sheet = xl.sheet_by_name('Comparision')
for num in range(1000):
input = sheet.row(num + 1)[7].value
output_arr = input.split(' ')
# 带步数(20f)这样的写法
# step_number = len(output_arr) - 1
# for i in range(len(output_arr) - 1) :
# if output_arr[i].find('2') == 1:
# step_number = step_number + 1
# 不带步数的写法
step_number = len(output_arr)
for i in range(len(output_arr)):
if output_arr[i].find('2') == 1:
step_number = step_number + 1
print(step_number)
'''
'''
# 画图1
# Draw the result from result.xls
# Author: Wang Wei
from selenium import webdriver
from matplotlib import pyplot as plt
from time import sleep
import xlwt
import xlrd
import pylab as pl
y_OKociemba=[]
y_DeepCubeA=[]
y_LayerFirst=[]
y_CFOP=[]
y_Kociemba=[]
y_OKociemba2=[]
xl = xlrd.open_workbook (r'result.xls')
sheet = xl.sheet_by_name('Comparision')
ave_OKociemba = 0
ave_DeepCubeA = 0
ave_LayerFirst = 0
ave_CFOP = 0
ave_Kociemba = 0
ave_OKociemba2 = 0
for num in range (1000):
# 两种技计数模式:模式一 计算认为D2为1步,模式二 认为D2为2步
# 第一种计数模式下的数据源
# y_OKociemba.append(sheet.row(num+1)[2].value)
# y_DeepCubeA.append(sheet.row(num+1)[5].value)
# y_LayerFirst.append(sheet.row(num+1)[8].value)
# y_CFOP.append(sheet.row(num+1)[11].value)
# y_Kociemba.append(sheet.row(num+1)[14].value)
# y_OKociemba2.append(sheet.row(num+1)[17].value)
# 第二种计数模式下的数据源
y_OKociemba.append(sheet.row(num+1)[3].value)
y_DeepCubeA.append(sheet.row(num+1)[6].value)
y_LayerFirst.append(sheet.row(num+1)[9].value)
y_CFOP.append(sheet.row(num+1)[12].value)
y_Kociemba.append(sheet.row(num+1)[15].value)
y_OKociemba2.append(sheet.row(num+1)[18].value)
ave_OKociemba = ave_OKociemba + y_OKociemba[num]
ave_DeepCubeA = ave_DeepCubeA + y_DeepCubeA[num]
ave_LayerFirst = ave_LayerFirst + y_LayerFirst[num]
ave_CFOP = ave_CFOP + y_CFOP[num]
ave_Kociemba = ave_Kociemba + y_Kociemba[num]
ave_OKociemba2 = ave_OKociemba2 + y_OKociemba2[num]
ave_OKociemba = ave_OKociemba / 1000
ave_DeepCubeA = ave_DeepCubeA / 1000
ave_LayerFirst = ave_LayerFirst / 1000
ave_CFOP = ave_CFOP / 1000
ave_Kociemba = ave_Kociemba / 1000
ave_OKociemba2 = ave_OKociemba2 / 1000
print("Kociemba = ", str(ave_Kociemba), str(min(y_Kociemba)), str(max(y_Kociemba)))
print("OKociemba = ", str(ave_OKociemba), str(min(y_OKociemba)), str(max(y_OKociemba)))
print("Okociemba2 = ", str(ave_OKociemba2), str(min(y_OKociemba2)), str(max(y_OKociemba2)))
print("CFOP = ", str(ave_CFOP), str(min(y_CFOP)), str(max(y_CFOP)))
print("LayerFirst = ", str(ave_LayerFirst), str(min(y_LayerFirst)), str(max(y_LayerFirst)))
print("DeepCubeA = ", str(ave_DeepCubeA), str(min(y_DeepCubeA)), str(max(y_DeepCubeA)))
# Draw the result
x=[]
for i in range (1000): # Abscissa assignment
x.append((i+1))
plt.figure(figsize=[20,5],dpi= 400)
# Broken line diagram
# plt.plot(x, y_OKociemba2, 'blue', label='O-Kociemba(18,5)')
# plt.plot(x, y_OKociemba, 'red', label='O-Kociemba(20,2)')
# plt.plot(x, y_DeepCubeA, 'green', label='DeepCubeA')
# plt.plot(x, y_LayerFirst, 'black', label='LayerFirst')
# plt.plot(x, y_CFOP, 'purple', label='CFOP')
# plt.plot(x, y_Kociemba, 'orange', label='Kociemba')
# Scatter diagram
plt.scatter(x, y_OKociemba2, marker = '.', color = 'blue', s = 15, label = 'O-Kociemba(18,5)')
plt.scatter(x, y_OKociemba, marker = '.',color = 'red', s = 15 ,label = 'O-Kociemba(20,2)')
plt.scatter(x, y_DeepCubeA, marker = '.', color = 'green', s = 15, label = 'DeepCubeA')
plt.scatter(x, y_LayerFirst, marker = '.',color = 'black', s = 15 ,label = 'LayerFirst')
plt.scatter(x, y_CFOP, marker = '.', color = 'purple', s = 15, label = 'CFOP')
plt.scatter(x, y_Kociemba, marker = '.',color = 'orange', s = 15 ,label = 'Kociemba')
# 第一种计数模式下的标题
# plt.title('Comparision of O-Kociemba(18,5), O-Kociemba(20,2), DeepCubeA, LayerFirst, CFOP and Kociemba in Counting Mode 1')
# 第二种计数模式下的标题
plt.title('Comparision of O-Kociemba(18,5), O-Kociemba(20,2), DeepCubeA, LayerFirst, CFOP and Kociemba in Counting Mode 2')
plt.xlabel('Time')
plt.ylabel('Step Number')
plt.xlim(0,1000)
# 第一种计数模式下的范围
# plt.ylim(0,180)
# 第二种计数模式下的范围
plt.ylim(0, 200)
plt.legend(loc = 1)
plt.show()
'''
# 画图2
# Draw the result from result.xls
# Author: Wang Wei
from selenium import webdriver
from matplotlib import pyplot as plt
from time import sleep
import xlwt
import xlrd
import pylab as pl
y_OKociemba=[]
y_DeepCubeA=[]
y_Kociemba=[]
y_OKociemba2=[]
xl = xlrd.open_workbook (r'result.xls')
sheet = xl.sheet_by_name('Comparision')
ave_OKociemba = 0
ave_DeepCubeA = 0
ave_Kociemba = 0
ave_OKociemba2 = 0
for num in range (1000):
# 两种技计数模式:模式一 计算认为D2为1步,模式二 认为D2为2步
# 第一种计数模式下的数据源
# y_OKociemba.append(sheet.row(num+1)[2].value)
# y_DeepCubeA.append(sheet.row(num+1)[5].value)
# y_Kociemba.append(sheet.row(num+1)[14].value)
# y_OKociemba2.append(sheet.row(num+1)[17].value)
# 第二种计数模式下的数据源
y_OKociemba.append(sheet.row(num+1)[3].value)
y_DeepCubeA.append(sheet.row(num+1)[6].value)
y_Kociemba.append(sheet.row(num+1)[15].value)
y_OKociemba2.append(sheet.row(num+1)[18].value)
ave_OKociemba = ave_OKociemba + y_OKociemba[num]
ave_DeepCubeA = ave_DeepCubeA + y_DeepCubeA[num]
ave_Kociemba = ave_Kociemba + y_Kociemba[num]
ave_OKociemba2 = ave_OKociemba2 + y_OKociemba2[num]
ave_OKociemba = ave_OKociemba / 1000
ave_DeepCubeA = ave_DeepCubeA / 1000
ave_Kociemba = ave_Kociemba / 1000
ave_OKociemba2 = ave_OKociemba2 / 1000
print("Kociemba = ", str(ave_Kociemba), str(min(y_Kociemba)), str(max(y_Kociemba)))
print("OKociemba = ", str(ave_OKociemba), str(min(y_OKociemba)), str(max(y_OKociemba)))
print("Okociemba2 = ", str(ave_OKociemba2), str(min(y_OKociemba2)), str(max(y_OKociemba2)))
print("DeepCubeA = ", str(ave_DeepCubeA), str(min(y_DeepCubeA)), str(max(y_DeepCubeA)))
# Draw the result
x=[]
for i in range (1000): # Abscissa assignment
x.append((i+1))
plt.figure(figsize=[20,5],dpi= 400)
# Broken line diagram
# plt.plot(x, y_OKociemba2, 'blue', label='O-Kociemba(18,5)')
# plt.plot(x, y_OKociemba, 'red', label='O-Kociemba(20,2)')
# plt.plot(x, y_DeepCubeA, 'green', label='DeepCubeA')
# plt.plot(x, y_Kociemba, 'orange', label='Kociemba')
# Scatter diagram
plt.scatter(x, y_OKociemba2, marker = '.', color = 'blue', s = 15, label = 'O-Kociemba(18,5)')
plt.scatter(x, y_OKociemba, marker = '.',color = 'red', s = 15 ,label = 'O-Kociemba(20,2)')
plt.scatter(x, y_DeepCubeA, marker = '.', color = 'green', s = 15, label = 'DeepCubeA')
plt.scatter(x, y_Kociemba, marker = '.',color = 'orange', s = 15 ,label = 'Kociemba')
# 第一种计数模式下的标题
# plt.title('Comparision of O-Kociemba(18,5), O-Kociemba(20,2), DeepCubeA and Kociemba in Counting Mode 1')
# 第二种计数模式下的标题
plt.title('Comparision of O-Kociemba(18,5), O-Kociemba(20,2), DeepCubeA and Kociemba in Counting Mode 2')
plt.xlabel('Time')
plt.ylabel('Step Number')
plt.xlim(0,1000)
# 第一种计数模式下的范围
# plt.ylim(15,40)
# 第二种技术模式下的范围
plt.ylim(15,45)
plt.legend(loc = 1)
plt.show()
| 32.378125
| 175
| 0.682077
| 1,640
| 10,361
| 4.175
| 0.131098
| 0.028041
| 0.038557
| 0.042062
| 0.785307
| 0.736381
| 0.730539
| 0.711553
| 0.695195
| 0.677815
| 0
| 0.043085
| 0.146511
| 10,361
| 319
| 176
| 32.479624
| 0.7312
| 0.077792
| 0
| 0.065574
| 0
| 0.016393
| 0.107465
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0.196721
| 0
| 0.213115
| 0.065574
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
27ab2d1efc56f57c2bc9166c7883e42a0cb0f9e0
| 177
|
py
|
Python
|
navigator/modules/auth/__init__.py
|
jelitox/navigator-api
|
202efaff10e3d4bea8082dddbb7fad807f90bf11
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2021-09-25T21:35:08.000Z
|
2021-09-25T21:35:08.000Z
|
navigator/modules/auth/__init__.py
|
webclinic017/navigator-api
|
d5844339c3127be77db0ee38aa7b833633e34075
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 92
|
2020-10-17T17:48:02.000Z
|
2022-03-31T10:30:25.000Z
|
navigator/modules/auth/__init__.py
|
webclinic017/navigator-api
|
d5844339c3127be77db0ee38aa7b833633e34075
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
from .decorators import login_required
from .handlers import AuthHandler
from .middleware import auth_middleware
__all__ = ["AuthHandler", "login_required", "auth_middleware"]
| 29.5
| 62
| 0.819209
| 20
| 177
| 6.85
| 0.5
| 0.189781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 177
| 5
| 63
| 35.4
| 0.861635
| 0
| 0
| 0
| 0
| 0
| 0.225989
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
27acf7797058adb3f303ad11eaf6a0a97a74870b
| 219
|
py
|
Python
|
externalfunctions.py
|
Dmozze/M3138hw_bot
|
eea79aaa4f42d2990ee85e31fae9a26c4c0c06b3
|
[
"MIT"
] | 4
|
2018-12-28T23:18:34.000Z
|
2019-02-04T15:18:27.000Z
|
externalfunctions.py
|
Dmozze/M3138hw_bot
|
eea79aaa4f42d2990ee85e31fae9a26c4c0c06b3
|
[
"MIT"
] | null | null | null |
externalfunctions.py
|
Dmozze/M3138hw_bot
|
eea79aaa4f42d2990ee85e31fae9a26c4c0c06b3
|
[
"MIT"
] | null | null | null |
import csv
def generate_csv_file(data):
f = open('sheet.csv', 'w')
with open('sheet.csv', 'w') as csvfile:
writer = csv.writer(csvfile)
for row in data:
writer.writerow(row)
| 24.333333
| 44
| 0.56621
| 30
| 219
| 4.066667
| 0.6
| 0.147541
| 0.196721
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.30137
| 219
| 8
| 45
| 27.375
| 0.797386
| 0
| 0
| 0
| 1
| 0
| 0.094787
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
27f020619cc144453400ca99f17bc4a4f2257519
| 146
|
py
|
Python
|
tests/views.py
|
knyghty/django-html5-colorfield
|
f921791dbfbc0f776d2dc339c19492e710d3b44c
|
[
"MIT"
] | 4
|
2015-03-26T18:26:03.000Z
|
2017-05-04T06:39:28.000Z
|
tests/views.py
|
knyghty/django-html5-colorfield
|
f921791dbfbc0f776d2dc339c19492e710d3b44c
|
[
"MIT"
] | null | null | null |
tests/views.py
|
knyghty/django-html5-colorfield
|
f921791dbfbc0f776d2dc339c19492e710d3b44c
|
[
"MIT"
] | null | null | null |
from django.views.generic import CreateView
from .models import Duck
class DuckCreateView(CreateView):
model = Duck
fields = ["color"]
| 16.222222
| 43
| 0.732877
| 17
| 146
| 6.294118
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184932
| 146
| 8
| 44
| 18.25
| 0.89916
| 0
| 0
| 0
| 0
| 0
| 0.034247
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
27f8bd6b5e0a8a11ab9333e8752eea0479ea837f
| 129
|
py
|
Python
|
src/gui/cryptotools/__init__.py
|
vasilypht/Cryptographic-methods-labs
|
0058496f0f4efdfc0e555b8e25e689257c26e878
|
[
"MIT"
] | 1
|
2022-02-15T08:49:30.000Z
|
2022-02-15T08:49:30.000Z
|
src/gui/cryptotools/__init__.py
|
vasilypht/Cryptographic-methods-labs
|
0058496f0f4efdfc0e555b8e25e689257c26e878
|
[
"MIT"
] | null | null | null |
src/gui/cryptotools/__init__.py
|
vasilypht/Cryptographic-methods-labs
|
0058496f0f4efdfc0e555b8e25e689257c26e878
|
[
"MIT"
] | null | null | null |
from typing import Final
from .freqanalysis import FreqAnalysisWidget
WIDGETS_CRYPTOTOOLS: Final = (
FreqAnalysisWidget,
)
| 16.125
| 44
| 0.79845
| 12
| 129
| 8.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155039
| 129
| 7
| 45
| 18.428571
| 0.93578
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
fd6b84b8063286994d04e9b7a0ce6a3061fe5d5a
| 273
|
py
|
Python
|
Lab2/task2.py
|
PiotrGrzybowski/ProbabilisticMachineLearning
|
c835a1bdf7ab1b2e58bcf90ae02b7405c9c72977
|
[
"MIT"
] | null | null | null |
Lab2/task2.py
|
PiotrGrzybowski/ProbabilisticMachineLearning
|
c835a1bdf7ab1b2e58bcf90ae02b7405c9c72977
|
[
"MIT"
] | null | null | null |
Lab2/task2.py
|
PiotrGrzybowski/ProbabilisticMachineLearning
|
c835a1bdf7ab1b2e58bcf90ae02b7405c9c72977
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.special
def student_application(tries, successes, p):
return scipy.special.binom(tries, successes) * np.power(p, successes) * np.power((1 - p), tries - successes)
if __name__ == "__main__":
print(student_application(9, 0, 0.01))
| 24.818182
| 112
| 0.710623
| 39
| 273
| 4.717949
| 0.589744
| 0.228261
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025974
| 0.153846
| 273
| 10
| 113
| 27.3
| 0.770563
| 0
| 0
| 0
| 0
| 0
| 0.029304
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0.166667
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
fdc09337f6dc73850442ff0f64527b45418e526c
| 121
|
py
|
Python
|
pybib/__init__.py
|
jgilchrist/pybib
|
44bd9feba984ef08ad675b42affc038dd7c33809
|
[
"BSD-3-Clause"
] | 33
|
2015-01-08T17:49:32.000Z
|
2020-03-06T22:01:09.000Z
|
pybib/__init__.py
|
jgilchrist/pybib
|
44bd9feba984ef08ad675b42affc038dd7c33809
|
[
"BSD-3-Clause"
] | 15
|
2015-02-14T22:53:56.000Z
|
2021-03-01T18:53:30.000Z
|
pybib/__init__.py
|
jgilchrist/pybib
|
44bd9feba984ef08ad675b42affc038dd7c33809
|
[
"BSD-3-Clause"
] | 3
|
2015-02-14T22:13:37.000Z
|
2018-12-13T11:47:54.000Z
|
__version__ = '2.3.2'
from .formatters import format_entry
from .utils import search, get_bibtex
from .main import main
| 20.166667
| 37
| 0.785124
| 19
| 121
| 4.684211
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028846
| 0.140496
| 121
| 5
| 38
| 24.2
| 0.826923
| 0
| 0
| 0
| 0
| 0
| 0.041322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
fdd96c330b72cd44a632452a5937101cb3b8dd43
| 176
|
py
|
Python
|
nightscout/exceptions.py
|
Mryck/py-nightscout
|
9225a3028adaf31dc5b2d465c746df70b1fbb276
|
[
"MIT"
] | 2
|
2020-12-06T11:44:12.000Z
|
2022-03-29T07:52:21.000Z
|
nightscout/exceptions.py
|
Mryck/py-nightscout
|
9225a3028adaf31dc5b2d465c746df70b1fbb276
|
[
"MIT"
] | 16
|
2020-07-27T04:44:39.000Z
|
2021-12-30T04:15:08.000Z
|
nightscout/exceptions.py
|
Mryck/py-nightscout
|
9225a3028adaf31dc5b2d465c746df70b1fbb276
|
[
"MIT"
] | 1
|
2022-03-29T07:52:22.000Z
|
2022-03-29T07:52:22.000Z
|
"""Exceptions for NS."""
class NSError(Exception):
"""Generic NS exception."""
pass
class NSConnectionError(NSError):
"""NS connection exception."""
pass
| 12.571429
| 34
| 0.636364
| 17
| 176
| 6.588235
| 0.588235
| 0.232143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210227
| 176
| 13
| 35
| 13.538462
| 0.805755
| 0.369318
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
fdee3286f21f90afb8e0fc566d8d4b38055b0556
| 50
|
py
|
Python
|
src/fogml/rl/__init__.py
|
tszydlo/FogML
|
c56f5f4f1e8c13aeba7247697cc74d16d2cd2908
|
[
"Apache-2.0"
] | 21
|
2018-04-09T13:13:09.000Z
|
2022-03-24T10:11:49.000Z
|
src/fogml/rl/__init__.py
|
tszydlo/FogML
|
c56f5f4f1e8c13aeba7247697cc74d16d2cd2908
|
[
"Apache-2.0"
] | null | null | null |
src/fogml/rl/__init__.py
|
tszydlo/FogML
|
c56f5f4f1e8c13aeba7247697cc74d16d2cd2908
|
[
"Apache-2.0"
] | 7
|
2019-07-01T01:19:29.000Z
|
2022-03-12T15:28:20.000Z
|
from .qlearning import QStatesIntervals, QLearning
| 50
| 50
| 0.88
| 5
| 50
| 8.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 50
| 1
| 50
| 50
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
fdfd2fdf13c194372d09cd8ef0edb6fcdddf5226
| 227
|
py
|
Python
|
messages/compiler/exceptions.py
|
definitelyNotFBI/utt
|
1695e3a1f81848e19b042cdc4db9cf1d263c26a9
|
[
"Apache-2.0"
] | 340
|
2018-08-27T16:30:45.000Z
|
2022-03-28T14:31:44.000Z
|
messages/compiler/exceptions.py
|
definitelyNotFBI/utt
|
1695e3a1f81848e19b042cdc4db9cf1d263c26a9
|
[
"Apache-2.0"
] | 706
|
2018-09-02T17:50:32.000Z
|
2022-03-31T13:03:15.000Z
|
messages/compiler/exceptions.py
|
glevkovich/concord-bft
|
a1b7b57472f5375230428d16c613a760b33233fa
|
[
"Apache-2.0"
] | 153
|
2018-08-29T05:37:25.000Z
|
2022-03-23T14:08:45.000Z
|
class CmfParseError(Exception):
def __init__(self, parseinfo, msg):
self.message = '({}:{}) Error: {}'.format(
parseinfo.line + 1, parseinfo.pos, msg)
def __str__(self):
return self.message
| 28.375
| 51
| 0.599119
| 24
| 227
| 5.333333
| 0.666667
| 0.171875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005882
| 0.251101
| 227
| 7
| 52
| 32.428571
| 0.747059
| 0
| 0
| 0
| 0
| 0
| 0.07489
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
8befa2f2f8d23b5ec2ca8dc7f2ba97015392b337
| 190
|
py
|
Python
|
botwinick_utils/__init__.py
|
dbotwinick/botwinick_utils
|
50bf626c2a965c3a97231f3977c20d7098268b8a
|
[
"BSD-3-Clause"
] | null | null | null |
botwinick_utils/__init__.py
|
dbotwinick/botwinick_utils
|
50bf626c2a965c3a97231f3977c20d7098268b8a
|
[
"BSD-3-Clause"
] | null | null | null |
botwinick_utils/__init__.py
|
dbotwinick/botwinick_utils
|
50bf626c2a965c3a97231f3977c20d7098268b8a
|
[
"BSD-3-Clause"
] | null | null | null |
# author: Drew Botwinick, Botwinick Innovations
# title: miscellaneous utilities for python 2/3 with minimal dependencies and minimal interconnection between modules
# license: 3-clause BSD
| 47.5
| 117
| 0.821053
| 24
| 190
| 6.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018182
| 0.131579
| 190
| 3
| 118
| 63.333333
| 0.927273
| 0.963158
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8bf1d931fff58790f972b692b75db9e9118f0c5d
| 718
|
py
|
Python
|
mighty/utils/stub.py
|
dizcza/pytorch-mighty
|
942c53b529377c9100bffc2f7f20ec740763e6ae
|
[
"BSD-3-Clause"
] | 1
|
2020-11-14T20:15:07.000Z
|
2020-11-14T20:15:07.000Z
|
mighty/utils/stub.py
|
dizcza/pytorch-mighty
|
942c53b529377c9100bffc2f7f20ec740763e6ae
|
[
"BSD-3-Clause"
] | null | null | null |
mighty/utils/stub.py
|
dizcza/pytorch-mighty
|
942c53b529377c9100bffc2f7f20ec740763e6ae
|
[
"BSD-3-Clause"
] | 2
|
2021-01-15T05:52:53.000Z
|
2021-03-26T17:41:17.000Z
|
"""
Stubs
-----
.. autosummary::
:toctree: toctree/utils/
OptimizerStub
"""
from typing import Optional, Callable
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
class OptimizerStub(Optimizer):
"""
An Optimizer stub for trainers that update model weights in a gradient-free
fashion.
"""
def __init__(self):
self.param_groups = []
def step(self, closure: Optional[Callable[[], float]] = ...):
pass
def state_dict(self) -> dict:
return {}
def load_state_dict(self, state_dict: dict) -> None:
pass
class CriterionStub(nn.Module):
def forward(self, *args, **kwargs):
return torch.Tensor([0.0])
| 17.512195
| 79
| 0.63649
| 85
| 718
| 5.270588
| 0.588235
| 0.060268
| 0.058036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003663
| 0.239554
| 718
| 40
| 80
| 17.95
| 0.81685
| 0.225627
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0.125
| 0.25
| 0.125
| 0.8125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
8bf262d737224eefc7105efefb24d8a773ccbf51
| 21,131
|
py
|
Python
|
python/api/app_identity_pb2_grpc.py
|
ownmfa/api
|
af48e8389d9ecc66b787aa0d261f69848c955294
|
[
"0BSD"
] | 1
|
2021-06-18T15:39:19.000Z
|
2021-06-18T15:39:19.000Z
|
python/api/app_identity_pb2_grpc.py
|
ownmfa/api
|
af48e8389d9ecc66b787aa0d261f69848c955294
|
[
"0BSD"
] | null | null | null |
python/api/app_identity_pb2_grpc.py
|
ownmfa/api
|
af48e8389d9ecc66b787aa0d261f69848c955294
|
[
"0BSD"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from api import app_identity_pb2 as api_dot_app__identity__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class AppIdentityServiceStub(object):
"""AppIdentityService contains functions to query and modify applications and identities.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateApp = channel.unary_unary(
'/ownmfa.api.AppIdentityService/CreateApp',
request_serializer=api_dot_app__identity__pb2.CreateAppRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.App.FromString,
)
self.CreateIdentity = channel.unary_unary(
'/ownmfa.api.AppIdentityService/CreateIdentity',
request_serializer=api_dot_app__identity__pb2.CreateIdentityRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.CreateIdentityResponse.FromString,
)
self.ActivateIdentity = channel.unary_unary(
'/ownmfa.api.AppIdentityService/ActivateIdentity',
request_serializer=api_dot_app__identity__pb2.ActivateIdentityRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.Identity.FromString,
)
self.ChallengeIdentity = channel.unary_unary(
'/ownmfa.api.AppIdentityService/ChallengeIdentity',
request_serializer=api_dot_app__identity__pb2.ChallengeIdentityRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.VerifyIdentity = channel.unary_unary(
'/ownmfa.api.AppIdentityService/VerifyIdentity',
request_serializer=api_dot_app__identity__pb2.VerifyIdentityRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetApp = channel.unary_unary(
'/ownmfa.api.AppIdentityService/GetApp',
request_serializer=api_dot_app__identity__pb2.GetAppRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.App.FromString,
)
self.GetIdentity = channel.unary_unary(
'/ownmfa.api.AppIdentityService/GetIdentity',
request_serializer=api_dot_app__identity__pb2.GetIdentityRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.Identity.FromString,
)
self.UpdateApp = channel.unary_unary(
'/ownmfa.api.AppIdentityService/UpdateApp',
request_serializer=api_dot_app__identity__pb2.UpdateAppRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.App.FromString,
)
self.DeleteApp = channel.unary_unary(
'/ownmfa.api.AppIdentityService/DeleteApp',
request_serializer=api_dot_app__identity__pb2.DeleteAppRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteIdentity = channel.unary_unary(
'/ownmfa.api.AppIdentityService/DeleteIdentity',
request_serializer=api_dot_app__identity__pb2.DeleteIdentityRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListApps = channel.unary_unary(
'/ownmfa.api.AppIdentityService/ListApps',
request_serializer=api_dot_app__identity__pb2.ListAppsRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.ListAppsResponse.FromString,
)
self.ListIdentities = channel.unary_unary(
'/ownmfa.api.AppIdentityService/ListIdentities',
request_serializer=api_dot_app__identity__pb2.ListIdentitiesRequest.SerializeToString,
response_deserializer=api_dot_app__identity__pb2.ListIdentitiesResponse.FromString,
)
class AppIdentityServiceServicer(object):
"""AppIdentityService contains functions to query and modify applications and identities.
"""
def CreateApp(self, request, context):
"""Create an application. Applications contain external user identities and common settings for authentication methods.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateIdentity(self, request, context):
"""Create an identity. Identities are used to challenge, activate, and verify external users.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ActivateIdentity(self, request, context):
"""Activate an identity following a challenge.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ChallengeIdentity(self, request, context):
"""Issue a challenge to an identity.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def VerifyIdentity(self, request, context):
"""Verify an identity following a challenge.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetApp(self, request, context):
"""Get an application by ID. Applications contain external user identities and common settings for authentication methods.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIdentity(self, request, context):
"""Get an identity by ID. Identities are used to challenge, activate, and verify external users.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateApp(self, request, context):
"""Update an application. Applications contain external user identities and common settings for authentication methods.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteApp(self, request, context):
"""Delete an application by ID. Applications contain external user identities and common settings for authentication methods.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteIdentity(self, request, context):
"""Delete an identity by ID. Identities are used to challenge, activate, and verify external users.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
    def ListApps(self, request, context):
        """List all applications. Applications contain external user identities and common settings for authentication methods.
        """
        # Generated placeholder: report UNIMPLEMENTED to the client, then raise locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
    def ListIdentities(self, request, context):
        """List identities. Identities are used to challenge, activate, and verify external users.
        """
        # Generated placeholder: report UNIMPLEMENTED to the client, then raise locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_AppIdentityServiceServicer_to_server(servicer, server):
    """Register every AppIdentityService RPC handler from `servicer` on `server` (generated code)."""
    # Maps each method name to a unary-unary handler wrapping the servicer
    # method with its protobuf request deserializer / response serializer.
    rpc_method_handlers = {
            'CreateApp': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateApp,
                    request_deserializer=api_dot_app__identity__pb2.CreateAppRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.App.SerializeToString,
            ),
            'CreateIdentity': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateIdentity,
                    request_deserializer=api_dot_app__identity__pb2.CreateIdentityRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.CreateIdentityResponse.SerializeToString,
            ),
            'ActivateIdentity': grpc.unary_unary_rpc_method_handler(
                    servicer.ActivateIdentity,
                    request_deserializer=api_dot_app__identity__pb2.ActivateIdentityRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.Identity.SerializeToString,
            ),
            'ChallengeIdentity': grpc.unary_unary_rpc_method_handler(
                    servicer.ChallengeIdentity,
                    request_deserializer=api_dot_app__identity__pb2.ChallengeIdentityRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'VerifyIdentity': grpc.unary_unary_rpc_method_handler(
                    servicer.VerifyIdentity,
                    request_deserializer=api_dot_app__identity__pb2.VerifyIdentityRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'GetApp': grpc.unary_unary_rpc_method_handler(
                    servicer.GetApp,
                    request_deserializer=api_dot_app__identity__pb2.GetAppRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.App.SerializeToString,
            ),
            'GetIdentity': grpc.unary_unary_rpc_method_handler(
                    servicer.GetIdentity,
                    request_deserializer=api_dot_app__identity__pb2.GetIdentityRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.Identity.SerializeToString,
            ),
            'UpdateApp': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateApp,
                    request_deserializer=api_dot_app__identity__pb2.UpdateAppRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.App.SerializeToString,
            ),
            'DeleteApp': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteApp,
                    request_deserializer=api_dot_app__identity__pb2.DeleteAppRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'DeleteIdentity': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteIdentity,
                    request_deserializer=api_dot_app__identity__pb2.DeleteIdentityRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'ListApps': grpc.unary_unary_rpc_method_handler(
                    servicer.ListApps,
                    request_deserializer=api_dot_app__identity__pb2.ListAppsRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.ListAppsResponse.SerializeToString,
            ),
            'ListIdentities': grpc.unary_unary_rpc_method_handler(
                    servicer.ListIdentities,
                    request_deserializer=api_dot_app__identity__pb2.ListIdentitiesRequest.FromString,
                    response_serializer=api_dot_app__identity__pb2.ListIdentitiesResponse.SerializeToString,
            ),
    }
    # A single generic handler routes all methods under the fully-qualified service name.
    generic_handler = grpc.method_handlers_generic_handler(
            'ownmfa.api.AppIdentityService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class AppIdentityService(object):
    """AppIdentityService contains functions to query and modify applications and identities.
    """
    # Generated gRPC "experimental API" client stubs: each static method performs
    # a standalone unary-unary RPC against `target` without a pre-built channel.

    @staticmethod
    def CreateApp(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/CreateApp',
            api_dot_app__identity__pb2.CreateAppRequest.SerializeToString,
            api_dot_app__identity__pb2.App.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CreateIdentity(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/CreateIdentity',
            api_dot_app__identity__pb2.CreateIdentityRequest.SerializeToString,
            api_dot_app__identity__pb2.CreateIdentityResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ActivateIdentity(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/ActivateIdentity',
            api_dot_app__identity__pb2.ActivateIdentityRequest.SerializeToString,
            api_dot_app__identity__pb2.Identity.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ChallengeIdentity(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/ChallengeIdentity',
            api_dot_app__identity__pb2.ChallengeIdentityRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def VerifyIdentity(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/VerifyIdentity',
            api_dot_app__identity__pb2.VerifyIdentityRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetApp(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/GetApp',
            api_dot_app__identity__pb2.GetAppRequest.SerializeToString,
            api_dot_app__identity__pb2.App.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetIdentity(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/GetIdentity',
            api_dot_app__identity__pb2.GetIdentityRequest.SerializeToString,
            api_dot_app__identity__pb2.Identity.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateApp(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/UpdateApp',
            api_dot_app__identity__pb2.UpdateAppRequest.SerializeToString,
            api_dot_app__identity__pb2.App.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteApp(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/DeleteApp',
            api_dot_app__identity__pb2.DeleteAppRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteIdentity(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/DeleteIdentity',
            api_dot_app__identity__pb2.DeleteIdentityRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListApps(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/ListApps',
            api_dot_app__identity__pb2.ListAppsRequest.SerializeToString,
            api_dot_app__identity__pb2.ListAppsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListIdentities(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ownmfa.api.AppIdentityService/ListIdentities',
            api_dot_app__identity__pb2.ListIdentitiesRequest.SerializeToString,
            api_dot_app__identity__pb2.ListIdentitiesResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 47.378924
| 133
| 0.671904
| 1,937
| 21,131
| 6.96541
| 0.073826
| 0.050548
| 0.064334
| 0.07686
| 0.865698
| 0.853839
| 0.791506
| 0.619626
| 0.596279
| 0.596279
| 0
| 0.004852
| 0.258719
| 21,131
| 445
| 134
| 47.485393
| 0.856486
| 0.080356
| 0
| 0.587131
| 1
| 0
| 0.090472
| 0.054604
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069705
| false
| 0
| 0.008043
| 0.032172
| 0.117962
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8bfe3756dbba1980aba004685ead18cda9752e7e
| 225
|
py
|
Python
|
psyneulink/library/components/mechanisms/modulatory/control/agt/__init__.py
|
JeshuaT/PsyNeuLink
|
912f691028e848659055430f37b6c15273c762f1
|
[
"Apache-2.0"
] | 67
|
2018-01-05T22:18:44.000Z
|
2022-03-27T11:27:31.000Z
|
psyneulink/library/components/mechanisms/modulatory/control/agt/__init__.py
|
JeshuaT/PsyNeuLink
|
912f691028e848659055430f37b6c15273c762f1
|
[
"Apache-2.0"
] | 1,064
|
2017-12-01T18:58:27.000Z
|
2022-03-31T22:22:24.000Z
|
psyneulink/library/components/mechanisms/modulatory/control/agt/__init__.py
|
JeshuaT/PsyNeuLink
|
912f691028e848659055430f37b6c15273c762f1
|
[
"Apache-2.0"
] | 25
|
2017-12-01T20:27:07.000Z
|
2022-03-08T21:49:39.000Z
|
from . import agtcontrolmechanism
from . import lccontrolmechanism
from .agtcontrolmechanism import *
from .lccontrolmechanism import *
# Re-export everything the two submodules declare public.
__all__ = [*agtcontrolmechanism.__all__, *lccontrolmechanism.__all__]
| 25
| 43
| 0.84
| 20
| 225
| 8.65
| 0.35
| 0.115607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097778
| 225
| 8
| 44
| 28.125
| 0.852217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e305e71ac55bcc04d2c26d20df46110ac96f7f50
| 3,539
|
py
|
Python
|
venv1/Lib/site-packages/tensorflow/python/estimator/estimator_lib.py
|
Soum-Soum/Tensorflow_Face_Finder
|
fec6c15d2df7012608511ad87f4b55731bf99478
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
venv1/Lib/site-packages/tensorflow/python/estimator/estimator_lib.py
|
Soum-Soum/Tensorflow_Face_Finder
|
fec6c15d2df7012608511ad87f4b55731bf99478
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-05-20T00:58:04.000Z
|
2021-05-20T00:58:04.000Z
|
venv1/Lib/site-packages/tensorflow/python/estimator/estimator_lib.py
|
Soum-Soum/Tensorflow_Face_Finder
|
fec6c15d2df7012608511ad87f4b55731bf99478
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimator: High level tools for working with models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.python.estimator.canned.baseline import BaselineClassifier
from tensorflow.python.estimator.canned.baseline import BaselineRegressor
from tensorflow.python.estimator.canned.dnn import DNNClassifier
from tensorflow.python.estimator.canned.dnn import DNNRegressor
from tensorflow.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedClassifier
from tensorflow.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedRegressor
from tensorflow.python.estimator.canned.linear import LinearClassifier
from tensorflow.python.estimator.canned.linear import LinearRegressor
from tensorflow.python.estimator.canned.parsing_utils import classifier_parse_example_spec
from tensorflow.python.estimator.canned.parsing_utils import regressor_parse_example_spec
from tensorflow.python.estimator.estimator import Estimator
from tensorflow.python.estimator.export import export_lib as export
from tensorflow.python.estimator.exporter import Exporter
from tensorflow.python.estimator.exporter import FinalExporter
from tensorflow.python.estimator.exporter import LatestExporter
from tensorflow.python.estimator.inputs import inputs
from tensorflow.python.estimator.model_fn import EstimatorSpec
from tensorflow.python.estimator.model_fn import ModeKeys
from tensorflow.python.estimator.run_config import RunConfig
from tensorflow.python.estimator.training import EvalSpec
from tensorflow.python.estimator.training import train_and_evaluate
from tensorflow.python.estimator.training import TrainSpec
from tensorflow.python.estimator.warm_starting_util import VocabInfo
from tensorflow.python.estimator.warm_starting_util import WarmStartSettings
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
# Public API surface of this module; remove_undocumented() below strips every
# attribute not listed here, so anything imported above but omitted here is
# intentionally private.
_allowed_symbols = [
    # Canned Estimators
    'BaselineClassifier',
    'BaselineRegressor',
    'DNNClassifier',
    'DNNRegressor',
    'DNNLinearCombinedClassifier',
    'DNNLinearCombinedRegressor',
    'LinearClassifier',
    'LinearRegressor',
    # I/O
    'classifier_parse_example_spec',
    'regressor_parse_example_spec',
    'inputs',
    'export',
    # Estimator
    'Estimator',
    'EstimatorSpec',
    'ModeKeys',
    'RunConfig',
    # Training utilities
    'train_and_evaluate',
    'EvalSpec',
    'TrainSpec',
    'Exporter',
    'LatestExporter',
    'FinalExporter',
    # Warm-starting
    'WarmStartSettings',
    'VocabInfo',
]
# Prune all module attributes except the allowed symbols above.
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
| 40.215909
| 95
| 0.7731
| 396
| 3,539
| 6.765152
| 0.358586
| 0.130646
| 0.186637
| 0.259798
| 0.403509
| 0.403509
| 0.307204
| 0.120941
| 0.0433
| 0
| 0
| 0.002622
| 0.137892
| 3,539
| 87
| 96
| 40.678161
| 0.875451
| 0.253744
| 0
| 0
| 0
| 0
| 0.137658
| 0.043513
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.509091
| 0
| 0.509091
| 0.018182
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e306e1019d79e331fecb238d0e52fb89043ce700
| 123
|
py
|
Python
|
ginger/scripts/templates/project_templates/project_name/main/urls.py
|
vivsh/django-ginger
|
d293109becc72845a23f2aeb732ed808a7a67d69
|
[
"MIT"
] | null | null | null |
ginger/scripts/templates/project_templates/project_name/main/urls.py
|
vivsh/django-ginger
|
d293109becc72845a23f2aeb732ed808a7a67d69
|
[
"MIT"
] | null | null | null |
ginger/scripts/templates/project_templates/project_name/main/urls.py
|
vivsh/django-ginger
|
d293109becc72845a23f2aeb732ed808a7a67d69
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url, patterns
from ginger.conf.urls import scan
from . import views
# Build the URL patterns by scanning the views module (ginger helper);
# presumably each qualifying view contributes its own pattern — confirm against ginger.conf.urls.scan.
urlpatterns = scan(views)
| 24.6
| 42
| 0.796748
| 19
| 123
| 5.157895
| 0.578947
| 0.163265
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130081
| 123
| 5
| 43
| 24.6
| 0.915888
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e31b0c2855890d4d9313601aa1eb20629426a6c4
| 222
|
py
|
Python
|
Inprocessing/Thomas/Python/core/optimizers/__init__.py
|
maliha93/Fairness-Analysis-Code
|
acf13c6e7993704fc627249fe4ada44d8b616264
|
[
"MIT"
] | 9
|
2020-07-09T07:32:57.000Z
|
2022-02-25T12:21:28.000Z
|
Inprocessing/Thomas/Python/core/optimizers/__init__.py
|
maliha93/Fairness-Analysis-Code
|
acf13c6e7993704fc627249fe4ada44d8b616264
|
[
"MIT"
] | null | null | null |
Inprocessing/Thomas/Python/core/optimizers/__init__.py
|
maliha93/Fairness-Analysis-Code
|
acf13c6e7993704fc627249fe4ada44d8b616264
|
[
"MIT"
] | 3
|
2020-07-09T12:35:09.000Z
|
2021-09-06T13:54:19.000Z
|
class SMLAOptimizer:
    """Marker base class for optimizers.

    Concrete optimizers register themselves simply by subclassing; the
    module-level OPTIMIZERS registry is built from __subclasses__().
    """
from .linear_shatter import LinearShatterBFOptimizer
from .cma import CMAESOptimizer
from .bfgs import BFGSOptimizer
# Registry of available optimizer classes, keyed by their CLI identifier.
OPTIMIZERS = {optimizer.cli_key(): optimizer for optimizer in SMLAOptimizer.__subclasses__()}
| 31.714286
| 76
| 0.81982
| 26
| 222
| 6.769231
| 0.730769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117117
| 222
| 7
| 76
| 31.714286
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
e3298358c9f89977f2494752dcf1c4427b3ddbaa
| 265
|
py
|
Python
|
DA2Lite/data/aug.py
|
nota-github/DA2Lite
|
d7cdcf9d1e409f11ba1c5ece41f8b4c20db44aab
|
[
"MIT"
] | 9
|
2021-04-04T05:02:36.000Z
|
2022-03-22T08:42:23.000Z
|
DA2Lite/data/aug.py
|
nota-github/DA2Lite
|
d7cdcf9d1e409f11ba1c5ece41f8b4c20db44aab
|
[
"MIT"
] | 2
|
2022-03-14T05:20:51.000Z
|
2022-03-15T02:35:36.000Z
|
DA2Lite/data/aug.py
|
nota-github/DA2Lite
|
d7cdcf9d1e409f11ba1c5ece41f8b4c20db44aab
|
[
"MIT"
] | 6
|
2021-04-30T09:26:13.000Z
|
2022-01-27T11:25:50.000Z
|
import torchvision.transforms as transforms
def common(img_size):
    """Shared spatial augmentations: random crop (padded by one-eighth of
    the image size) followed by a random horizontal flip."""
    crop = transforms.RandomCrop(img_size, padding=img_size // 8)
    flip = transforms.RandomHorizontalFlip()
    return [crop, flip]
def normalize(mean, std):
    """Tensor conversion followed by channel-wise normalization with the
    given per-channel mean and std."""
    return [
        transforms.ToTensor(),
        transforms.Normalize(mean, std),
    ]
| 26.5
| 100
| 0.773585
| 31
| 265
| 6.516129
| 0.548387
| 0.10396
| 0.158416
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004237
| 0.109434
| 265
| 9
| 101
| 29.444444
| 0.851695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
e329d98b2447f952cddeaea5b031682c96c737f7
| 9,256
|
py
|
Python
|
qcm/Box1NewCalibNewBoard.py
|
golfit/work-archive
|
bdd37d46fda3fde15ec2164d3335d6b4ed576bd7
|
[
"MIT"
] | null | null | null |
qcm/Box1NewCalibNewBoard.py
|
golfit/work-archive
|
bdd37d46fda3fde15ec2164d3335d6b4ed576bd7
|
[
"MIT"
] | null | null | null |
qcm/Box1NewCalibNewBoard.py
|
golfit/work-archive
|
bdd37d46fda3fde15ec2164d3335d6b4ed576bd7
|
[
"MIT"
] | null | null | null |
#Board #1 in Box #1 blew up (one of the 220 pF caps smoked and left a crater in the board). Needed a new calibration (below).
#This calibration was "in-place", with board being digitized on scope in parallel with digitizer.
#Ted Golfinopoulos, 12 June 2012
from numpy import *
#Spectrogram method
#Frequency points [Hz]
fspS=[39848.197, 44205.496, 48773.631, 53130.930, 57628.786, 62056.364, 66483.941, 70911.519, 75198.538, 79555.837, 83913.135, 88340.713, 92698.011, 97055.310,101412.608,105840.186,110127.205,114484.503,118701.244,122917.984,127345.562,131632.581,135779.043,140136.341,144423.361,148710.380,152856.842,157073.582,161360.602,165507.063,169723.803,174010.823,178157.284,182374.025,186590.765,190737.227,194953.967,199100.429,203246.890,207252.794,211469.534,215615.995,219692.178,223979.197,227914.822,232201.841,236137.466,240354.206,244360.110,248366.013,252442.196,256588.657,260664.839,264600.464,268676.646,272682.550,276758.732,280764.636,284700.260,288776.442,292782.346,296717.970,300653.595,304870.335]
#Transfer function, V_scope_probe/V_voltage_div
#Real part
HspSr=[191.26254, 198.01742, 201.17052, 203.18420, 204.71376, 205.98569, 207.61547, 208.68802, 209.57683, 210.30838, 210.93110, 211.47041, 211.93264, 212.32351, 212.68729, 212.98587, 213.24067, 213.44697, 213.63552, 213.80984, 213.96282, 214.10045, 214.21865, 214.33202, 214.41524, 214.50015, 214.58833, 214.69523, 214.76679, 214.83040, 214.89192, 214.94749, 214.99882, 215.03426, 215.09245, 215.14477, 215.19129, 215.20974, 215.24076, 215.28380, 215.32555, 215.35654, 215.35454, 215.37624, 215.40131, 215.43496, 215.46607, 215.48598, 215.54929, 215.56604, 215.57691, 215.60282, 215.64791, 215.65759, 215.68756, 215.67506, 215.66687, 215.72782, 215.79551, 215.85055, 215.91030, 215.99862, 216.38318, 216.78124]
#Imaginary part
HspSi=[-4.63175, -0.08654, 0.50919, 0.85536, 1.04255, 1.20460, 1.42934, 1.48848, 1.53804, 1.54055, 1.50699, 1.45542, 1.39556, 1.33844, 1.24601, 1.13232, 1.02199, 0.91487, 0.78762, 0.65762, 0.52755, 0.38310, 0.24262, 0.10652, -0.01348, -0.13699, -0.26757, -0.40378, -0.52324, -0.65134, -0.76128, -0.86670, -0.98808, -1.11034, -1.20955, -1.31116, -1.40821, -1.49787, -1.58650, -1.65523, -1.73567, -1.83708, -1.94412, -2.02988, -2.09223, -2.13279, -2.21099, -2.28356, -2.30856, -2.39815, -2.45199, -2.47496, -2.51340, -2.55553, -2.58020, -2.66953, -2.67027, -2.70819, -2.80570, -2.83342, -2.84863, -2.94976, -3.05185, -3.73391]
#Synchronous detection method
#Frequency points [Hz]
fsdS=[41889.73810,44114.57039,47848.93812,52576.46410,55630.48484,58185.95508,61647.89654,65086.55562,68758.19769,71854.76215,73788.48172,76403.06734,78968.42664,80720.61094,82550.04296,84456.04357,86301.06141,88075.81920,89906.79700,92543.52583,93853.35854,95387.11950,96832.77403,98878.78108,100226.63678,102008.50218,103285.18041,104611.46328,106293.18119,107857.89442,109560.34787,111524.48155,112806.30551,114115.44864,115722.77062,116743.02336,118426.11533,119554.64387,121166.87538,122445.24462,123850.19694,125370.84030,126554.69702,128129.17313,129331.58832,130886.41118,132332.29957,133671.36082,135343.03903,136353.42075,137962.71974,139478.48858,140607.37864,141951.44914,143239.40949,144849.99466,145864.05986,147523.93011,148519.18696,150167.16031,151207.41134,152785.08675,153905.70187,155389.27588,156635.32446,157983.32515,159338.73835,160594.89927,162044.90213,163209.85761,164730.23503,165854.74141,167414.51420,168471.84837,170094.20024,171121.67727,172767.10690,173762.37030,175424.37537,176444.83816,178048.29137,179346.71238,180733.12052,182063.50219,183366.07825,184925.72418,186082.94198,187372.12506,188723.76542,190450.61857,191410.03891,192906.03763,194063.58325,195676.80808,196747.68932,198268.84665,199510.00387,201033.14222,202016.42480,203623.69902,204645.08953,205890.00058,207269.38759,208615.85831,209879.89494,211310.13576,212483.10033,214004.51966,215353.57963,216539.09684,217985.63870,219287.61457,220582.77522,222140.55578,223406.64113,224984.75222,226059.38570,227196.52046,228710.19944,229937.76715,231285.77171,232656.30861,233928.78790,234999.81665,236704.32710,237711.37390,239342.39113,240301.91430,241859.54354,242990.45559,244540.31688,245631.91714,246748.06557,248294.94705,249500.34686,250871.93779,251873.82073,253426.66046,254649.47742,256117.61620,257130.02311,258381.56639,259731.31614,261108.56104,262373.94921,263382.18144,264995.69191,266069.77761,267631.85195,268689.51661,270162.44340,271366.20556,272385.42656,274037.76940,275040.20709,276
652.34758,277567.45101,279038.23214,280236.04348,281275.67874,282901.01576,283804.38892,285375.89601,286542.91787,288073.72344,289206.56275,290301.33009,291858.28657,292775.29523,294281.02317,295471.24771,296492.75323,298106.86382,299027.04053,300489.57655,301639.96119,303175.37302,304360.18923]
#Transfer function
#Real part
HsdSr=[196.53879, 199.18392, 201.69070, 203.30593, 204.49780, 205.56614, 206.79198, 207.76644, 208.48037, 209.05719, 209.58141, 209.98065, 210.37372, 210.69497, 210.96303, 211.17010, 211.36943, 211.57581, 211.74636, 211.90828, 212.02617, 212.19678, 212.29345, 212.41293, 212.51852, 212.62396, 212.74413, 212.84941, 212.93356, 213.02276, 213.12527, 213.20208, 213.28531, 213.35673, 213.41726, 213.48860, 213.52185, 213.53816, 213.59489, 213.65151, 213.66502, 213.70376, 213.75223, 213.75419, 213.79083, 213.81598, 213.85917, 213.88998, 213.92682, 213.95177, 213.99126, 214.02439, 214.06718, 214.10340, 214.12026, 214.15676, 214.19512, 214.23830, 214.27774, 214.32013, 214.35855, 214.41710, 214.44822, 214.49276, 214.53605, 214.55868, 214.56727, 214.58274, 214.60403, 214.59460, 214.59993, 214.59808, 214.64450, 214.66942, 214.67845, 214.70811, 214.75616, 214.75387, 214.77383, 214.82000, 214.83769, 214.82679, 214.82142, 214.84407, 214.84184, 214.83960, 214.83894, 214.87904, 214.88703, 214.89154, 214.91759, 214.96515, 214.97141, 214.96656, 214.96964, 214.98767, 215.03386, 215.04875, 215.05032, 215.06714, 215.07055, 215.08905, 215.11242, 215.12185, 215.14278, 215.13192, 215.11159, 215.12790, 215.15306, 215.14695, 215.13593, 215.12177, 215.13150, 215.18748, 215.21079, 215.21898, 215.23220, 215.23135, 215.23134, 215.25027, 215.27058, 215.23429, 215.16008, 215.09985, 215.10651, 215.12712, 215.12001, 215.13032, 215.14900, 215.15953, 215.17604, 215.23820, 215.33278, 215.41224, 215.41457, 215.38893, 215.37476, 215.34615, 215.35344, 215.40473, 215.42633, 215.45953, 215.47049, 215.42835, 215.41936, 215.43613, 215.46239, 215.49919, 215.46130, 215.40229, 215.40711, 215.41434, 215.35729, 215.32345, 215.31019, 215.29766, 215.28346, 215.29718, 215.37320, 215.40992, 215.38577, 215.33203, 215.32245, 215.33644, 215.34385, 215.35470, 215.36927, 215.36584, 215.35894, 215.41253, 215.48404, 215.57993, 215.72364, 215.87762, 216.06126, 216.23037, 216.33011, 216.46698]
#Imaginary part
HsdSi=[0.48013, 0.46394, 0.58643, 0.73235, 0.85420, 0.93000, 1.00974, 1.06786, 1.12598, 1.16796, 1.17123, 1.15566, 1.13802, 1.11648, 1.08284, 1.06132, 1.02723, 0.99291, 0.96994, 0.91811, 0.88520, 0.87737, 0.84966, 0.83064, 0.80485, 0.82189, 0.84591, 0.82743, 0.82732, 0.82238, 0.81516, 0.78624, 0.77289, 0.73104, 0.68105, 0.59878, 0.52077, 0.46157, 0.39075, 0.34165, 0.28864, 0.26452, 0.21109, 0.17632, 0.14037, 0.10965, 0.07346, 0.04278, -0.00405, -0.04434, -0.08394, -0.14825, -0.22899, -0.27810, -0.32535, -0.38267, -0.42758, -0.43847, -0.45359, -0.46124, -0.44979, -0.45249, -0.44087, -0.43950, -0.43017, -0.43963, -0.44843, -0.45299, -0.50014, -0.55429, -0.62223, -0.64534, -0.66791, -0.73258, -0.77347, -0.77142, -0.82837, -0.89300, -0.88231, -0.91004, -0.98172, -1.03538, -1.05977, -1.06772, -1.13246, -1.18273, -1.19142, -1.23486, -1.31190, -1.36214, -1.36834, -1.39656, -1.42519, -1.49098, -1.50352, -1.52363, -1.54733, -1.56802, -1.59199, -1.59652, -1.63162, -1.66058, -1.66215, -1.68052, -1.72759, -1.76353, -1.77798, -1.81904, -1.80727, -1.83267, -1.84658, -1.88831, -1.91316, -1.89597, -1.89608, -1.94648, -1.98621, -2.01880, -2.07623, -2.13167, -2.16980, -2.21415, -2.32681, -2.38085, -2.42770, -2.43285, -2.41746, -2.41513, -2.40255, -2.35804, -2.33492, -2.28133, -2.19421, -2.18898, -2.22425, -2.28833, -2.35392, -2.35043, -2.34957, -2.33023, -2.34171, -2.36791, -2.42275, -2.50625, -2.50818, -2.49049, -2.47563, -2.50965, -2.55136, -2.59632, -2.63905, -2.68625, -2.73199, -2.71832, -2.72033, -2.74107, -2.77608, -2.78321, -2.75247, -2.74547, -2.73488, -2.79893, -2.82934, -2.79028, -2.77209, -2.74381, -2.73419, -2.74010, -2.81687, -2.87704, -2.92426, -2.87660, -2.87745, -2.92901, -3.01253, -3.11826, -3.16780, -3.46678]
#Complex transfer function - multiply the Fourier transform of the output voltage of the capacitive voltage divider by this and then inverse Fourier transform to construct the correct output voltage.
# Assemble the complex transfer functions from their real/imaginary parts.
HsdS=double(HsdSr) + 1j*double(HsdSi)  # synchronous-detection method
HspS=double(HspSr) + 1j*double(HspSi)  # spectrogram method
# Save frequency vectors and transfer functions in numpy .npy format.
save('fsdSBox1', fsdS)  # frequency points [Hz], synchronous detection
save('HsdSBox1', HsdS)  # complex transfer function, synchronous detection
save('fspSBox1', fspS)  # frequency points [Hz], spectrogram method
save('HspSBox1', HspS)  # complex transfer function, spectrogram method
| 243.578947
| 2,296
| 0.713807
| 1,594
| 9,256
| 4.142409
| 0.672522
| 0.002272
| 0.006361
| 0.006967
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.709596
| 0.101556
| 9,256
| 37
| 2,297
| 250.162162
| 0.084416
| 0.072494
| 0
| 0
| 0
| 0
| 0.003735
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e334049dd16d807bd237df0ebbd6812030c27137
| 191
|
py
|
Python
|
list3/task1.py
|
luk9400/python-kurs
|
8978176be1c28d886707f1150a8843720bd83929
|
[
"MIT"
] | null | null | null |
list3/task1.py
|
luk9400/python-kurs
|
8978176be1c28d886707f1150a8843720bd83929
|
[
"MIT"
] | null | null | null |
list3/task1.py
|
luk9400/python-kurs
|
8978176be1c28d886707f1150a8843720bd83929
|
[
"MIT"
] | null | null | null |
# Demo input: a 3x3 matrix encoded as whitespace-separated row strings.
matrix = ["1.1 2.2 3.3", "4.4 5.5 6.6", "7.7 8.8 9.9"]
def transpose(matrix):
    """Transpose a text matrix given as whitespace-separated row strings.

    Each element of `matrix` is one row with cells separated by whitespace;
    the transposed rows are returned in the same format.

    The previous implementation iterated ``range(len(matrix))`` — the ROW
    count — so for non-square input it silently dropped columns (cols > rows)
    or raised IndexError (rows > cols). ``zip`` over the split rows handles
    any rectangular matrix, and an empty input yields an empty list.
    """
    rows = [row.split() for row in matrix]
    return [" ".join(column) for column in zip(*rows)]
# Print the transposed demo matrix.
print(transpose(matrix))
| 21.222222
| 83
| 0.602094
| 40
| 191
| 2.875
| 0.55
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 0.172775
| 191
| 8
| 84
| 23.875
| 0.613924
| 0
| 0
| 0
| 0
| 0
| 0.17801
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.25
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
e34f1bb1cd75e1cb148604f221f320aa87939cc1
| 4,765
|
py
|
Python
|
talon/transformer.py
|
MystPi/talon
|
70d6ece15cb6a5c9989b7cfd5e0a077a1d97e12d
|
[
"MIT"
] | 4
|
2022-01-10T00:24:52.000Z
|
2022-01-26T18:37:54.000Z
|
talon/transformer.py
|
MystPi/talon
|
70d6ece15cb6a5c9989b7cfd5e0a077a1d97e12d
|
[
"MIT"
] | 1
|
2022-01-11T16:46:30.000Z
|
2022-01-11T17:30:07.000Z
|
talon/transformer.py
|
MystPi/talon
|
70d6ece15cb6a5c9989b7cfd5e0a077a1d97e12d
|
[
"MIT"
] | null | null | null |
import lark
from . import nodes
class Transformer(lark.Transformer):
    def start(self, args):
        """Root rule: wrap all top-level statements in an Instructions node."""
        return nodes.Instructions(args)
    def codeblock(self, args):
        """A nested block of statements, wrapped as an Instructions node."""
        return nodes.Instructions(args)
def num(self, args):
num = args[0]
if '.' in num:
num = float(num)
else:
num = int(num)
return nodes.Primitive(num)
def string(self, args):
s = args[0][1:-1]
s = bytes(s, "utf-8").decode("unicode_escape")
return nodes.Primitive(s)
def var(self, args):
return nodes.Identifier(args[0])
def true(self, args):
return nodes.Primitive(True)
def false(self, args):
return nodes.Primitive(False)
def list(self, args):
# Lists are simply an Instructions object. When evaled, an
# Instructions object will return a list of all of its
# evaluated values.
return nodes.List(nodes.Instructions(args))
def list_access(self, args):
return nodes.ListAccess(args[0], args[1])
def list_slice(self, args):
return nodes.ListSlice(args[0], args[1], args[2])
def list_assign(self, args):
return nodes.ListAssign(args[0], args[1], args[2], args[3])
def range_incl(self, args):
return nodes.Range(args[0], args[1], True)
def range_excl(self, args):
return nodes.Range(args[0], args[1], False)
def neg(self, args):
return nodes.UnaryOp('-', args[0])
def abs(self, args):
return nodes.UnaryOp('+', args[0])
def not_(self, args):
return nodes.UnaryOp('!', args[0])
def add(self, args):
return nodes.BinOp('+', args[0], args[1])
def sub(self, args):
return nodes.BinOp('-', args[0], args[1])
def mul(self, args):
return nodes.BinOp('*', args[0], args[1])
def div(self, args):
return nodes.BinOp('/', args[0], args[1])
def mod(self, args):
return nodes.BinOp('%', args[0], args[1])
def pow(self, args):
return nodes.BinOp('^', args[0], args[1])
def eq(self, args):
return nodes.BinOp('==', args[0], args[1])
def neq(self, args):
return nodes.BinOp('!=', args[0], args[1])
def lt(self, args):
return nodes.BinOp('<', args[0], args[1])
def gt(self, args):
return nodes.BinOp('>', args[0], args[1])
def lteq(self, args):
return nodes.BinOp('<=', args[0], args[1])
def gteq(self, args):
return nodes.BinOp('>=', args[0], args[1])
def and_(self, args):
return nodes.BinOp('&&', args[0], args[1])
def or_(self, args):
return nodes.BinOp('||', args[0], args[1])
def assign_var(self, args):
# Variable assignments don't have to have a value, like in `this x`.
# If they don't, the value of the assignment is None.
if args[1] is None:
args[1] = nodes.Primitive(None)
return nodes.Assignment(nodes.Identifier(args[0]), args[1], new=True)
def assign_value(self, args):
if args[1] == '=':
return nodes.Assignment(nodes.Identifier(args[0]), args[2])
else:
return nodes.CompOp(nodes.Identifier(args[0]), args[1].value, args[2])
def if_(self, args):
return nodes.If(args[0], args[1])
def if_else(self, args):
return nodes.If(args[0], args[1], args[2])
def for_(self, args):
return nodes.For(nodes.Identifier(args[0].value), args[1], args[2])
def while_(self, args):
return nodes.While(args[0], args[1])
def break_(self, args):
return nodes.BreakInstruction()
def return_(self, args):
return nodes.ReturnInstruction(args[0])
def fun_def(self, args):
# Create and assign a function to a variable.
return nodes.Assignment(nodes.Identifier(args[0].value), nodes.Function(args[1], args[2]), new=True)
def anon_fun(self, args):
return nodes.Function(args[0], args[1])
def lambda_(self, args):
# Lambdas are the almost same as anonymous functions, their one difference
# being they have a single expression as their body instead of a codeblock.
# This means that we have to create a 'mini codeblock' with a single
# return instruction as the body.
return nodes.Function(args[0], nodes.Instructions([nodes.ReturnInstruction(args[1])]))
def fun_args(self, args):
# args[0] can be None, meaning there are no arguments
if args[0] is None:
temp = None
else:
temp = [nodes.Identifier(arg.value) for arg in args]
return nodes.Instructions(temp)
def fun_call(self, args):
return nodes.FunctionCall(args[0], nodes.Instructions(args[1:]))
| 29.78125
| 108
| 0.591396
| 668
| 4,765
| 4.185629
| 0.203593
| 0.173104
| 0.193133
| 0.23784
| 0.432046
| 0.366237
| 0.351574
| 0.300429
| 0.23176
| 0.185265
| 0
| 0.023309
| 0.2617
| 4,765
| 160
| 109
| 29.78125
| 0.771461
| 0.1234
| 0
| 0.047619
| 0
| 0
| 0.010562
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.409524
| false
| 0
| 0.019048
| 0.361905
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
e357f0e433a867db75a8d342b3d87467a00210ba
| 763
|
py
|
Python
|
Date module/date.py
|
Jayson7/mumswhocode-cohort2-python-files
|
1d84e121a63bd0072acabd14859c849a02919ab6
|
[
"MIT"
] | null | null | null |
Date module/date.py
|
Jayson7/mumswhocode-cohort2-python-files
|
1d84e121a63bd0072acabd14859c849a02919ab6
|
[
"MIT"
] | null | null | null |
Date module/date.py
|
Jayson7/mumswhocode-cohort2-python-files
|
1d84e121a63bd0072acabd14859c849a02919ab6
|
[
"MIT"
] | 1
|
2022-01-27T11:35:46.000Z
|
2022-01-27T11:35:46.000Z
|
"""Demonstration of the datetime module for gathering date and time values."""
# datetime imports (``d`` is a shorthand alias for datetime.datetime)
import datetime
from datetime import time, date
from datetime import datetime as d

# Useful strftime directives: %a %A %w %d %b %B %m %y %H %M %S, %p for AM/PM.
# Other examples explored in class: d.now(), d.today(), date.today(),
# datetime.datetime.now().year, .strftime("%y"), .strftime("%B").

# Current wall-clock time rendered as HH:MM:SS.
strTime = d.now().strftime("%H:%M:%S")
print(strTime)
| 17.340909
| 91
| 0.647444
| 115
| 763
| 4.295652
| 0.434783
| 0.161943
| 0.192308
| 0.218623
| 0.129555
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17038
| 763
| 43
| 92
| 17.744186
| 0.780411
| 0.693316
| 0
| 0
| 0
| 0
| 0.040404
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0.2
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e3793eacc5fa4b509034c50af8de4f55f29bf6a1
| 147
|
py
|
Python
|
reddit2telegram/channels/r_mildlyvagina/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 187
|
2016-09-20T09:15:54.000Z
|
2022-03-29T12:22:33.000Z
|
reddit2telegram/channels/r_mildlyvagina/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 84
|
2016-09-22T14:25:07.000Z
|
2022-03-19T01:26:17.000Z
|
reddit2telegram/channels/r_mildlyvagina/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 172
|
2016-09-21T15:39:39.000Z
|
2022-03-16T15:15:58.000Z
|
# encoding: utf-8

# Channel wiring for the reddit2telegram bot.
subreddit = 'mildlyvagina'
t_channel = '@r_mildlyvagina'


def send_post(submission, r2t):
    """Forward *submission* to Telegram via the r2t helper's simple sender."""
    simple_sender = r2t.send_simple
    return simple_sender(submission)
| 16.333333
| 38
| 0.755102
| 19
| 147
| 5.631579
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023438
| 0.129252
| 147
| 8
| 39
| 18.375
| 0.8125
| 0.095238
| 0
| 0
| 0
| 0
| 0.204545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
8b6f0c1fbe183cf73cf56e464058b503cbcc90ba
| 160
|
py
|
Python
|
show_time.py
|
jinglong485/Echeveria-project
|
a8a1c935d196e948d0d5b5b8080c492cb8bc738c
|
[
"BSD-3-Clause"
] | null | null | null |
show_time.py
|
jinglong485/Echeveria-project
|
a8a1c935d196e948d0d5b5b8080c492cb8bc738c
|
[
"BSD-3-Clause"
] | null | null | null |
show_time.py
|
jinglong485/Echeveria-project
|
a8a1c935d196e948d0d5b5b8080c492cb8bc738c
|
[
"BSD-3-Clause"
] | null | null | null |
# Endless clock: print the current local time once per second, in two formats.
import time

while True:
    # Compact timestamp, e.g. 235959 (hours/minutes/seconds, no separators).
    print(time.strftime('%H%M%S',time.localtime()))
    # Full timestamp, e.g. 2024-01-31_23:59:59.
    print(time.strftime("%Y-%m-%d_%H:%M:%S",time.localtime()))
    time.sleep(1)
| 32
| 63
| 0.61875
| 26
| 160
| 3.769231
| 0.538462
| 0.183673
| 0.346939
| 0.142857
| 0.326531
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007246
| 0.1375
| 160
| 5
| 64
| 32
| 0.702899
| 0
| 0
| 0
| 0
| 0
| 0.146497
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8b71023979301f638fb0a9db02292f0243947bf8
| 107
|
py
|
Python
|
Kattis/sorttwonumbers/anurag_solution.py
|
rawat9/dsa-prep
|
9d5fec1b4ec1268e2c526c62b989b2c136b4e7f5
|
[
"MIT"
] | 1
|
2022-02-20T00:28:42.000Z
|
2022-02-20T00:28:42.000Z
|
Kattis/sorttwonumbers/anurag_solution.py
|
rawat9/dsa-prep
|
9d5fec1b4ec1268e2c526c62b989b2c136b4e7f5
|
[
"MIT"
] | null | null | null |
Kattis/sorttwonumbers/anurag_solution.py
|
rawat9/dsa-prep
|
9d5fec1b4ec1268e2c526c62b989b2c136b4e7f5
|
[
"MIT"
] | null | null | null |
# Read two whitespace-separated integers from stdin and print them in
# ascending order (smaller first).
num1, num2 = map(int, input().split())
print(*sorted((num1, num2)))
| 15.285714
| 44
| 0.635514
| 17
| 107
| 4
| 0.588235
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 0.168224
| 107
| 6
| 45
| 17.833333
| 0.674157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8b74e01dfe6eed956b0930b8019c38cfd240fffd
| 533
|
py
|
Python
|
fiepipeunreal4/shell/installs.py
|
leith-bartrich/fiellc
|
1c02690538f442dede5d6afc8926355cb2ac838e
|
[
"MIT"
] | null | null | null |
fiepipeunreal4/shell/installs.py
|
leith-bartrich/fiellc
|
1c02690538f442dede5d6afc8926355cb2ac838e
|
[
"MIT"
] | null | null | null |
fiepipeunreal4/shell/installs.py
|
leith-bartrich/fiellc
|
1c02690538f442dede5d6afc8926355cb2ac838e
|
[
"MIT"
] | null | null | null |
from fiepipedesktoplib.assetaspect.shell.simpleapplication import AbstractSimpleApplicationCommand
from fiepipedesktoplib.shells.ui.abspath_input_ui import AbspathInputDefaultUI
from fiepipeunreal4.data.installs import Unreal4Install
from fiepipeunreal4.routines.installs import Unreal4InstallsRoutines
class Unreal4InstallsCommand(AbstractSimpleApplicationCommand[Unreal4Install]):
    """Shell command for managing Unreal 4 install entries via the simple-application aspect."""

    def get_routines(self) -> Unreal4InstallsRoutines:
        # Build the routines with this command's feedback UI and a default
        # absolute-path input UI bound to this shell instance.
        return Unreal4InstallsRoutines(self.get_feedback_ui(), AbspathInputDefaultUI(self))
| 48.454545
| 98
| 0.87242
| 45
| 533
| 10.222222
| 0.555556
| 0.091304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016227
| 0.075047
| 533
| 10
| 99
| 53.3
| 0.916836
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.571429
| 0.142857
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
8b7ce55f0dca928c4c67a04e6c155dc1965e299e
| 722
|
py
|
Python
|
nahamconCTF/crypto/Ooo-la-la/solve.py
|
deut-erium/WriteUps
|
36b4193f5fab9f95527a48626ecba631d5a03796
|
[
"MIT"
] | 11
|
2020-06-06T05:28:27.000Z
|
2022-01-09T00:42:49.000Z
|
2020/nahamconCTF/crypto/Ooo-la-la/solve.py
|
CSEA-IITB/WriteUps
|
46e7f36b0c4ef182cbaf375fd10fda954b6667a0
|
[
"MIT"
] | 1
|
2020-09-06T18:19:55.000Z
|
2020-09-06T18:19:55.000Z
|
nahamconCTF/crypto/Ooo-la-la/solve.py
|
deut-erium/WriteUps
|
36b4193f5fab9f95527a48626ecba631d5a03796
|
[
"MIT"
] | 6
|
2020-06-06T05:36:43.000Z
|
2021-08-11T10:17:18.000Z
|
# RSA challenge parameters: N = p * q (p and q are close primes), public
# exponent e, ciphertext c.
p = 1830213987675567884451892843232991595746198390911664175679946063194531096037459873211879206428207
q = 1830213987675567884451892843232991595746198390911664175679946063194531096037459873211879206428213
N = 3349683240683303752040100187123245076775802838668125325785318315004398778586538866210198083573169673444543518654385038484177110828274648967185831623610409867689938609495858551308025785883804091
e = 65537
c = 87760575554266991015431110922576261532159376718765701749513766666239189012106797683148334771446801021047078003121816710825033894805743112580942399985961509685534309879621205633997976721084983

# Private exponent: modular inverse of e modulo phi(N) = (p-1)(q-1).
# Python 3.8+'s three-argument pow computes modular inverses natively
# (pow(e, -1, m)), so the third-party gmpy2 dependency is unnecessary.
d = pow(e, -1, (p - 1) * (q - 1))
# Decrypt, then decode the plaintext integer's hex bytes as text (the flag).
pt = pow(c, d, N)
print(bytes.fromhex(hex(pt)[2:]).decode())
| 55.538462
| 197
| 0.911357
| 33
| 722
| 19.939394
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.848266
| 0.041551
| 722
| 12
| 198
| 60.166667
| 0.102601
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.111111
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8b7e1a93b948544923d82d4f51dea8cd375d06fc
| 103
|
py
|
Python
|
fruit/mixin/meta.py
|
felko/fruit
|
4768fd333ac3b7c0bd6d339304b23e20e312d2d1
|
[
"MIT"
] | 4
|
2017-06-14T14:50:05.000Z
|
2019-07-29T16:51:24.000Z
|
fruit/mixin/meta.py
|
felko/fruit
|
4768fd333ac3b7c0bd6d339304b23e20e312d2d1
|
[
"MIT"
] | null | null | null |
fruit/mixin/meta.py
|
felko/fruit
|
4768fd333ac3b7c0bd6d339304b23e20e312d2d1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.4
# coding: utf-8


class MixinMeta(type):
    """
    Metaclass for the mixin hierarchy; an abstract marker with no behavior
    of its own.
    """
| 11.444444
| 26
| 0.650485
| 14
| 103
| 4.785714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034884
| 0.165049
| 103
| 8
| 27
| 12.875
| 0.744186
| 0.61165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
8b88c69f15ace5079d5737efe3b92847fbe1a9fa
| 69
|
py
|
Python
|
doc2.py
|
semilleroecusta/machine_learning
|
0c943d39c8eff9a63dba39a85518ff0e4fd4c06c
|
[
"MIT"
] | null | null | null |
doc2.py
|
semilleroecusta/machine_learning
|
0c943d39c8eff9a63dba39a85518ff0e4fd4c06c
|
[
"MIT"
] | null | null | null |
doc2.py
|
semilleroecusta/machine_learning
|
0c943d39c8eff9a63dba39a85518ff0e4fd4c06c
|
[
"MIT"
] | null | null | null |
def main():
    """Entry point: print a Spanish hello-world greeting."""
    print('Hola, mundo')


if __name__ == '__main__':
    main()
| 11.5
| 25
| 0.623188
| 9
| 69
| 3.888889
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15942
| 69
| 5
| 26
| 13.8
| 0.603448
| 0
| 0
| 0
| 0
| 0
| 0.275362
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8b8fce29dcf3c31dce7a8249c3922bcf5b2376c9
| 127
|
py
|
Python
|
utils/tkevents.py
|
recharge-sp/arctools
|
e345d45def76c22dacfabccfe5b44340d9bf7064
|
[
"BSD-2-Clause"
] | null | null | null |
utils/tkevents.py
|
recharge-sp/arctools
|
e345d45def76c22dacfabccfe5b44340d9bf7064
|
[
"BSD-2-Clause"
] | null | null | null |
utils/tkevents.py
|
recharge-sp/arctools
|
e345d45def76c22dacfabccfe5b44340d9bf7064
|
[
"BSD-2-Clause"
] | null | null | null |
def selectall(event):
    """Select all text in the widget that fired *event* (Tk event handler).

    Tags the whole buffer as the selection, moves the insert cursor to the
    end, and returns 'break' so Tk stops further event propagation.
    """
    widget = event.widget
    widget.tag_add("sel", "1.0", "end")
    widget.mark_set("insert", "end")
    return 'break'
| 31.75
| 43
| 0.645669
| 19
| 127
| 4.210526
| 0.789474
| 0.275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018349
| 0.141732
| 127
| 4
| 44
| 31.75
| 0.715596
| 0
| 0
| 0
| 0
| 0
| 0.179688
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8bbc340314ed70d0f724236bdd0776aeb43c0df5
| 85
|
py
|
Python
|
tccli/services/wav/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 47
|
2018-05-31T11:26:25.000Z
|
2022-03-08T02:12:45.000Z
|
tccli/services/wav/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 23
|
2018-06-14T10:46:30.000Z
|
2022-02-28T02:53:09.000Z
|
tccli/services/wav/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 22
|
2018-10-22T09:49:45.000Z
|
2022-03-30T08:06:04.000Z
|
# -*- coding: utf-8 -*-
# Re-export the wav service's CLI action dispatcher at package level.
from tccli.services.wav.wav_client import action_caller
| 21.25
| 55
| 0.694118
| 12
| 85
| 4.75
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.164706
| 85
| 4
| 56
| 21.25
| 0.788732
| 0.247059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
8bddb02d6b41bf56115731bafd37afb1223a8e55
| 844
|
py
|
Python
|
musicdl/modules/utils/misc.py
|
Cyouno/Music-Downloader
|
c1d2b3b8a5e6f8300cd62ba861209b61d243c2c2
|
[
"MIT"
] | null | null | null |
musicdl/modules/utils/misc.py
|
Cyouno/Music-Downloader
|
c1d2b3b8a5e6f8300cd62ba861209b61d243c2c2
|
[
"MIT"
] | null | null | null |
musicdl/modules/utils/misc.py
|
Cyouno/Music-Downloader
|
c1d2b3b8a5e6f8300cd62ba861209b61d243c2c2
|
[
"MIT"
] | 1
|
2020-04-27T22:54:17.000Z
|
2020-04-27T22:54:17.000Z
|
'''
Function:
    Miscellaneous utility helpers: directory checks, config loading,
    filename sanitising, and time formatting.
Author:
    Charles
WeChat public account:
    Charles的皮卡丘
'''
import os
import json


def checkDir(dirpath):
    """Ensure *dirpath* exists.

    Returns False when the directory had to be created, True when it
    already existed.
    """
    if not os.path.exists(dirpath):
        os.mkdir(dirpath)
        return False
    return True


def loadConfig(filepath='config.json'):
    """Load and return the JSON config at *filepath*.

    Fix: the original opened the file without ever closing it; a context
    manager guarantees the handle is released.
    """
    with open(filepath, 'r', encoding='utf-8') as f:
        return json.load(f)


def filterBadCharacter(string):
    """Remove characters that are problematic in filenames or markup remnants,
    then strip surrounding whitespace.

    NOTE(review): the duplicated '?' and ':' replacements were likely
    full-width '？'/'：' before an encoding mangle — verify against upstream.
    """
    string = string.replace('<em>', '').replace('</em>', '') \
        .replace('<', '').replace('>', '').replace('\\', '').replace('/', '') \
        .replace('?', '').replace(':', '').replace('"', '').replace(':', '') \
        .replace('|', '').replace('?', '').replace('*', '')
    return string.strip().encode('utf-8', 'ignore').decode('utf-8')


def seconds2hms(seconds):
    """Convert a second count to an 'HH:MM:SS' string."""
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    return '%02d:%02d:%02d' % (h, m, s)
| 21.1
| 78
| 0.57346
| 96
| 844
| 5.041667
| 0.520833
| 0.289256
| 0.390496
| 0.46281
| 0.159091
| 0.159091
| 0.159091
| 0.159091
| 0.159091
| 0.159091
| 0
| 0.019444
| 0.146919
| 844
| 40
| 79
| 21.1
| 0.652778
| 0.088863
| 0
| 0
| 0
| 0
| 0.093278
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.1
| 0
| 0.55
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
476ad9cbe2150dc779979d67248c0916b5140505
| 261
|
py
|
Python
|
src/exchange.py
|
colticol/CryptoCurrencyProfit
|
99fd29b828c7f5a21b02051249e66a90ba2ecf1f
|
[
"BSD-2-Clause"
] | null | null | null |
src/exchange.py
|
colticol/CryptoCurrencyProfit
|
99fd29b828c7f5a21b02051249e66a90ba2ecf1f
|
[
"BSD-2-Clause"
] | 3
|
2018-01-11T07:34:50.000Z
|
2018-07-24T13:48:24.000Z
|
src/exchange.py
|
colticol/CryptoCurrencyTax
|
99fd29b828c7f5a21b02051249e66a90ba2ecf1f
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-


class Exchange(object):
    """Base class for exchange adapters; subclasses implement calcTotal."""

    def __init__(self):
        # No trades loaded yet; subclasses are expected to populate this.
        self.trades = False

    def getTrades(self):
        """Return whatever trades are currently held (False until loaded)."""
        return self.trades

    # @abstractmethod
    def calcTotal(self, controller):
        """Compute totals for this exchange; overridden by subclasses."""
        pass
| 17.4
| 36
| 0.59387
| 27
| 261
| 5.592593
| 0.703704
| 0.13245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005319
| 0.279693
| 261
| 14
| 37
| 18.642857
| 0.797872
| 0.233716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 4
|
478abcd28d11f947fcefcf86d195fb17f67c2e00
| 29
|
py
|
Python
|
src/pysiaalarm/data/__init__.py
|
nestorix1343/pysiaalarm
|
f40f1b429ec43c70dc1a5b47191a50d8db04dd0f
|
[
"MIT"
] | 12
|
2020-05-22T20:32:29.000Z
|
2022-03-06T23:42:16.000Z
|
src/pysiaalarm/data/__init__.py
|
nestorix1343/pysiaalarm
|
f40f1b429ec43c70dc1a5b47191a50d8db04dd0f
|
[
"MIT"
] | 18
|
2020-09-30T17:20:29.000Z
|
2022-02-21T12:54:43.000Z
|
src/pysiaalarm/data/__init__.py
|
nestorix1343/pysiaalarm
|
f40f1b429ec43c70dc1a5b47191a50d8db04dd0f
|
[
"MIT"
] | 10
|
2020-06-14T11:18:40.000Z
|
2022-01-06T13:53:36.000Z
|
"""Data helpers and files."""
| 29
| 29
| 0.655172
| 4
| 29
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.730769
| 0.793103
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
478b08805e7ae58d04cf7079c7a1a24d04dfbfbc
| 2,645
|
py
|
Python
|
observatory/models/user.py
|
spookey/observatory
|
be5cc92f53f12e6341e7e3040f26360e54cfdf7d
|
[
"MIT"
] | null | null | null |
observatory/models/user.py
|
spookey/observatory
|
be5cc92f53f12e6341e7e3040f26360e54cfdf7d
|
[
"MIT"
] | 1
|
2020-03-28T09:51:56.000Z
|
2020-03-28T09:51:56.000Z
|
observatory/models/user.py
|
spookey/dz_stats_page
|
be5cc92f53f12e6341e7e3040f26360e54cfdf7d
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from logging import getLogger
from flask_login import UserMixin
from observatory.database import TXT_LEN_SHORT, CreatedMixin, Model
from observatory.lib.clock import (
epoch_milliseconds,
epoch_seconds,
time_format,
)
from observatory.models.point import Point
from observatory.start.extensions import BCRYPT, DB
LOG = getLogger(__name__)
# pylint: disable=no-member
# pylint: disable=too-many-ancestors
class User(UserMixin, CreatedMixin, Model):
    """Login-capable account record: credentials, activity flag, and points."""

    # Unique login name, bounded by the project's short-text length.
    username = DB.Column(
        DB.String(length=TXT_LEN_SHORT), unique=True, nullable=False
    )
    # bcrypt password hash; nullable, so an account may exist without one.
    pw_hash = DB.Column(DB.LargeBinary(length=TXT_LEN_SHORT), nullable=True)
    # Inactive users are rejected by flask-login through ``is_active`` below.
    active = DB.Column(DB.Boolean(), nullable=False, default=True)
    # Timestamp of the most recent login (stamped by ``refresh``).
    last_login = DB.Column(DB.DateTime(), nullable=True)
    # Points owned by this user, newest first; removed with the user.
    points = DB.relationship(
        'Point',
        backref=DB.backref('user', lazy=True),
        order_by='Point.created.desc()',
        cascade='all,delete-orphan',
        lazy=True,
    )

    def __init__(self, username, password, **kwargs):
        Model.__init__(self, username=username, **kwargs)
        # Hash and store the password without committing yet; the caller
        # controls the transaction boundary.
        self.set_password(password, _commit=False)

    @classmethod
    def by_username(cls, username):
        """Return the user with *username*, or None if absent."""
        return cls.query.filter(cls.username == username).first()

    def get_id(self):
        '''required by flask-login'''
        return str(self.prime)

    @property
    def is_active(self):
        '''required by flask-login'''
        return self.active

    @property
    def last_login_fmt(self):
        # Human-readable rendering of the last-login timestamp.
        return time_format(self.last_login)

    @property
    def last_login_epoch(self):
        return epoch_seconds(self.last_login)

    @property
    def last_login_epoch_ms(self):
        return epoch_milliseconds(self.last_login)

    def check_password(self, plain):
        """Compare *plain* against the stored bcrypt hash; None never matches."""
        if plain is None:
            return False
        return BCRYPT.check_password_hash(self.pw_hash, plain)

    @staticmethod
    def hash_password(plain):
        """Hash *plain* with bcrypt; a None password passes through as None."""
        if plain is not None:
            return BCRYPT.generate_password_hash(plain)
        return None

    def set_password(self, plain, _commit=True):
        return self.update(pw_hash=self.hash_password(plain), _commit=_commit)

    def refresh(self, _commit=True):
        """Record a successful login by stamping ``last_login`` with UTC now."""
        LOG.info('refreshing last_login for "%s"', self.username)
        return self.update(last_login=datetime.utcnow(), _commit=_commit)

    @property
    def query_points(self):
        # This user's points, ordered by the Point model's canonical sort.
        return Point.query_sorted(query=Point.query.with_parent(self))

    @property
    def length(self):
        """Number of points belonging to this user."""
        return self.query_points.count()

    @property
    def latest(self):
        """Most recent point, or None when the user has none."""
        return self.query_points.first()
| 27.552083
| 78
| 0.681664
| 331
| 2,645
| 5.247734
| 0.323263
| 0.046632
| 0.023028
| 0.034542
| 0.107081
| 0.078296
| 0.043754
| 0.043754
| 0
| 0
| 0
| 0
| 0.215123
| 2,645
| 95
| 79
| 27.842105
| 0.836705
| 0.04121
| 0
| 0.101449
| 0
| 0
| 0.030111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.202899
| false
| 0.115942
| 0.101449
| 0.115942
| 0.608696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
47d2d76f28eb8c5fddbeeb16b77bc8ba6aa56e5d
| 655
|
py
|
Python
|
2-APIs-and-Data-Visualization/starter/username_crawler.py
|
koltpython/python-workshops
|
8b7f5ce1bad0380d90a65a0e033d0acefd7ddcde
|
[
"MIT"
] | 3
|
2019-12-09T17:23:43.000Z
|
2021-12-15T09:10:44.000Z
|
2-APIs-and-Data-Visualization/starter/username_crawler.py
|
koltpython/python-workshops
|
8b7f5ce1bad0380d90a65a0e033d0acefd7ddcde
|
[
"MIT"
] | 1
|
2020-02-03T18:01:20.000Z
|
2020-02-03T18:27:43.000Z
|
2-APIs-and-Data-Visualization/starter/username_crawler.py
|
koltpython/python-workshops
|
8b7f5ce1bad0380d90a65a0e033d0acefd7ddcde
|
[
"MIT"
] | null | null | null |
import requests
import urllib.parse
def get_usernames_from_names(name_file):
    """Look up Instagram usernames for the names listed in *name_file*.

    Starter stub: returns an empty list until the search is implemented.
    """
    usernames = []
    # TODO: search for usernames and return the list containing the first search results
    # Hint: try this URL in your browser
    search_url = 'https://www.instagram.com/web/search/topsearch/?query=Ahmet%80Uysal'
    return usernames
def write_usernames_to_file(usernames):
    """Persist *usernames* so intermediate results survive between runs.

    Starter stub: does nothing until implemented.
    """
    # TODO: write given usernames to a file named usernames.txt
    pass
def read_usernames_from_file(username_file):
    """Load previously stored usernames from *username_file*.

    Starter stub: returns an empty list until implemented.
    """
    usernames = []
    # TODO: read usernames from the file with given name
    return usernames
| 22.586207
| 88
| 0.735878
| 92
| 655
| 5.108696
| 0.554348
| 0.082979
| 0.07234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003824
| 0.201527
| 655
| 28
| 89
| 23.392857
| 0.894837
| 0.418321
| 0
| 0.363636
| 0
| 0
| 0.178667
| 0
| 0
| 0
| 0
| 0.035714
| 0
| 1
| 0.272727
| false
| 0.090909
| 0.181818
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
47ec36d154ece57c44890be4c50d5d9156c37ce7
| 522
|
py
|
Python
|
ex8.py
|
jpch89/lpthw
|
dc492e5e88afa309413756a839312dd6148b1144
|
[
"MIT"
] | 4
|
2019-04-04T02:35:52.000Z
|
2021-11-16T05:44:42.000Z
|
ex8.py
|
jpch89/lpthw
|
dc492e5e88afa309413756a839312dd6148b1144
|
[
"MIT"
] | null | null | null |
ex8.py
|
jpch89/lpthw
|
dc492e5e88afa309413756a839312dd6148b1144
|
[
"MIT"
] | 7
|
2020-12-02T10:01:14.000Z
|
2022-02-03T07:05:51.000Z
|
# -*- coding: utf-8 -*-
# @Author: jpch89
# @Email: jpch89@outlook.com
# @Date: 2018-06-27 23:59:30
# @Last Modified by: jpch89
# @Last Modified time: 2018-06-28 00:02:38

# A four-slot format template, demonstrated with several argument types.
formatter = '{} {} {} {}'

for args in (
    (1, 2, 3, 4),
    ("one", "two", "three", "four"),
    (True, False, False, True),
    (formatter, formatter, formatter, formatter),
):
    print(formatter.format(*args))

print(formatter.format(
    "Try your",
    "Own text here",
    "Maybe a peom",
    "Or a song about fear"
))
| 29
| 67
| 0.645594
| 74
| 522
| 4.554054
| 0.648649
| 0.207715
| 0.296736
| 0.172107
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089041
| 0.16092
| 522
| 18
| 68
| 29
| 0.680365
| 0.312261
| 0
| 0
| 0
| 0
| 0.223796
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.454545
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
9a221f0fdcd0fc22e0356706cb4ba2f42a285d03
| 1,950
|
py
|
Python
|
src/experimental/test_edge_vertex.py
|
suhelhammoud/omr
|
36befc41acdf6af7d33d84b282f93ed485a77cc6
|
[
"Apache-2.0"
] | 2
|
2018-03-20T12:30:08.000Z
|
2019-08-06T02:03:36.000Z
|
src/experimental/test_edge_vertex.py
|
suhelhammoud/omr
|
36befc41acdf6af7d33d84b282f93ed485a77cc6
|
[
"Apache-2.0"
] | null | null | null |
src/experimental/test_edge_vertex.py
|
suhelhammoud/omr
|
36befc41acdf6af7d33d84b282f93ed485a77cc6
|
[
"Apache-2.0"
] | 1
|
2019-08-06T02:03:37.000Z
|
2019-08-06T02:03:37.000Z
|
from experimental.vertex_edge import Vertex, EDGE
import unittest
class EdgeVertexTestCase(unittest.TestCase):
    """Exercises Vertex.get and EDGE.get classification around a fixed centre."""

    def test_vertex(self):
        # Reference centre and one sample point per quadrant.
        c = (100, 100)  # center
        tl = (0, 0)  # top_left
        bl = (50, 150)  # bottom_left
        br = (150, 150)  # bottom_right
        tr = (150, 50)  # top_right
        # vertices: each quadrant sample classifies to its quadrant constant.
        self.assertEqual(Vertex.get(tl, c), Vertex.TOP_LEFT)
        self.assertEqual(Vertex.get(bl, c), Vertex.BOTTOM_LEFT)
        self.assertEqual(Vertex.get(br, c), Vertex.BOTTOM_RIGHT)
        self.assertEqual(Vertex.get(tr, c), Vertex.TOP_RIGHT)
        # r1 = EDGE.get([tl, tl, tr], c)
        # print(r1)
        # # print( [EDGE.str(i) for i in r1])
        # edges: each triple of vertex samples maps to the list of edges it
        # spans (first the triples starting at tl, then those starting at bl).
        self.assertEqual(EDGE.get([tl, tl, tr], c), [EDGE.TOP])
        self.assertEqual(EDGE.get([tl, tr, tr], c), [EDGE.TOP])
        self.assertEqual(EDGE.get([tl, tl, br], c), [EDGE.TOP, EDGE.RIGHT])
        self.assertEqual(EDGE.get([tl, tr, br], c), [EDGE.TOP, EDGE.RIGHT])
        self.assertEqual(EDGE.get([tl, bl, br], c), [EDGE.LEFT, EDGE.BOTTOM])
        self.assertEqual(EDGE.get([tl, br, br], c), [EDGE.LEFT, EDGE.BOTTOM])
        self.assertEqual(EDGE.get([tl, bl, tr], c), [EDGE.LEFT, EDGE.TOP, EDGE.RIGHT])
        self.assertEqual(EDGE.get([tl, br, tr], c), [EDGE.LEFT, EDGE.TOP, EDGE.RIGHT])
        self.assertEqual(EDGE.get([bl, bl, br], c), [EDGE.BOTTOM])
        self.assertEqual(EDGE.get([bl, br, br], c), [EDGE.BOTTOM])
        self.assertEqual(EDGE.get([bl, tl, tr], c), [EDGE.LEFT, EDGE.TOP])
        self.assertEqual(EDGE.get([bl, tr, tr], c), [EDGE.LEFT, EDGE.TOP])
        self.assertEqual(EDGE.get([bl, bl, tr], c), [EDGE.BOTTOM, EDGE.RIGHT])
        self.assertEqual(EDGE.get([bl, br, tr], c), [EDGE.BOTTOM, EDGE.RIGHT])
        self.assertEqual(EDGE.get([bl, tl, br], c), [EDGE.LEFT, EDGE.TOP, EDGE.RIGHT])
        self.assertEqual(EDGE.get([bl, tr, br], c), [EDGE.LEFT, EDGE.TOP, EDGE.RIGHT])
| 40.625
| 86
| 0.590769
| 288
| 1,950
| 3.965278
| 0.125
| 0.262697
| 0.2662
| 0.308231
| 0.700525
| 0.651489
| 0.590193
| 0.590193
| 0.564799
| 0.438704
| 0
| 0.017787
| 0.221538
| 1,950
| 47
| 87
| 41.489362
| 0.734519
| 0.072308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.689655
| 1
| 0.034483
| false
| 0
| 0.068966
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9a3150dc095ce224d8ac116a879190e180bbe95e
| 94
|
py
|
Python
|
asset_item/apps.py
|
ifty500/asset_management
|
a95e49f3a4cc0d4a84e88518de35ac09cf5b476e
|
[
"bzip2-1.0.6"
] | null | null | null |
asset_item/apps.py
|
ifty500/asset_management
|
a95e49f3a4cc0d4a84e88518de35ac09cf5b476e
|
[
"bzip2-1.0.6"
] | null | null | null |
asset_item/apps.py
|
ifty500/asset_management
|
a95e49f3a4cc0d4a84e88518de35ac09cf5b476e
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.apps import AppConfig
class AssetItemConfig(AppConfig):
    """Django application configuration for the asset_item app."""

    # App label used by Django's app registry and migrations.
    name = 'asset_item'
| 15.666667
| 33
| 0.765957
| 11
| 94
| 6.454545
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 94
| 5
| 34
| 18.8
| 0.898734
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9a3dd9441641f6306c28dc80919646d95ed0959b
| 135
|
py
|
Python
|
setup.py
|
janelia-flyem/flyem_syn_eval
|
770d7bc88f27c65e136c734fe4c0958d41b3c8c9
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
janelia-flyem/flyem_syn_eval
|
770d7bc88f27c65e136c734fe4c0958d41b3c8c9
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
janelia-flyem/flyem_syn_eval
|
770d7bc88f27c65e136c734fe4c0958d41b3c8c9
|
[
"BSD-3-Clause"
] | null | null | null |
from setuptools import setup, find_packages

# Minimal packaging definition for the flyem_syn_eval distribution;
# find_packages() discovers all importable packages automatically.
setup(
    name="flyem_syn_eval",
    version="0.0.2",
    packages=find_packages(),
)
| 19.285714
| 43
| 0.681481
| 18
| 135
| 4.888889
| 0.722222
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027523
| 0.192593
| 135
| 6
| 44
| 22.5
| 0.779817
| 0
| 0
| 0
| 0
| 0
| 0.140741
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7be01cd19f4c71af09edae9d2ee08c169745ada8
| 328
|
py
|
Python
|
venv/lib/python3.8/site-packages/azureml/_workspace/workspace_location_resolver.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azureml/_workspace/workspace_location_resolver.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azureml/_workspace/workspace_location_resolver.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
def get_workspace_dependent_resource_location(location):
if location == "centraluseuap":
return "eastus"
return location
| 32.8
| 60
| 0.45122
| 21
| 328
| 6.857143
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14939
| 328
| 9
| 61
| 36.444444
| 0.516129
| 0.527439
| 0
| 0
| 0
| 0
| 0.133803
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
7be19471e1ebbe5edf4e2325ae218fda9108a464
| 73
|
py
|
Python
|
3-classical_search.py
|
zealng/search-and-optimization
|
a8ed4342e0a2537d393143253e93c3f0f7e59969
|
[
"MIT"
] | null | null | null |
3-classical_search.py
|
zealng/search-and-optimization
|
a8ed4342e0a2537d393143253e93c3f0f7e59969
|
[
"MIT"
] | null | null | null |
3-classical_search.py
|
zealng/search-and-optimization
|
a8ed4342e0a2537d393143253e93c3f0f7e59969
|
[
"MIT"
] | null | null | null |
# A* search, minimax search, (extra) RRT (rapidly-exploring random trees)
| 73
| 73
| 0.753425
| 10
| 73
| 5.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123288
| 73
| 1
| 73
| 73
| 0.859375
| 0.972603
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7be3b2ab61526db9f3382e33ed41eaba0499001e
| 112
|
py
|
Python
|
tilebeard/__init__.py
|
olegsson/tilebeard
|
49571e26820c8f05bed683ab67fddbb130a3e66b
|
[
"MIT"
] | 1
|
2019-06-28T16:14:22.000Z
|
2019-06-28T16:14:22.000Z
|
tilebeard/__init__.py
|
olegsson/tilebeard
|
49571e26820c8f05bed683ab67fddbb130a3e66b
|
[
"MIT"
] | null | null | null |
tilebeard/__init__.py
|
olegsson/tilebeard
|
49571e26820c8f05bed683ab67fddbb130a3e66b
|
[
"MIT"
] | null | null | null |
from .tilebeard import TileBeard, ClusterBeard
__version__ = '0.2.10'
__all__ = ['TileBeard', 'ClusterBeard']
| 18.666667
| 46
| 0.741071
| 12
| 112
| 6.25
| 0.75
| 0.56
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 0.125
| 112
| 5
| 47
| 22.4
| 0.72449
| 0
| 0
| 0
| 0
| 0
| 0.241071
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d000e329ce3bfa632804146b58f63d51d35e3f9d
| 180
|
py
|
Python
|
runtests.py
|
parthenon/aldryn-boilerplates
|
515431e5869ef463de4a34841eeddf22446cbb7e
|
[
"BSD-3-Clause"
] | 6
|
2015-12-05T03:50:10.000Z
|
2018-10-02T05:04:24.000Z
|
runtests.py
|
parthenon/aldryn-boilerplates
|
515431e5869ef463de4a34841eeddf22446cbb7e
|
[
"BSD-3-Clause"
] | 26
|
2015-01-28T10:04:29.000Z
|
2018-11-23T08:32:21.000Z
|
runtests.py
|
parthenon/aldryn-boilerplates
|
515431e5869ef463de4a34841eeddf22446cbb7e
|
[
"BSD-3-Clause"
] | 10
|
2015-04-14T21:23:45.000Z
|
2018-11-21T13:11:34.000Z
|
# -*- coding: utf-8 -*-
import os
import sys
cmd = 'coverage run `which djangocms-helper` aldryn_boilerplates test --cms --extra-settings=test_settings'
sys.exit(os.system(cmd))
| 22.5
| 107
| 0.727778
| 26
| 180
| 4.961538
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.122222
| 180
| 7
| 108
| 25.714286
| 0.810127
| 0.116667
| 0
| 0
| 0
| 0
| 0.630573
| 0.191083
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d001793120af97982872544dcdd26da0c3b4fc08
| 540
|
py
|
Python
|
osp-stibbons-master/real_estate/views.py
|
deepakavattotte2191/FISBO-Real-esate--website
|
08390a69013e78673607d4243a4f9f2d91531905
|
[
"Apache-2.0"
] | null | null | null |
osp-stibbons-master/real_estate/views.py
|
deepakavattotte2191/FISBO-Real-esate--website
|
08390a69013e78673607d4243a4f9f2d91531905
|
[
"Apache-2.0"
] | null | null | null |
osp-stibbons-master/real_estate/views.py
|
deepakavattotte2191/FISBO-Real-esate--website
|
08390a69013e78673607d4243a4f9f2d91531905
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
from property.views import hot_properties
from property.models import Property
def index(request, *args, **kwargs):
context = {
'carousel_property_list': hot_properties(4),
'hot_property_list': hot_properties(4),
'recommended_property_list': hot_properties(4)
}
return render(request, 'index.html', context)
def about(request, *args, **kwargs):
return render(request, 'about.html')
def blog(request, *args, **kwargs):
return render(request, 'blog.html')
| 25.714286
| 54
| 0.703704
| 66
| 540
| 5.606061
| 0.363636
| 0.140541
| 0.137838
| 0.202703
| 0.405405
| 0.194595
| 0
| 0
| 0
| 0
| 0
| 0.006726
| 0.174074
| 540
| 20
| 55
| 27
| 0.82287
| 0
| 0
| 0
| 0
| 0
| 0.172222
| 0.087037
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.214286
| 0.142857
| 0.642857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
d010d4cf03c653bbb328629a0e2f5c6eeef4c788
| 535
|
py
|
Python
|
tests/solidity/test_cases/01_ast_basics.py
|
AlexandreH/securify2
|
2d2ba0e1c20cdda550120ecdc1a7164db9b90e3c
|
[
"Apache-2.0"
] | 258
|
2020-01-23T16:58:38.000Z
|
2022-03-31T17:29:25.000Z
|
tests/solidity/test_cases/01_ast_basics.py
|
sirhashalot/securify2
|
6852707449577add14bafce8e304946b3490a977
|
[
"Apache-2.0"
] | 34
|
2020-01-30T06:11:58.000Z
|
2022-02-27T07:53:17.000Z
|
tests/solidity/test_cases/01_ast_basics.py
|
sirhashalot/securify2
|
6852707449577add14bafce8e304946b3490a977
|
[
"Apache-2.0"
] | 66
|
2020-01-28T09:23:05.000Z
|
2022-03-22T09:01:43.000Z
|
# language=Solidity
"""
pragma solidity ^0.5.0;
contract A {
uint i;
function foo(uint a) public {
i += 1;
}
}
"""
import unittest
from securify.solidity.v_0_5_x import solidity_grammar as ast
def validate_attributed_ast(test: unittest.TestCase, source_unit: ast.SourceUnit):
test.assertIsInstance(source_unit.nodes[1], ast.ContractDefinition)
test.assertIsInstance(source_unit.nodes[1].nodes[0], ast.VariableDeclaration)
test.assertIsInstance(source_unit.nodes[1].nodes[1], ast.FunctionDefinition)
| 25.47619
| 82
| 0.740187
| 71
| 535
| 5.43662
| 0.492958
| 0.103627
| 0.202073
| 0.233161
| 0.305699
| 0.305699
| 0.212435
| 0
| 0
| 0
| 0
| 0.024017
| 0.143925
| 535
| 20
| 83
| 26.75
| 0.818777
| 0.235514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d071f5cae19e530b810e109bcb39567f5debe825
| 111
|
py
|
Python
|
stonehenge/middlewares/types.py
|
RobertTownley/stonehenge
|
376b8e1501dd12ac1bcec5de680a5b521b0d949c
|
[
"MIT"
] | 1
|
2018-09-07T14:15:31.000Z
|
2018-09-07T14:15:31.000Z
|
stonehenge/middlewares/types.py
|
RobertTownley/stonehenge
|
376b8e1501dd12ac1bcec5de680a5b521b0d949c
|
[
"MIT"
] | 5
|
2018-09-06T01:48:12.000Z
|
2021-05-08T10:47:00.000Z
|
stonehenge/middlewares/types.py
|
RobertTownley/stonehenge
|
376b8e1501dd12ac1bcec5de680a5b521b0d949c
|
[
"MIT"
] | null | null | null |
from typing import Callable
from ..requests.request import Request
Middleware = Callable[[Request], Request]
| 18.5
| 41
| 0.792793
| 13
| 111
| 6.769231
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126126
| 111
| 5
| 42
| 22.2
| 0.907216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d095d30b8942d5364e0002c535ca4797001d8e44
| 1,102
|
py
|
Python
|
typing_pattern_exercises/topic2/override/example3.py
|
fbsamples/python-typing-tutorial
|
ffb90f87bb402d3d66d2e2eec4f56197da9f1c64
|
[
"MIT"
] | null | null | null |
typing_pattern_exercises/topic2/override/example3.py
|
fbsamples/python-typing-tutorial
|
ffb90f87bb402d3d66d2e2eec4f56197da9f1c64
|
[
"MIT"
] | null | null | null |
typing_pattern_exercises/topic2/override/example3.py
|
fbsamples/python-typing-tutorial
|
ffb90f87bb402d3d66d2e2eec4f56197da9f1c64
|
[
"MIT"
] | null | null | null |
# pyre-ignore-all-errors
# TODO: Change `pyre-ignore-all-errors` to `pyre-strict` on line 1, so we get
# to see all type errors in this file.
# A (hypothetical) Python developer is having trouble with a typing-related
# issue. Here is what the code looks like:
class ConfigA:
pass
class ConfigB:
some_attribute: int = 1
class HelperBase:
def __init__(self, config: ConfigA | ConfigB) -> None:
self.config = config
def common_fn(self) -> None:
pass
class HelperA(HelperBase):
def __init__(self, config: ConfigA) -> None:
super().__init__(config)
class HelperB(HelperBase):
def __init__(self, config: ConfigB) -> None:
super().__init__(config)
def some_fn(self) -> int:
return self.config.some_attribute
# The developer is confused about why Pyre reports a type error on `HelperB.some_fn`.
# Question: Is the reported type error a false positive or not?
# Question: How would you suggest changing the code to avoid the type error?
# Question: Is it a good idea to have `HelperA` and `HelperB` share the same base class?
| 25.627907
| 88
| 0.696915
| 162
| 1,102
| 4.58642
| 0.493827
| 0.067295
| 0.068641
| 0.084791
| 0.12786
| 0.091521
| 0
| 0
| 0
| 0
| 0
| 0.002307
| 0.213249
| 1,102
| 42
| 89
| 26.238095
| 0.854671
| 0.506352
| 0
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0
| 1
| 0.294118
| false
| 0.117647
| 0
| 0.058824
| 0.705882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
d0befbaad9a4fa61cc0520f99e6a47fc3f7aceba
| 168
|
py
|
Python
|
apps/funcionarios/admin.py
|
CleitonCandiotto/gestao_rh
|
ee8d6a25bb97f43d96b5e29e6e938891d82b2fe3
|
[
"MIT"
] | null | null | null |
apps/funcionarios/admin.py
|
CleitonCandiotto/gestao_rh
|
ee8d6a25bb97f43d96b5e29e6e938891d82b2fe3
|
[
"MIT"
] | null | null | null |
apps/funcionarios/admin.py
|
CleitonCandiotto/gestao_rh
|
ee8d6a25bb97f43d96b5e29e6e938891d82b2fe3
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Funcionario
@admin.register(Funcionario)
class FuncionarioAdmin(admin.ModelAdmin):
list_display = ['nome',]
| 18.666667
| 41
| 0.779762
| 19
| 168
| 6.842105
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 168
| 8
| 42
| 21
| 0.884354
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ef9cbcc769d100d6ccbd2e304639d0b2ef321267
| 2,024
|
py
|
Python
|
src/pyfao56/__init__.py
|
kthorp/pyfao56
|
86b71a58ef1d8a414c124f0d7ec00e55079fc7a2
|
[
"CC0-1.0"
] | null | null | null |
src/pyfao56/__init__.py
|
kthorp/pyfao56
|
86b71a58ef1d8a414c124f0d7ec00e55079fc7a2
|
[
"CC0-1.0"
] | null | null | null |
src/pyfao56/__init__.py
|
kthorp/pyfao56
|
86b71a58ef1d8a414c124f0d7ec00e55079fc7a2
|
[
"CC0-1.0"
] | null | null | null |
"""
########################################################################
The pyfao56 Python package facilitates FAO-56 computations of daily soil
water balance using the dual crop coefficient method to estimate crop
evapotranspiration (ET).
The FAO-56 method is described in the following documentation:
Allen, R. G., Pereira, L. S., Raes, D., Smith, M., 1998. FAO Irrigation
and Drainage Paper No. 56. Crop Evapotranspiration: Guidelines for
Computing Crop Water Requirements. Food and Agriculture Organization of
the United Nations, Rome Italy.
http://www.fao.org/3/x0490e/x0490e00.htm
Reference ET is computed using the ASCE Standardized Reference ET
Equation, which is described in the following documentation:
ASCE Task Committee on Standardization of Reference Evapotranspiration
(Walter, I. A., Allen, R. G., Elliott, R., Itenfisu, D., Brown, P.,
Jensen, M. E.,Mecham, B., Howell, T. A., Snyder, R., Eching, S.,
Spofford, T., Hattendorf, M., Martin, D., Cuenca, R. H., Wright, J. L.)
, 2005. The ASCE Standardized Reference Evapotranspiration Equation.
American Society of Civil Engineers, Reston, VA.
https://ascelibrary.org/doi/book/10.1061/9780784408056
The pyfao56 package contains the following modules:
irrigation.py
I/O tools to define irrigation management schedules
model.py
Equations for daily soil water balance computations
parameters.py
I/O tools for required input parameters
refet.py
Equations for computing ASCE Standardized Reference ET
update.py
I/O tools and methods for state variable updating
weather.py
I/O tools for required weather information
01/07/2016 Initial Python functions developed by Kelly Thorp
11/04/2021 Scripts updated for inclusion in the pyfao56 Python package
########################################################################
"""
from .irrigation import Irrigation
from .model import Model
from .parameters import Parameters
from .update import Update
from .weather import Weather
| 37.481481
| 72
| 0.702569
| 268
| 2,024
| 5.30597
| 0.541045
| 0.008439
| 0.011252
| 0.025316
| 0.081575
| 0.081575
| 0
| 0
| 0
| 0
| 0
| 0.038892
| 0.161561
| 2,024
| 53
| 73
| 38.188679
| 0.799057
| 0.919466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ef9de0060c155325d8f45cd9365aa5fe07dcb2d5
| 382
|
py
|
Python
|
qBitrr/__init__.py
|
Drapersniper/Qbitrr
|
9c66b02ce131414aa51ac842ea43f717c41d4a24
|
[
"MIT"
] | 9
|
2021-11-30T18:08:22.000Z
|
2022-03-18T11:09:25.000Z
|
qBitrr/__init__.py
|
Drapersniper/Qbitrr
|
9c66b02ce131414aa51ac842ea43f717c41d4a24
|
[
"MIT"
] | 17
|
2021-11-01T13:10:51.000Z
|
2022-02-28T02:34:53.000Z
|
qBitrr/__init__.py
|
Drapersniper/Qbitrr
|
9c66b02ce131414aa51ac842ea43f717c41d4a24
|
[
"MIT"
] | 4
|
2021-12-12T00:55:09.000Z
|
2022-03-18T14:45:58.000Z
|
import platform
import qBitrr.logger # noqa
try:
if platform.python_implementation() == "CPython":
# Only replace complexjson on CPython
# On PyPy it shows a SystemError when attempting to
# decode the responses from qbittorrentapi
import requests
import ujson
requests.models.complexjson = ujson
except ImportError:
pass
| 23.875
| 59
| 0.683246
| 42
| 382
| 6.190476
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.269634
| 382
| 15
| 60
| 25.466667
| 0.9319
| 0.342932
| 0
| 0
| 0
| 0
| 0.028455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.111111
| 0.555556
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
efc5000c7a1f095db3f4261b60fa663fdc124f35
| 123
|
py
|
Python
|
post/urls.py
|
bozburak/Blog_With_Django
|
beef62228ac7ae5dd445722f078b7fbb6d243556
|
[
"MIT"
] | null | null | null |
post/urls.py
|
bozburak/Blog_With_Django
|
beef62228ac7ae5dd445722f078b7fbb6d243556
|
[
"MIT"
] | null | null | null |
post/urls.py
|
bozburak/Blog_With_Django
|
beef62228ac7ae5dd445722f078b7fbb6d243556
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from .views import post_detail
urlpatterns = [
url(r'^(?P<id>\d+)/$', post_detail),
]
| 20.5
| 40
| 0.674797
| 19
| 123
| 4.263158
| 0.736842
| 0.246914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 123
| 6
| 41
| 20.5
| 0.771429
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
efe9468c7d485b6ed042bbe2ba9c481d076d9ba1
| 146
|
py
|
Python
|
spammer/play.py
|
00-00-00-11/Raid-Toolbox
|
4d24841de5ef112dc15b858f62607e0d6b5277cd
|
[
"0BSD"
] | null | null | null |
spammer/play.py
|
00-00-00-11/Raid-Toolbox
|
4d24841de5ef112dc15b858f62607e0d6b5277cd
|
[
"0BSD"
] | null | null | null |
spammer/play.py
|
00-00-00-11/Raid-Toolbox
|
4d24841de5ef112dc15b858f62607e0d6b5277cd
|
[
"0BSD"
] | 1
|
2021-05-15T11:32:24.000Z
|
2021-05-15T11:32:24.000Z
|
import os
import winsound
winsound.PlaySound('.\\spammer\\song.wav', winsound.SND_FILENAME)
os.system("taskkill /F /pid "+str(os.getpid()))
| 24.333333
| 66
| 0.712329
| 20
| 146
| 5.15
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 146
| 5
| 67
| 29.2
| 0.792308
| 0
| 0
| 0
| 0
| 0
| 0.262411
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
eff7915eb2a49d5ffe87a3ae27a3b857e22197ec
| 293
|
py
|
Python
|
app/domain/repository/interfaces/abstract_repository_update_action.py
|
dmaiabjj/luiza-lab-api-challenger
|
8f55ebbdd49a170ca3fdf38da229d29bd0548adf
|
[
"MIT"
] | null | null | null |
app/domain/repository/interfaces/abstract_repository_update_action.py
|
dmaiabjj/luiza-lab-api-challenger
|
8f55ebbdd49a170ca3fdf38da229d29bd0548adf
|
[
"MIT"
] | null | null | null |
app/domain/repository/interfaces/abstract_repository_update_action.py
|
dmaiabjj/luiza-lab-api-challenger
|
8f55ebbdd49a170ca3fdf38da229d29bd0548adf
|
[
"MIT"
] | null | null | null |
import abc
from abc import abstractmethod
class AbstractRepositoryUpdateAction(abc.ABC):
@abstractmethod
def update_from_dict(self, entity, data):
raise NotImplementedError
@abstractmethod
def update_from_model(self, entity, data):
raise NotImplementedError
| 22.538462
| 46
| 0.750853
| 30
| 293
| 7.2
| 0.5
| 0.157407
| 0.212963
| 0.25
| 0.351852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194539
| 293
| 12
| 47
| 24.416667
| 0.915254
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.222222
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
4bdf577a97e1c8b1196592126aa020270a0807c8
| 354
|
py
|
Python
|
test_si7005.py
|
BMarvi/si7005
|
e9cba271fd87d6290b0100e998b6329d87b9a309
|
[
"Apache-2.0"
] | 1
|
2015-10-02T07:01:25.000Z
|
2015-10-02T07:01:25.000Z
|
test_si7005.py
|
BMarvi/si7005
|
e9cba271fd87d6290b0100e998b6329d87b9a309
|
[
"Apache-2.0"
] | null | null | null |
test_si7005.py
|
BMarvi/si7005
|
e9cba271fd87d6290b0100e998b6329d87b9a309
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
from si import *
s1 = Si7005()
print "Hum: %.2f Temp: %.2f" % (s1.readHumidity(), s1.readTemperature())
#other method, faster if both data is needed:
print "Hum: %.2f Temp: %.2f" % s1.readHumidityTemperature()
# short methods:
print "Hum: %.2f Temp: %.2f" % (s1.getH() , s1.getT())
print "Hum: %.2f Temp: %.2f" % s1.getHT()
| 20.823529
| 73
| 0.615819
| 50
| 354
| 4.36
| 0.56
| 0.146789
| 0.183486
| 0.256881
| 0.330275
| 0.330275
| 0
| 0
| 0
| 0
| 0
| 0.065068
| 0.175141
| 354
| 16
| 74
| 22.125
| 0.681507
| 0.211864
| 0
| 0
| 0
| 0
| 0.306569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.166667
| null | null | 0.666667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
4be2124f1911ab944bce1b9e19a55f92763c9cee
| 100
|
py
|
Python
|
examples/set.remove/ex2.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/set.remove/ex2.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/set.remove/ex2.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
# s.remove(0)
s = {1, 2, 3}
s.remove(0)
print(s)
# bug as it should return a KeyError exception !!!
| 16.666667
| 50
| 0.63
| 20
| 100
| 3.15
| 0.75
| 0.222222
| 0.253968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061728
| 0.19
| 100
| 5
| 51
| 20
| 0.716049
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ef06609364202144f60744c267f4c05cd98e1a59
| 72
|
py
|
Python
|
test.py
|
p1s3cdAN/remember_testing
|
43e4e83ebdd286bb5505addbb80448ad95da5af1
|
[
"MIT"
] | null | null | null |
test.py
|
p1s3cdAN/remember_testing
|
43e4e83ebdd286bb5505addbb80448ad95da5af1
|
[
"MIT"
] | null | null | null |
test.py
|
p1s3cdAN/remember_testing
|
43e4e83ebdd286bb5505addbb80448ad95da5af1
|
[
"MIT"
] | null | null | null |
import time
import qualysapi
qgc = qualysapi.connect("config.txt")
| 8
| 37
| 0.736111
| 9
| 72
| 5.888889
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 8
| 38
| 9
| 0.883333
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ef097aca11e4793703559e4972ec6d13f3c40e1a
| 7,334
|
py
|
Python
|
tests/scoring_tests.py
|
villebro/pyhtzee
|
44bfcc8fb0d5a9f37fc87d6db5a45fafe8dd9ee0
|
[
"MIT"
] | 8
|
2019-08-23T09:04:47.000Z
|
2021-09-16T10:23:37.000Z
|
tests/scoring_tests.py
|
villebro/pyhtzee
|
44bfcc8fb0d5a9f37fc87d6db5a45fafe8dd9ee0
|
[
"MIT"
] | null | null | null |
tests/scoring_tests.py
|
villebro/pyhtzee
|
44bfcc8fb0d5a9f37fc87d6db5a45fafe8dd9ee0
|
[
"MIT"
] | 1
|
2020-08-08T04:32:53.000Z
|
2020-08-08T04:32:53.000Z
|
from pyhtzee.classes import Category, Rule
from pyhtzee.scoring import (
CONSTANT_SCORES_YAHTZEE,
score_upper_section,
score_x_of_a_kind,
score_one_pair,
score_two_pairs,
score_full_house,
score_small_straight,
score_large_straight,
score_yahtzee,
score_chance,
score_upper_section_bonus,
)
from unittest import TestCase
class ScoringTestCase(TestCase):
def test_score_upper_section(self):
self.assertEqual(score_upper_section([1, 1, 1, 2, 3], face=1), 3)
self.assertEqual(score_upper_section([2, 1, 2, 2, 4], face=2), 6)
self.assertEqual(score_upper_section([1, 1, 1, 2, 3], face=6), 0)
def test_score_three_of_a_kind_yahtzee(self):
rule = Rule.YAHTZEE
self.assertEqual(score_x_of_a_kind([1, 1, 1, 2, 2], 3, rule), 7)
self.assertEqual(score_x_of_a_kind([2, 2, 2, 3, 2], 3, rule), 11)
self.assertEqual(score_x_of_a_kind([1, 2, 2, 3, 6], 3, rule), 0)
self.assertEqual(score_x_of_a_kind([6, 6, 6, 6, 6], 3, rule), 30)
def test_score_three_of_a_kind_yatzy(self):
rule = Rule.YATZY
self.assertEqual(score_x_of_a_kind([1, 1, 1, 2, 2], 3, rule), 3)
self.assertEqual(score_x_of_a_kind([2, 2, 2, 3, 2], 3, rule), 6)
self.assertEqual(score_x_of_a_kind([1, 2, 2, 3, 6], 3, rule), 0)
self.assertEqual(score_x_of_a_kind([6, 6, 6, 6, 6], 3, rule), 18)
def test_score_four_of_a_kind_yahtzee(self):
rule = Rule.YAHTZEE
self.assertEqual(score_x_of_a_kind([1, 1, 1, 2, 1], 4, rule), 6)
self.assertEqual(score_x_of_a_kind([2, 2, 2, 3, 2], 4, rule), 11)
self.assertEqual(score_x_of_a_kind([1, 2, 2, 3, 6], 4, rule), 0)
self.assertEqual(score_x_of_a_kind([6, 6, 6, 6, 6], 4, rule), 30)
def test_score_four_of_a_kind_yatzy(self):
rule = Rule.YATZY
self.assertEqual(score_x_of_a_kind([1, 1, 1, 2, 1], 4, rule), 4)
self.assertEqual(score_x_of_a_kind([2, 2, 2, 3, 2], 4, rule), 8)
self.assertEqual(score_x_of_a_kind([1, 2, 2, 3, 6], 4, rule), 0)
self.assertEqual(score_x_of_a_kind([6, 6, 6, 6, 6], 4, rule), 24)
def test_score_one_pair(self):
self.assertEqual(score_one_pair([1, 1, 2, 3, 4]), 2)
self.assertEqual(score_one_pair([1, 1, 2, 2, 3]), 4)
self.assertEqual(score_one_pair([1, 1, 6, 6, 6]), 12)
self.assertEqual(score_one_pair([1, 2, 3, 4, 5]), 0)
def test_score_two_pairs(self):
self.assertEqual(score_two_pairs([1, 1, 2, 2, 4]), 6)
self.assertEqual(score_two_pairs([1, 1, 2, 3, 4]), 0)
self.assertEqual(score_two_pairs([2, 3, 2, 3, 2]), 10)
self.assertEqual(score_two_pairs([1, 2, 3, 4, 5]), 0)
def test_score_full_house_yahtzee(self):
rule = Rule.YAHTZEE
self.assertEqual(score_full_house([1, 1, 1, 2, 2], rule), 25)
self.assertEqual(score_full_house([1, 1, 1, 2, 3], rule), 0)
self.assertEqual(score_full_house([6, 6, 6, 5, 5], rule), 25)
self.assertEqual(score_full_house([6, 6, 6, 6, 6], rule), 0)
def test_score_full_house_yatzy(self):
rule = Rule.YATZY
self.assertEqual(score_full_house([1, 1, 1, 2, 2], rule), 7)
self.assertEqual(score_full_house([1, 1, 1, 2, 3], rule), 0)
self.assertEqual(score_full_house([6, 6, 6, 5, 5], rule), 28)
self.assertEqual(score_full_house([6, 6, 6, 6, 6], rule), 0)
def test_score_small_straight_yahtzee(self):
rule = Rule.YAHTZEE
self.assertEqual(score_small_straight([1, 3, 2, 5, 4], rule), 30)
self.assertEqual(score_small_straight([6, 3, 5, 4, 1], rule), 30)
self.assertEqual(score_small_straight([2, 3, 5, 4, 2], rule), 30)
self.assertEqual(score_small_straight([1, 1, 1, 1, 1], rule), 0)
self.assertEqual(score_small_straight([1, 2, 3, 5, 4], rule), 30)
def test_score_small_straight_yatzy(self):
rule = Rule.YATZY
self.assertEqual(score_small_straight([1, 3, 2, 5, 4], rule), 15)
self.assertEqual(score_small_straight([6, 3, 5, 4, 1], rule), 0)
self.assertEqual(score_small_straight([2, 3, 5, 4, 6], rule), 0)
self.assertEqual(score_small_straight([1, 1, 1, 1, 1], rule), 0)
self.assertEqual(score_small_straight([1, 2, 3, 5, 4], rule), 15)
def test_score_large_straight_yahtzee(self):
rule = Rule.YAHTZEE
self.assertEqual(score_large_straight([1, 3, 2, 5, 4], rule), 40)
self.assertEqual(score_large_straight([6, 3, 5, 4, 2], rule), 40)
self.assertEqual(score_large_straight([2, 3, 5, 4, 2], rule), 0)
self.assertEqual(score_large_straight([1, 1, 1, 1, 1], rule), 0)
def test_score_large_straight_yatzy(self):
rule = Rule.YATZY
self.assertEqual(score_large_straight([1, 3, 2, 5, 4], rule), 0)
self.assertEqual(score_large_straight([6, 3, 5, 4, 2], rule), 20)
self.assertEqual(score_large_straight([2, 3, 5, 4, 2], rule), 0)
self.assertEqual(score_large_straight([1, 1, 1, 1, 1], rule), 0)
def test_score_yahtzee(self):
self.assertEqual(score_yahtzee([1, 1, 1, 1, 1]), 50)
self.assertEqual(score_yahtzee([2, 2, 2, 3, 2]), 0)
self.assertEqual(score_yahtzee([6, 6, 6, 6, 6]), 50)
def test_score_chance(self):
self.assertEqual(score_chance([1, 1, 1, 1, 1]), 5)
self.assertEqual(score_chance([6, 6, 6, 6, 6]), 30)
self.assertEqual(score_chance([1, 2, 3, 4, 5]), 15)
def test_upper_section_bonus_yahtzee(self):
rule = Rule.YAHTZEE
self.assertEqual(score_upper_section_bonus(0, rule), 0)
self.assertEqual(score_upper_section_bonus(63, rule), 35)
self.assertEqual(score_upper_section_bonus(100, rule), 35)
def test_upper_section_bonus_yatzy(self):
rule = Rule.YATZY
self.assertEqual(score_upper_section_bonus(0, rule), 0)
self.assertEqual(score_upper_section_bonus(63, rule), 50)
self.assertEqual(score_upper_section_bonus(100, rule), 50)
def test_score_extra_yahtzee(self):
self.assertEqual(CONSTANT_SCORES_YAHTZEE[Category.YAHTZEE_BONUS], 100)
def test_naive_max_score_yahtzee(self):
rule = Rule.YAHTZEE
upper_section_score = (
score_upper_section([1, 1, 1, 1, 1], 1)
+ score_upper_section([2, 2, 2, 2, 2], 2)
+ score_upper_section([3, 3, 3, 3, 3], 3)
+ score_upper_section([4, 4, 4, 4, 4], 4)
+ score_upper_section([5, 5, 5, 5, 5], 5)
+ score_upper_section([6, 6, 6, 6, 6], 6)
)
upper_section_bonus = score_upper_section_bonus(upper_section_score, rule)
lower_section_score = (
score_x_of_a_kind([6, 6, 6, 6, 6], 3, rule)
+ score_x_of_a_kind([6, 6, 6, 6, 6], 4, rule)
+ score_full_house([6, 6, 6, 5, 5], rule)
+ score_small_straight([1, 2, 3, 4, 5], rule)
+ score_large_straight([1, 2, 3, 4, 5], rule)
+ score_yahtzee([6, 6, 6, 6, 6])
+ score_chance([6, 6, 6, 6, 6])
)
self.assertEqual(upper_section_score, 105)
self.assertEqual(upper_section_bonus, 35)
self.assertEqual(lower_section_score, 235)
self.assertEqual(
upper_section_score + upper_section_bonus + lower_section_score, 375
)
| 45.8375
| 82
| 0.624489
| 1,191
| 7,334
| 3.581024
| 0.059614
| 0.24619
| 0.304807
| 0.025322
| 0.784525
| 0.700586
| 0.672685
| 0.631419
| 0.547948
| 0.486284
| 0
| 0.089415
| 0.228388
| 7,334
| 159
| 83
| 46.125786
| 0.664252
| 0
| 0
| 0.210145
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.507246
| 1
| 0.137681
| false
| 0
| 0.021739
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ef29c6cab3af75346c012d9e0e30d9fb907eb250
| 63
|
py
|
Python
|
Capitulo 1/69 - arquivo2.py
|
mmmacedo/python
|
2e7d99021342a5c7c31fe644ff194b6a8fa88a88
|
[
"MIT"
] | null | null | null |
Capitulo 1/69 - arquivo2.py
|
mmmacedo/python
|
2e7d99021342a5c7c31fe644ff194b6a8fa88a88
|
[
"MIT"
] | null | null | null |
Capitulo 1/69 - arquivo2.py
|
mmmacedo/python
|
2e7d99021342a5c7c31fe644ff194b6a8fa88a88
|
[
"MIT"
] | null | null | null |
with open("arquivo.txt") as arquivo:
print(arquivo.read())
| 31.5
| 37
| 0.68254
| 9
| 63
| 4.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 63
| 2
| 38
| 31.5
| 0.796296
| 0
| 0
| 0
| 0
| 0
| 0.174603
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
ef42b3f7ce94ac30ba41af95ffb8b24f28511bb5
| 2,538
|
py
|
Python
|
pymobiledevice3/__main__.py
|
iOSForensics/pymobiledevice3
|
6b148f4e58cc51cb44c18935913a3e6cec5b60d5
|
[
"MIT"
] | 1
|
2022-01-20T16:53:15.000Z
|
2022-01-20T16:53:15.000Z
|
pymobiledevice3/__main__.py
|
iOSForensics/pymobiledevice3
|
6b148f4e58cc51cb44c18935913a3e6cec5b60d5
|
[
"MIT"
] | null | null | null |
pymobiledevice3/__main__.py
|
iOSForensics/pymobiledevice3
|
6b148f4e58cc51cb44c18935913a3e6cec5b60d5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import logging
import click
import coloredlogs
from pymobiledevice3.cli.activation import cli as activation_cli
from pymobiledevice3.cli.afc import cli as afc_cli
from pymobiledevice3.cli.apps import cli as apps_cli
from pymobiledevice3.cli.backup import cli as backup_cli
from pymobiledevice3.cli.companion_proxy import cli as companion_cli
from pymobiledevice3.cli.crash import cli as crash_cli
from pymobiledevice3.cli.developer import cli as developer_cli
from pymobiledevice3.cli.diagnostics import cli as diagnostics_cli
from pymobiledevice3.cli.list_devices import cli as list_devices_cli
from pymobiledevice3.cli.lockdown import cli as lockdown_cli
from pymobiledevice3.cli.mounter import cli as mounter_cli
from pymobiledevice3.cli.notification import cli as notification_cli
from pymobiledevice3.cli.pcap import cli as pcap_cli
from pymobiledevice3.cli.power_assertion import cli as power_assertion_cli
from pymobiledevice3.cli.processes import cli as ps_cli
from pymobiledevice3.cli.profile import cli as profile_cli
from pymobiledevice3.cli.provision import cli as provision_cli
from pymobiledevice3.cli.restore import cli as restore_cli
from pymobiledevice3.cli.springboard import cli as springboard_cli
from pymobiledevice3.cli.syslog import cli as syslog_cli
from pymobiledevice3.cli.webinspector import cli as webinspector_cli
from pymobiledevice3.exceptions import NoDeviceConnectedError
# Colourised INFO-level logging for all CLI output.
coloredlogs.install(level=logging.INFO)
# Silence chatty third-party loggers that would pollute interactive output.
logging.getLogger('asyncio').disabled = True
logging.getLogger('parso.cache').disabled = True
logging.getLogger('parso.cache.pickle').disabled = True
logging.getLogger('parso.python.diff').disabled = True
logging.getLogger('humanfriendly.prompts').disabled = True
logging.getLogger('blib2to3.pgen2.driver').disabled = True
# Module-level logger for this entry-point module.
logger = logging.getLogger(__name__)
def cli():
    """Aggregate every pymobiledevice3 sub-CLI into one command tree and run it.

    Known connection-level failures are reported as log errors instead of
    tracebacks; all other exceptions propagate normally.
    """
    sources = [
        developer_cli, mounter_cli, apps_cli, profile_cli, lockdown_cli, diagnostics_cli, syslog_cli, pcap_cli,
        crash_cli, afc_cli, ps_cli, notification_cli, list_devices_cli, power_assertion_cli, springboard_cli,
        provision_cli, backup_cli, restore_cli, activation_cli, companion_cli, webinspector_cli,
    ]
    commands = click.CommandCollection(sources=sources)
    # Allow both -h and --help everywhere.
    commands.context_settings = {'help_option_names': ['-h', '--help']}
    try:
        commands()
    except NoDeviceConnectedError:
        logger.error('Device is not connected')
    except ConnectionAbortedError:
        logger.error('Device was disconnected')


if __name__ == '__main__':
    cli()
| 43.016949
| 111
| 0.815603
| 339
| 2,538
| 5.908555
| 0.238938
| 0.208687
| 0.230654
| 0.249626
| 0.054418
| 0.037943
| 0
| 0
| 0
| 0
| 0
| 0.011586
| 0.115839
| 2,538
| 58
| 112
| 43.758621
| 0.881016
| 0.008274
| 0
| 0
| 0
| 0
| 0.062401
| 0.016693
| 0
| 0
| 0
| 0
| 0.041667
| 1
| 0.020833
| false
| 0
| 0.520833
| 0
| 0.541667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ef5231209e695e7bc0b81bfb727bd5049b2b7461
| 1,080
|
py
|
Python
|
amlutils/collections/lists.py
|
parvoberoi/amlutils
|
afdbb036c5b081ba27f376fb9808435d6b37d66f
|
[
"Apache-2.0"
] | null | null | null |
amlutils/collections/lists.py
|
parvoberoi/amlutils
|
afdbb036c5b081ba27f376fb9808435d6b37d66f
|
[
"Apache-2.0"
] | 3
|
2021-09-08T02:43:05.000Z
|
2022-03-12T00:51:39.000Z
|
amlutils/collections/lists.py
|
parvoberoi/amlutils
|
afdbb036c5b081ba27f376fb9808435d6b37d66f
|
[
"Apache-2.0"
] | null | null | null |
import typing
def flatten_list(list_of_lists: typing.List[typing.List[typing.Any]]) -> typing.List[typing.Any]:
    """Unroll a two-level nested list into a single flat list.

    Parameters
    ----------
    list_of_lists: typing.List[typing.List[typing.Any]]
        2 level nested list

    Returns
    -------
    typing.List[typing.Any]
        the unrolled list
    """
    flat: typing.List[typing.Any] = []
    for sublist in list_of_lists:
        flat.extend(sublist)
    return flat
def chunks_of_list(iterable: typing.List[typing.Any], chunk_size: int):
    """Generate fixed-size chunks of the given list.

    Parameters
    ----------
    iterable: typing.List[typing.Any]
        iterable which needs to be broken down into fixed size chunks
    chunk_size: int
        desired size to divide the iterable into; all chunks except for the
        last one will be of this size

    Returns
    -------
    Generator[typing.List]
        Fixed size chunks of the original list
    """
    # Slicing clamps at the end of the sequence, so the explicit min() the
    # original used was redundant; the final chunk is simply shorter.
    for start in range(0, len(iterable), chunk_size):
        yield iterable[start:start + chunk_size]
| 28.421053
| 105
| 0.653704
| 152
| 1,080
| 4.559211
| 0.375
| 0.12987
| 0.184704
| 0.164502
| 0.193362
| 0.11544
| 0.11544
| 0.11544
| 0.11544
| 0
| 0
| 0.003667
| 0.242593
| 1,080
| 37
| 106
| 29.189189
| 0.843521
| 0.548148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
ef54fd16de2f94530a5566fe22707dd5ea1e8856
| 30
|
py
|
Python
|
data/studio21_generated/introductory/3255/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/3255/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/3255/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
def only_duplicates(string):
| 15
| 28
| 0.8
| 4
| 30
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 2
| 29
| 15
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
32540fec5a80bc7fac338c07f988b86a9c718aea
| 409
|
py
|
Python
|
python/testData/inspections/PyTypeCheckerInspection/MapReturnElementType.py
|
alwyn/intellij-community
|
22e80b2aa9779d553c44e33929ad49a8a94b8449
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyTypeCheckerInspection/MapReturnElementType.py
|
alwyn/intellij-community
|
22e80b2aa9779d553c44e33929ad49a8a94b8449
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyTypeCheckerInspection/MapReturnElementType.py
|
alwyn/intellij-community
|
22e80b2aa9779d553c44e33929ad49a8a94b8449
|
[
"Apache-2.0"
] | 1
|
2019-02-06T14:50:03.000Z
|
2019-02-06T14:50:03.000Z
|
def test():
xs = map(lambda x: x + 1, [1, 2, 3])
print('foo' + <warning descr="Expected type 'TypeVar('AnyStr', str, unicode)', got 'int' instead">xs[0]</warning>)
ys = map(tuple, iter([1, 2, 3]))
print(1 + <warning descr="Expected type 'int', got 'tuple' instead">ys[0]</warning>, 'bar' + <warning descr="Expected type 'TypeVar('AnyStr', str, unicode)', got 'tuple' instead">ys[1]</warning>)
| 68.166667
| 199
| 0.616137
| 62
| 409
| 4.064516
| 0.435484
| 0.142857
| 0.238095
| 0.285714
| 0.396825
| 0.396825
| 0.396825
| 0.396825
| 0.396825
| 0
| 0
| 0.031977
| 0.158924
| 409
| 5
| 200
| 81.8
| 0.700581
| 0
| 0
| 0
| 0
| 0
| 0.440098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.4
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
32901201a7f56735aba3ae7ea80d803c3a978f8c
| 85
|
py
|
Python
|
twitter.py
|
ksackvil/SocialScraper
|
b2d9c1724e895622e828c558bdefc814f4c52751
|
[
"MIT"
] | null | null | null |
twitter.py
|
ksackvil/SocialScraper
|
b2d9c1724e895622e828c558bdefc814f4c52751
|
[
"MIT"
] | null | null | null |
twitter.py
|
ksackvil/SocialScraper
|
b2d9c1724e895622e828c558bdefc814f4c52751
|
[
"MIT"
] | null | null | null |
"""Scrape tweets from the @thestrokes account into thestrokes.txt via the twint CLI."""
import subprocess

# NOTE(review): the original passed "-o thestrokes.txt" as a SINGLE argv
# element; an argparse-style CLI then receives the option value with a
# leading space (or rejects the token entirely). Flag and value are passed
# as separate arguments here. The return code is ignored, matching the
# original subprocess.call() behaviour.
subprocess.run(["twint", "-u", "thestrokes", "-o", "thestrokes.txt"])
| 28.333333
| 56
| 0.682353
| 11
| 85
| 5.272727
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105882
| 85
| 3
| 56
| 28.333333
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
329d316026bc7c58f74e2add9947f6a97366f8dd
| 85
|
py
|
Python
|
habit/apps.py
|
vanflymen/Twenty-One
|
1b425dd11a53b135ead5e6d721980b22f4a54672
|
[
"MIT"
] | 3
|
2017-01-06T20:01:12.000Z
|
2021-04-07T10:59:26.000Z
|
habit/apps.py
|
vanflymen/Twenty-One
|
1b425dd11a53b135ead5e6d721980b22f4a54672
|
[
"MIT"
] | null | null | null |
habit/apps.py
|
vanflymen/Twenty-One
|
1b425dd11a53b135ead5e6d721980b22f4a54672
|
[
"MIT"
] | 2
|
2019-09-17T18:36:56.000Z
|
2021-04-17T13:48:31.000Z
|
from django.apps import AppConfig
class HabitConfig(AppConfig):
    """Django application configuration for the ``habit`` app."""

    # Dotted module path of the application, as referenced in INSTALLED_APPS.
    name = 'habit'
| 14.166667
| 33
| 0.741176
| 10
| 85
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 85
| 5
| 34
| 17
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
32ac0fcd486a4ae7aea0e13dd36d21e0a0dcd9c5
| 622
|
py
|
Python
|
week-9/pSet-9/finance/forms/register.py
|
AH-SALAH/CS50X
|
1753b0c403968a574821131fa33afc5c86699c86
|
[
"Xnet",
"X11"
] | null | null | null |
week-9/pSet-9/finance/forms/register.py
|
AH-SALAH/CS50X
|
1753b0c403968a574821131fa33afc5c86699c86
|
[
"Xnet",
"X11"
] | null | null | null |
week-9/pSet-9/finance/forms/register.py
|
AH-SALAH/CS50X
|
1753b0c403968a574821131fa33afc5c86699c86
|
[
"Xnet",
"X11"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.fields.html5 import EmailField
from wtforms.validators import required, length, regexp, equal_to, email
class RegisterForm(FlaskForm):
    """User registration form: username, email, password and confirmation.

    All fields are required; username/passwords must be at least 3 characters,
    passwords must consist of word characters, and confirmation must equal
    the password field.
    """

    username = StringField('username', validators=[required(), length(min=3)])
    email = EmailField('email', validators=[required(), email()])
    # Raw strings fix the invalid escape sequence '\w' (SyntaxWarning on
    # modern Python); the regex pattern value itself is unchanged.
    password = PasswordField('password', validators=[required(), length(min=3), regexp(regex=r'\w{3,}')])
    confirmation = PasswordField('confirmation', validators=[required(), length(min=3), regexp(regex=r'\w{3,}'), equal_to(fieldname='password')])
| 62.2
| 144
| 0.745981
| 71
| 622
| 6.492958
| 0.394366
| 0.121475
| 0.156182
| 0.175705
| 0.238612
| 0.177874
| 0.177874
| 0.177874
| 0.177874
| 0
| 0
| 0.010714
| 0.099678
| 622
| 10
| 144
| 62.2
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.085072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.444444
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
087492658e117ea4083152a93cfe73eab4e6ec5f
| 4,492
|
py
|
Python
|
colourmaps.py
|
METASPACE2020/sm-standalone
|
14b8ec9edb45254322cb3445df8d07a3f1ea0565
|
[
"Apache-2.0"
] | null | null | null |
colourmaps.py
|
METASPACE2020/sm-standalone
|
14b8ec9edb45254322cb3445df8d07a3f1ea0565
|
[
"Apache-2.0"
] | null | null | null |
colourmaps.py
|
METASPACE2020/sm-standalone
|
14b8ec9edb45254322cb3445df8d07a3f1ea0565
|
[
"Apache-2.0"
] | null | null | null |
def viridis_colormap():
    """Build the 256-entry 'viridis' colormap, register it with matplotlib, and return it.

    Returns
    -------
    matplotlib.colors.LinearSegmentedColormap
        The colormap; it is also registered under the name 'viridis'.
    """
    import numpy as np
    # 256 RGB triplets in 0-255 range (the viridis palette).
    # dtype=np.float64: the np.float alias was removed in NumPy 1.20+, so the
    # original dtype=np.float raises AttributeError on modern NumPy.
    colors = np.array([(68, 1, 84), (68, 2, 85), (68, 3, 87), (69, 5, 88), (69, 6, 90), (69, 8, 91), (70, 9, 92), (70, 11, 94), (70, 12, 95), (70, 14, 97), (71, 15, 98), (71, 17, 99), (71, 18, 101), (71, 20, 102), (71, 21, 103), (71, 22, 105), (71, 24, 106), (72, 25, 107), (72, 26, 108), (72, 28, 110), (72, 29, 111), (72, 30, 112), (72, 32, 113), (72, 33, 114), (72, 34, 115), (72, 35, 116), (71, 37, 117), (71, 38, 118), (71, 39, 119), (71, 40, 120), (71, 42, 121), (71, 43, 122), (71, 44, 123), (70, 45, 124), (70, 47, 124), (70, 48, 125), (70, 49, 126), (69, 50, 127), (69, 52, 127), (69, 53, 128), (69, 54, 129), (68, 55, 129), (68, 57, 130), (67, 58, 131), (67, 59, 131), (67, 60, 132), (66, 61, 132), (66, 62, 133), (66, 64, 133), (65, 65, 134), (65, 66, 134), (64, 67, 135), (64, 68, 135), (63, 69, 135), (63, 71, 136), (62, 72, 136), (62, 73, 137), (61, 74, 137), (61, 75, 137), (61, 76, 137), (60, 77, 138), (60, 78, 138), (59, 80, 138), (59, 81, 138), (58, 82, 139), (58, 83, 139), (57, 84, 139), (57, 85, 139), (56, 86, 139), (56, 87, 140), (55, 88, 140), (55, 89, 140), (54, 90, 140), (54, 91, 140), (53, 92, 140), (53, 93, 140), (52, 94, 141), (52, 95, 141), (51, 96, 141), (51, 97, 141), (50, 98, 141), (50, 99, 141), (49, 100, 141), (49, 101, 141), (49, 102, 141), (48, 103, 141), (48, 104, 141), (47, 105, 141), (47, 106, 141), (46, 107, 142), (46, 108, 142), (46, 109, 142), (45, 110, 142), (45, 111, 142), (44, 112, 142), (44, 113, 142), (44, 114, 142), (43, 115, 142), (43, 116, 142), (42, 117, 142), (42, 118, 142), (42, 119, 142), (41, 120, 142), (41, 121, 142), (40, 122, 142), (40, 122, 142), (40, 123, 142), (39, 124, 142), (39, 125, 142), (39, 126, 142), (38, 127, 142), (38, 128, 142), (38, 129, 142), (37, 130, 142), (37, 131, 141), (36, 132, 141), (36, 133, 141), (36, 134, 141), (35, 135, 141), (35, 136, 141), (35, 137, 141), (34, 137, 141), (34, 138, 141), (34, 139, 141), (33, 140, 141), (33, 141, 140), (33, 142, 140), (32, 143, 140), (32, 144, 140), (32, 145, 140),
(31, 146, 140), (31, 147, 139), (31, 148, 139), (31, 149, 139), (31, 150, 139), (30, 151, 138), (30, 152, 138), (30, 153, 138), (30, 153, 138), (30, 154, 137), (30, 155, 137), (30, 156, 137), (30, 157, 136), (30, 158, 136), (30, 159, 136), (30, 160, 135), (31, 161, 135), (31, 162, 134), (31, 163, 134), (32, 164, 133), (32, 165, 133), (33, 166, 133), (33, 167, 132), (34, 167, 132), (35, 168, 131), (35, 169, 130), (36, 170, 130), (37, 171, 129), (38, 172, 129), (39, 173, 128), (40, 174, 127), (41, 175, 127), (42, 176, 126), (43, 177, 125), (44, 177, 125), (46, 178, 124), (47, 179, 123), (48, 180, 122), (50, 181, 122), (51, 182, 121), (53, 183, 120), (54, 184, 119), (56, 185, 118), (57, 185, 118), (59, 186, 117), (61, 187, 116), (62, 188, 115), (64, 189, 114), (66, 190, 113), (68, 190, 112), (69, 191, 111), (71, 192, 110), (73, 193, 109), (75, 194, 108), (77, 194, 107), (79, 195, 105), (81, 196, 104), (83, 197, 103), (85, 198, 102), (87, 198, 101), (89, 199, 100), (91, 200, 98), (94, 201, 97), (96, 201, 96), (98, 202, 95), (100, 203, 93), (103, 204, 92), (105, 204, 91), (107, 205, 89), (109, 206, 88), (112, 206, 86), (114, 207, 85), (116, 208, 84), (119, 208, 82), (121, 209, 81), (124, 210, 79), (126, 210, 78), (129, 211, 76), (131, 211, 75), (134, 212, 73), (136, 213, 71), (139, 213, 70), (141, 214, 68), (144, 214, 67), (146, 215, 65), (149, 215, 63), (151, 216, 62), (154, 216, 60), (157, 217, 58), (159, 217, 56), (162, 218, 55), (165, 218, 53), (167, 219, 51), (170, 219, 50), (173, 220, 48), (175, 220, 46), (178, 221, 44), (181, 221, 43), (183, 221, 41), (186, 222, 39), (189, 222, 38), (191, 223, 36), (194, 223, 34), (197, 223, 33), (199, 224, 31), (202, 224, 30), (205, 224, 29), (207, 225, 28), (210, 225, 27), (212, 225, 26), (215, 226, 25), (218, 226, 24), (220, 226, 24), (223, 227, 24), (225, 227, 24), (228, 227, 24), (231, 228, 25), (233, 228, 25), (236, 228, 26), (238, 229, 27), (241, 229, 28), (243, 229, 30), (246, 230, 31), (248, 230, 33), (250, 230, 34),
(253, 231, 36)], dtype=np.float64)
    import matplotlib as mpl
    import matplotlib.cm as cm
    # Evenly spaced anchor positions in [0, 1], one per colour entry.
    position = np.linspace(0, 1, len(colors))
    # Normalize 0-255 channel values to the 0-1 range matplotlib expects.
    colors /= 256.0
    cdict = {}
    for i, cname in enumerate(['red', 'green', 'blue']):
        cdict[cname] = [(pos, c[i], c[i]) for pos, c in zip(position, colors)]
    cmap = mpl.colors.LinearSegmentedColormap('custom', cdict, 256)
    # NOTE(review): cm.register_cmap is deprecated and removed in
    # matplotlib >= 3.9; newer code should use matplotlib.colormaps.register.
    # Kept as-is to preserve behaviour on the matplotlib version in use.
    cm.register_cmap(name='viridis', cmap=cmap)
    return cmap
| 299.466667
| 4,030
| 0.502671
| 839
| 4,492
| 2.688915
| 0.321812
| 0.008865
| 0.007092
| 0.009752
| 0.018617
| 0
| 0
| 0
| 0
| 0
| 0
| 0.540421
| 0.195904
| 4,492
| 14
| 4,031
| 320.857143
| 0.084164
| 0
| 0
| 0
| 0
| 0
| 0.005565
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.230769
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
08c6e43388818cb390e4c8a791d912754c39da52
| 61
|
py
|
Python
|
code/main.py
|
nazia-alam/SoftwareEngineeringHW1
|
23c60147a99957cc64a855b708dd4780de3b499a
|
[
"MIT"
] | null | null | null |
code/main.py
|
nazia-alam/SoftwareEngineeringHW1
|
23c60147a99957cc64a855b708dd4780de3b499a
|
[
"MIT"
] | null | null | null |
code/main.py
|
nazia-alam/SoftwareEngineeringHW1
|
23c60147a99957cc64a855b708dd4780de3b499a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
def add_numbers(x, y):
    """Return the sum of *x* and *y*."""
    total = x + y
    return total
| 15.25
| 22
| 0.639344
| 12
| 61
| 3.166667
| 0.833333
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163934
| 61
| 3
| 23
| 20.333333
| 0.745098
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
08d71f82c6e09fc200c4797e2b0e6c45d885cf46
| 134
|
py
|
Python
|
lib/utils/html/__init__.py
|
arkanister/minitickets
|
e74dd03f1bcaadf2b196a1ce69c7c959dc8925fd
|
[
"Apache-2.0"
] | null | null | null |
lib/utils/html/__init__.py
|
arkanister/minitickets
|
e74dd03f1bcaadf2b196a1ce69c7c959dc8925fd
|
[
"Apache-2.0"
] | 2
|
2015-03-13T19:37:21.000Z
|
2018-06-20T23:06:41.000Z
|
lib/utils/html/__init__.py
|
arkanister/minitickets
|
e74dd03f1bcaadf2b196a1ce69c7c959dc8925fd
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from .base import AttributeDict
from .icons.base import Icon
__version__ = '0.0.1'
__author__ = 'arkanister'
| 19.142857
| 31
| 0.701493
| 18
| 134
| 4.777778
| 0.777778
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035088
| 0.149254
| 134
| 7
| 32
| 19.142857
| 0.719298
| 0.156716
| 0
| 0
| 0
| 0
| 0.133929
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
08eecf3d503a2c460962d2eef68f700790b551db
| 54
|
py
|
Python
|
attacks/autoattack/__init__.py
|
snu-mllab/preemptive-robustification
|
03a88063ac9f4bad6edf86eb47e5cdc4b06ad43c
|
[
"MIT"
] | 2
|
2021-12-14T06:08:12.000Z
|
2021-12-27T06:04:33.000Z
|
attacks/autoattack/__init__.py
|
snu-mllab/preemptive-robustification
|
03a88063ac9f4bad6edf86eb47e5cdc4b06ad43c
|
[
"MIT"
] | null | null | null |
attacks/autoattack/__init__.py
|
snu-mllab/preemptive-robustification
|
03a88063ac9f4bad6edf86eb47e5cdc4b06ad43c
|
[
"MIT"
] | 1
|
2021-12-27T08:09:40.000Z
|
2021-12-27T08:09:40.000Z
|
from .autoattack import AutoAttackL2, AutoAttackLinf
| 18
| 52
| 0.851852
| 5
| 54
| 9.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.111111
| 54
| 2
| 53
| 27
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3ea82bbd922e4fe21323d98b715aae402f11d448
| 14,330
|
py
|
Python
|
fusion-ep-to-np1-sm-buf-overflows/session.testapi.py
|
vlvassilev/ecoc-demo-2018
|
3f813fdff3c2405561b32122bbe3c51c001ff81b
|
[
"BSD-3-Clause"
] | 1
|
2019-03-06T13:52:14.000Z
|
2019-03-06T13:52:14.000Z
|
fusion-ep-to-np1-sm-buf-overflows/session.testapi.py
|
vlvassilev/ecoc-demo-2018
|
3f813fdff3c2405561b32122bbe3c51c001ff81b
|
[
"BSD-3-Clause"
] | null | null | null |
fusion-ep-to-np1-sm-buf-overflows/session.testapi.py
|
vlvassilev/ecoc-demo-2018
|
3f813fdff3c2405561b32122bbe3c51c001ff81b
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
import lxml
from lxml import etree
import time
import sys, os
import argparse
from collections import namedtuple
import tntapi
sys.path.append("../common")
import testsuiteapi
import yangrpc
from yangcli import yangcli
# XPath prefix -> namespace URI map used by all etree .xpath() calls below.
namespaces={"nc":"urn:ietf:params:xml:ns:netconf:base:1.0",
"nd":"urn:ietf:params:xml:ns:yang:ietf-network",
"if":"urn:ietf:params:xml:ns:yang:ietf-interfaces"}
def yangcli_ok_script(yconn, yangcli_script):
    """Execute each non-empty line of *yangcli_script* on *yconn*.

    Every command must return exactly one <ok/> element, otherwise the
    assertion fails and the test aborts.
    """
    for line in yangcli_script.splitlines():
        line=line.strip()
        if not line:
            continue
        print("Executing: "+line)
        ok = yangcli(yconn, line).xpath('./ok')
        assert(len(ok)==1)
def validate_step_common(before, after, step):
    """Validate per-interface packet-counter deltas for one test step.

    *before*/*after* are network state snapshots taken around the step;
    *step* is the step number (step 2 runs mixed GST+SM load on split
    port groups, other steps run SM-only on all ten ge ports).
    """
    mylinks = tntapi.parse_network_links(before)
    t1 = tntapi.parse_network_nodes(before)
    t2 = tntapi.parse_network_nodes(after)
    delta = tntapi.get_network_counters_delta(before,after)
    testsuiteapi.print_state(before, after)
    after = tntapi.strip_namespaces(after)
    print("#1.1 - Asserts for step_%d."%(step))
    if(step==2):
        # Step 2: SM traffic only on ge0-4; ge5-9 carry GST, so no SM output.
        interfaces_off={"ge5", "ge6", "ge7", "ge8", "ge9"}
        interfaces_on={"ge0", "ge1", "ge2", "ge3", "ge4"}
    else:
        # NOTE(review): {} is an empty dict, not an empty set; iterating it
        # yields nothing either way, so behaviour is unaffected.
        interfaces_off={}
        interfaces_on={"ge0", "ge1", "ge2", "ge3", "ge4", "ge5", "ge6", "ge7", "ge8", "ge9"}
    total_expected_xe0=0
    for if_name in interfaces_on:
        print("Checking on: " + if_name)
        # The loopback must have returned at least the full generated stream.
        assert(delta["remote"][if_name].out_pkts>=1000000)
        assert(delta["remote"][if_name].out_pkts==delta["remote"][if_name].in_pkts)
        # Every packet lost between remote in and local out must be accounted
        # for by the two SM buffer-overflow counters.
        assert((delta["remote"][if_name].in_pkts-delta["local"][if_name].out_pkts)==(delta["remote"][if_name].fusion_ep_to_np1_sm_buf_overflows+delta["local"][if_name].fusion_np1_to_ep_sm_drop_reinsert_buf_overflows))
        if(step==2):
            # Under combined GST+SM load overflows are expected to occur.
            assert(delta["remote"][if_name].fusion_ep_to_np1_sm_buf_overflows>0)
        else:
            assert(delta["remote"][if_name].fusion_ep_to_np1_sm_buf_overflows==0)
        total_expected_xe0=total_expected_xe0+delta["local"][if_name].out_pkts
    #assert(total_expected_xe0==delta["local"]["xe0"].out_pkts)
    for if_name in interfaces_off:
        print("Checking off: " + if_name)
        assert(delta["local"][if_name].out_pkts==0)
    print("Reults!")  # sic - typo in original runtime string, left unchanged
    time.sleep(10)
def step_common(network, conns, filter, step):
    """Run one traffic step on the remote node and validate the counters.

    Creates near-end loopbacks on ge0-ge9, starts SM traffic generators
    (and, for step 2, minimal-length GST generators on ge5-9), waits for
    them to finish, removes generators and loopbacks again, and finally
    compares the before/after state via validate_step_common().
    """
    state_before = tntapi.network_get_state(network, conns, filter=filter)
# print("Enable analyzers ...")
# analyzer_config="""
#<config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
# <interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">
# <interface>
# <name>ge0</name>
# <traffic-analyzer xmlns="http://transpacket.com/ns/traffic-analyzer" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="create"></traffic-analyzer>
# </interface>
# </interfaces>
#</config>
#"""
# tntapi.edit_config(conns["analyzer"],analyzer_config)
# tntapi.network_commit(conns)
    print("Create loopbacks ...")
    config="""
<config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">
"""
    for if_name in {"ge0", "ge1", "ge2", "ge3", "ge4", "ge5", "ge6", "ge7", "ge8", "ge9"}:
        config=config+"""
<interface>
<name>%(name)s</name>
<type xmlns:ianaift="urn:ietf:params:xml:ns:yang:iana-if-type">ianaift:ethernetCsmacd</type>
<loopback xmlns="http://transpacket.com/ns/transpacket-loopback" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="create">near-end</loopback>
</interface>
"""%{'name':if_name}
    config=config+"""
</interfaces>
</config>
"""
    tntapi.edit_config(conns["remote"],config)
    tntapi.network_commit(conns)
    time.sleep(1)
    print("Start traffic ...")
    config="""
<config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">
"""
    if(step==2):
        # Step 2 splits the ports: SM load on ge0-4, GST on ge5-9.
        sm_traffic_interfaces={"ge0","ge1","ge2","ge3","ge4"}
        gst_traffic_interfaces={"ge5","ge6","ge7","ge8","ge9"}
    else:
        sm_traffic_interfaces={"ge0", "ge1", "ge2", "ge3", "ge4", "ge5", "ge6", "ge7", "ge8", "ge9"}
        gst_traffic_interfaces={}
    # SM generators: 1500-octet frames, ether-type ABCD.
    for if_name in sm_traffic_interfaces:
        config=config+"""
<interface>
<name>%(name)s</name>
<type xmlns:ianaift="urn:ietf:params:xml:ns:yang:iana-if-type">ianaift:ethernetCsmacd</type>
<traffic-generator xmlns="http://transpacket.com/ns/transpacket-traffic-generator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="create">
<frame-size>1500</frame-size>
<interframe-gap>12</interframe-gap>
<total-frames>1000000</total-frames>
<ether-type xmlns="http://transpacket.com/ns/transpacket-traffic-generator-ethernet">ABCD</ether-type>
</traffic-generator>
</interface>
"""%{'name':if_name}
    # GST generators: minimal 64-octet frames, ether-type 88F7.
    for if_name in gst_traffic_interfaces:
        config=config+"""
<interface>
<name>%(name)s</name>
<type xmlns:ianaift="urn:ietf:params:xml:ns:yang:iana-if-type">ianaift:ethernetCsmacd</type>
<traffic-generator xmlns="http://transpacket.com/ns/transpacket-traffic-generator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="create">
<frame-size>64</frame-size>
<interframe-gap>12</interframe-gap>
<total-frames>1000000</total-frames>
<ether-type xmlns="http://transpacket.com/ns/transpacket-traffic-generator-ethernet">88F7</ether-type>
</traffic-generator>
</interface>
"""%{'name':if_name}
    config=config+"""
</interfaces>
</config>
"""
    tntapi.edit_config(conns["remote"],config)
    tntapi.network_commit(conns)
    if(step==2):
        # Longer wait: overflow-inducing mixed load takes longer to drain.
        time.sleep(100)
    else:
        time.sleep(10)
    print("Stop traffic ...")
    config="""
<config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">
"""
    for if_name in sm_traffic_interfaces.union(gst_traffic_interfaces):
        config=config+"""
<interface>
<name>%(name)s</name>
<traffic-generator xmlns="http://transpacket.com/ns/transpacket-traffic-generator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="delete"/>
</interface>
"""%{'name':if_name}
    config=config+"""
</interfaces>
</config>
"""
    tntapi.edit_config(conns["remote"],config)
    tntapi.network_commit(conns)
    time.sleep(1)
    print("Remove loopbacks ...")
    config="""
<config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">
"""
    for if_name in {"ge0", "ge1", "ge2", "ge3", "ge4", "ge5", "ge6", "ge7", "ge8", "ge9"}:
        config=config+"""
<interface>
<name>%(name)s</name>
<loopback xmlns="http://transpacket.com/ns/transpacket-loopback" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="delete"/>
</interface>
"""%{'name':if_name}
    config=config+"""
</interfaces>
</config>
"""
    tntapi.edit_config(conns["remote"],config)
    tntapi.network_commit(conns)
# print("Disable analyzers ...")
# analyzer_config="""
#<config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
# <interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">
# <interface>
# <name>ge0</name>
# <traffic-analyzer xmlns="http://transpacket.com/ns/traffic-analyzer" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0" nc:operation="delete"></traffic-analyzer>
# </interface>
# </interfaces>
#</config>
#"""
# tntapi.edit_config(conns["analyzer"],analyzer_config)
# tntapi.network_commit(conns)
    state_after = tntapi.network_get_state(network, conns, filter=filter)
    return validate_step_common(state_before, state_after, step)
def terminate(network, conns, yconns, filter):
    """Delete all fusion-streams and interface config on both nodes and commit."""
    for name in {"local","remote"}:
        ok = yangcli(yconns[name], "delete /fusion-streams").xpath('./ok')
        ok = yangcli(yconns[name], "delete /interfaces").xpath('./ok')
        #assert(len(ok)==1)
    tntapi.network_commit(conns)
def init(network, conns, yconns, filter):
    """Wipe any existing config, then build the fusion-stream forwarding setup.

    Both nodes get drop-and-forward rules on the network ports (n0/n1) plus
    per-endpoint (e0-e9) aggregation rules, pushed via yangcli and committed
    once at the end.
    """
    # Clean slate on both nodes first.
    for name in {"local","remote"}:
        ok = yangcli(yconns[name], "delete /fusion-streams").xpath('./ok')
        ok = yangcli(yconns[name], "delete /interfaces").xpath('./ok')
        #assert(len(ok)==1)
    tntapi.network_commit(conns)
    yangcli_script_local='''
create /fusion-streams/drop-and-forward[in-port='n0'][filter-rule='matched-tpid'] -- priority=gst filter/tpid=88F7 push-vlan-tag/vlanid=100 push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1 forward/pop-vlan-tag=false drop[out-port='e0']/pop-vlan-tag=true drop[out-port='e1']/pop-vlan-tag=true drop[out-port='e2']/pop-vlan-tag=true drop[out-port='e3']/pop-vlan-tag=true drop[out-port='e4']/pop-vlan-tag=true drop[out-port='e5']/pop-vlan-tag=true drop[out-port='e6']/pop-vlan-tag=true drop[out-port='e7']/pop-vlan-tag=true drop[out-port='e8']/pop-vlan-tag=true drop[out-port='e9']/pop-vlan-tag=true
create /fusion-streams/drop-and-forward[in-port='n0'][filter-rule='unmatched'] -- priority=sm push-vlan-tag/vlanid=200 push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1 forward/pop-vlan-tag=false
create /fusion-streams/drop-and-forward[in-port='n1'][filter-rule='100'] -- priority=gst filter/tpid=9100 forward/pop-vlan-tag=true
create /fusion-streams/drop-and-forward[in-port='n1'][filter-rule='200'] -- priority=sm filter/tpid=9100 forward/pop-vlan-tag=true
'''
    # Per-endpoint rules for e0-e9; VLAN id 1<i> identifies each endpoint.
    for i in range(10):
        yangcli_script_local=yangcli_script_local+'''
create /fusion-streams/aggregation[in-port='e%d']/sm -- out-port=n1 push-vlan-tag/vlanid=1%d push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1
create /fusion-streams/aggregation[in-port='e%d']/gst -- out-port=n0 filter/ether-type=88F7 filter/tpid=88F7 filter/vlanid=1
create /fusion-streams/drop-and-forward[in-port='n1'][filter-rule='1%d'] -- priority=sm filter/tpid=9100 drop[out-port='e%d']/pop-vlan-tag=true
'''%(i,i,i,i,i)
    yangcli_script_remote='''
create /fusion-streams/drop-and-forward[in-port='n0'][filter-rule='matched-tpid'] -- priority=gst filter/tpid=88F7 push-vlan-tag/vlanid=100 push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1 forward/pop-vlan-tag=false
create /fusion-streams/drop-and-forward[in-port='n0'][filter-rule='unmatched'] -- priority=sm push-vlan-tag/vlanid=200 push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1 forward/pop-vlan-tag=false
create /fusion-streams/drop-and-forward[in-port='n1'][filter-rule='100'] -- priority=gst filter/tpid=9100 drop[out-port='e0']/pop-vlan-tag=true drop[out-port='e1']/pop-vlan-tag=true drop[out-port='e2']/pop-vlan-tag=true drop[out-port='e3']/pop-vlan-tag=true drop[out-port='e4']/pop-vlan-tag=true drop[out-port='e5']/pop-vlan-tag=true drop[out-port='e6']/pop-vlan-tag=true drop[out-port='e7']/pop-vlan-tag=true drop[out-port='e8']/pop-vlan-tag=true drop[out-port='e9']/pop-vlan-tag=true forward/pop-vlan-tag=true
create /fusion-streams/drop-and-forward[in-port='n1'][filter-rule='200'] -- priority=sm filter/tpid=9100 forward/pop-vlan-tag=true
'''
    for i in range(10):
        yangcli_script_remote=yangcli_script_remote+'''
create /fusion-streams/aggregation[in-port='e%d']/gst -- filter/ether-type=88F7 filter/tpid=88F7 filter/vlanid=1 out-port=n1 push-vlan-tag/vlanid=100 push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1
create /fusion-streams/aggregation[in-port='e%d']/sm -- out-port=n1 push-vlan-tag/vlanid=1%d push-vlan-tag/pcp=0 push-vlan-tag/tpid=9100 push-vlan-tag/cfi=1
create /fusion-streams/drop-and-forward[in-port='n1'][filter-rule='1%d'] -- filter/tpid=9100 priority=sm drop[out-port='e%d']/pop-vlan-tag=true
''' % (i,i,i,i,i)
    yangcli_ok_script(yconns["local"], yangcli_script_local)
    yangcli_ok_script(yconns["remote"], yangcli_script_remote)
    tntapi.network_commit(conns)
def step_1(network, conns, filter):
    """Step 1: SM-only traffic on all ten ports (expects zero SM overflows)."""
    step_common(network, conns, filter, 1)
def step_2(network, conns, yconns, filter):
    """Step 2: mixed GST+SM load (expects SM buffer overflows > 0).

    NOTE(review): *yconns* is accepted but never used; kept for signature
    symmetry with the caller.
    """
    step_common(network, conns, filter, 2)
def step_3(network, conns, filter):
    """Step 3: repeat of step 1 to confirm recovery after the overflow step."""
    step_common(network, conns, filter, 3)
def main():
    """Parse --config, connect to the test network, run steps 1-3, tear down."""
    # The embedded <test-spec> below is printed verbatim as the report header.
    print("""
<test-spec>
===Objective===
Test validating fusion-ep-to-np1-sm-buf-overflows counter registers all SM packets lost due to buffer overflows.
===Procedure===
Generate point-to-point packet streams with TPID=ABCD from remote.ep0-9 to local.ep0-9. Each stream should be as close as possible to the 100% throughput limit so that packet loss due to buf overflow is guaranteed to happen when GST stream with minimal length packets are enabled on remote.ge5-9.
===DUT configuration===
This testcase is enhancing the [[VCU-110_Prototype_r1.6#FPGA_Setup]] configuration by adding identical data paths for ge5-9 interfaces.
====Steps====
# Create near-end loopbacks and generate SM streams of 1500 octet packets and 12 octet interframe gaps from remote.ge0-4 and GST streams of 64 octet frames with 12 octet interframe gaps form remote remote.ge5-ge9
## Assert for name:ge0-4 delta["remot"][name].out_pkts==delta["remote"][name].in_pkts>=1000000
## Assert for name:ge0-4 delta["local"][name].out_pkts==delta["remote"][name].in_pkts
## Assert delta["remote"][name].fusion_ep_to_np1_sm_buf_overflows==0
# Identical to Step 2 but generate SM traffic on all interfaces
## Assert for name:ge0-4 delta["remot"][name].out_pkts==delta["remote"][name].in_pkts>=1000000
## Assert for name:ge0-4 delta["local"][name].out_pkts==delta["remote"][name].in_pkts-delta["remote"][name].fusion_ep_to_np1_sm_buf_overflows
# Repeat the first step.
===Results===
# N.A.
===Acceptance criteria===
# assert((local_pkts_received-remote_pkts_sent==sm_buf_overflows)
</test-spec>
""")
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", help="Path to the netconf configuration *.xml file defining the configuration according to ietf-networks, ietf-networks-topology and netconf-node models e.g. ../networks.xml")
    args = parser.parse_args()
    tree=etree.parse(args.config)
    # First ietf-network node in the config describes the test topology.
    network = tree.xpath('/nc:config/nd:networks/nd:network', namespaces=namespaces)[0]
    conns = tntapi.network_connect(network)
    yconns = tntapi.network_connect_yangrpc(network)
    mylinks = tntapi.parse_network_links(network)
    filter = testsuiteapi.get_filter()
    init(network, conns, yconns, filter=filter)
    step_1(network, conns, filter=filter)
    step_2(network, conns, yconns, filter=filter)
    step_3(network, conns, filter=filter)
    terminate(network, conns, yconns, filter=filter)
#Test report
#Acceptance criteria
sys.exit(main())
fusion-ep-to-np1-sm-buf-overflowsfusion-ep-to-np1-sm-buf-overflowsfusion-ep-to-np1-sm-buf-overflows
| 44.228395
| 610
| 0.713538
| 2,198
| 14,330
| 4.5596
| 0.129663
| 0.040511
| 0.029934
| 0.03632
| 0.75474
| 0.720216
| 0.685093
| 0.670325
| 0.627919
| 0.623428
| 0
| 0.029649
| 0.100907
| 14,330
| 323
| 611
| 44.365325
| 0.748215
| 0.080879
| 0
| 0.465021
| 0
| 0.152263
| 0.617627
| 0.325672
| 0
| 0
| 0
| 0
| 0.057613
| 0
| null | null | 0
| 0.041152
| null | null | 0.045267
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3eb44e349546b49d8fecb8f4dd728f0b67f212e3
| 1,811
|
py
|
Python
|
comma.ai/data_generator.py
|
Intenzo21/Brain-Inspired-Deep-Imitation-Learning-for-Autonomous-Driving-Systems
|
31d23471bccf2c5fe64d5c628a21150d65c2aeec
|
[
"Apache-2.0"
] | 5
|
2021-12-21T06:57:12.000Z
|
2022-02-19T10:26:27.000Z
|
comma.ai/data_generator.py
|
Intenzo21/Deep-Imitation-Learning-for-Autonomous-Driving-Vehicles
|
31d23471bccf2c5fe64d5c628a21150d65c2aeec
|
[
"Apache-2.0"
] | 1
|
2021-11-26T11:38:34.000Z
|
2021-12-14T07:22:46.000Z
|
comma.ai/data_generator.py
|
Intenzo21/Deep-Imitation-Learning-for-Autonomous-Driving-Vehicles
|
31d23471bccf2c5fe64d5c628a21150d65c2aeec
|
[
"Apache-2.0"
] | 3
|
2021-06-07T12:51:35.000Z
|
2021-10-05T12:29:16.000Z
|
"""
Generate batches of data.
Program that implements the DataGenerator Sequence class used for data generation.
"""
import numpy as np
from keras.utils import Sequence
from glob import glob # Finds all pathnames matching specified pattern
import matplotlib.pyplot as plt
class DataGenerator(Sequence):
    """Keras ``Sequence`` that serves pre-batched ``.npy`` files.

    Each entry of ``x_files``/``y_files`` is one ready-made batch stored
    on disk, so ``__getitem__`` simply loads the pair at the requested
    index — no slicing or shuffling is done here.
    """

    def __init__(self, x_files, y_files, dims=(80, 80), n_channels=3):
        """Record the batch file lists and the image geometry.

        :param x_files: input (image batch) file names
        :param y_files: output (label, target) file names
        :param dims: image data dimensions (height x width)
        :param n_channels: image data channels (default 3 (RGB))
        """
        self.dims = dims
        self.x_files = x_files
        self.y_files = y_files
        self.n_batches = len(self)  # one batch per input file
        self.n_channels = n_channels

    def __len__(self):
        """Return the number of batches per epoch (one per input file)."""
        return len(self.x_files)

    def __getitem__(self, index):
        """Load and return the (inputs, targets) batch stored at *index*."""
        inputs = np.load(self.x_files[index])
        # Reorder (batch, channels, h, w) -> (batch, h, w, channels) so the
        # frames are human-interpretable images (not strictly necessary).
        inputs = inputs.transpose([0, 2, 3, 1])
        targets = np.load(self.y_files[index])
        return inputs, targets

    def on_epoch_end(self):
        """No end-of-epoch shuffling or bookkeeping is performed."""
        pass

    def __data_generation(self):
        """Unused hook; kept for interface parity with other generators."""
        pass
| 27.029851
| 87
| 0.637769
| 243
| 1,811
| 4.617284
| 0.366255
| 0.032086
| 0.035651
| 0.045455
| 0.090909
| 0.049911
| 0.049911
| 0
| 0
| 0
| 0
| 0.007734
| 0.28603
| 1,811
| 66
| 88
| 27.439394
| 0.860015
| 0.495306
| 0
| 0.090909
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.227273
| false
| 0.090909
| 0.181818
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
3ef01ef9f9d7665862e09117236c3ddf02c3f839
| 280
|
py
|
Python
|
src/TrackPoint.py
|
RadjahDri/paraglide-compet-analytics
|
5f47b931bcc05b7e78f93ac31cd3f475fd2cbc56
|
[
"MIT"
] | null | null | null |
src/TrackPoint.py
|
RadjahDri/paraglide-compet-analytics
|
5f47b931bcc05b7e78f93ac31cd3f475fd2cbc56
|
[
"MIT"
] | null | null | null |
src/TrackPoint.py
|
RadjahDri/paraglide-compet-analytics
|
5f47b931bcc05b7e78f93ac31cd3f475fd2cbc56
|
[
"MIT"
] | null | null | null |
### CLASSES
class TrackPoint:
    """A single GPS track fix: timestamp, position and altitude."""

    def __init__(self, time, coordinates, alt):
        """Store one track fix.

        :param time: datetime of the fix
        :param coordinates: position, kept exactly as given by the caller
        :param alt: altitude in metres (formatted as an integer)
        """
        self.time = time
        self.coordinates = coordinates
        self.alt = alt

    def __repr__(self):
        # Bug fix: %m is the MONTH directive; %M is the minute. An
        # "HH:MM:SS" clock string requires %H:%M:%S.
        return "%s %s %dm" % (self.time.strftime("%H:%M:%S"), self.coordinates, self.alt)
| 28
| 89
| 0.603571
| 35
| 280
| 4.6
| 0.457143
| 0.149068
| 0.223602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.246429
| 280
| 9
| 90
| 31.111111
| 0.763033
| 0.025
| 0
| 0
| 0
| 0
| 0.063197
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
4101a8b04c1ab8b85154242953d40e078020003c
| 158
|
py
|
Python
|
returntoclinicstation/apps.py
|
DaleProctor/tscharts
|
5447395e0aef0b949bef8426febdec2093cf37ef
|
[
"Apache-2.0"
] | 16
|
2016-08-17T21:39:10.000Z
|
2021-11-24T12:14:28.000Z
|
returntoclinicstation/apps.py
|
DaleProctor/tscharts
|
5447395e0aef0b949bef8426febdec2093cf37ef
|
[
"Apache-2.0"
] | 55
|
2017-04-23T18:12:04.000Z
|
2021-08-08T08:25:18.000Z
|
returntoclinicstation/apps.py
|
DaleProctor/tscharts
|
5447395e0aef0b949bef8426febdec2093cf37ef
|
[
"Apache-2.0"
] | 8
|
2017-08-11T02:11:46.000Z
|
2021-07-06T22:58:42.000Z
|
from __future__ import unicode_literals
from django.apps import AppConfig
class ReturntoclinicstationConfig(AppConfig):
    """Django application configuration for the ``returntoclinicstation`` app."""
    # Full Python path of the application this configuration applies to.
    name = 'returntoclinicstation'
| 19.75
| 45
| 0.829114
| 15
| 158
| 8.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126582
| 158
| 7
| 46
| 22.571429
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0.132911
| 0.132911
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
410785edb35f016f9c8f8612f0f9773e55f46124
| 61
|
py
|
Python
|
MAYDAY_STORAGE/stored_facts.py
|
flancast90/MAYDAY_FACTS
|
c3ca4269e9bf4ebbd8d0e880aa3817516a718e3d
|
[
"Apache-2.0"
] | 1
|
2021-09-10T23:15:55.000Z
|
2021-09-10T23:15:55.000Z
|
MAYDAY_STORAGE/stored_facts.py
|
flancast90/MAYDAY_FACTS
|
c3ca4269e9bf4ebbd8d0e880aa3817516a718e3d
|
[
"Apache-2.0"
] | null | null | null |
MAYDAY_STORAGE/stored_facts.py
|
flancast90/MAYDAY_FACTS
|
c3ca4269e9bf4ebbd8d0e880aa3817516a718e3d
|
[
"Apache-2.0"
] | null | null | null |
# saved facts for the program to remember go here at runtime
| 30.5
| 60
| 0.786885
| 11
| 61
| 4.363636
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196721
| 61
| 1
| 61
| 61
| 0.979592
| 0.95082
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4112171c6fec68b77cf5e58900c48d46064d4fd0
| 168
|
py
|
Python
|
tests/test_api_docs.py
|
ourresearch/journalsdb
|
169feb9be684eac59f3294dccdb319eb10fe1958
|
[
"MIT"
] | 8
|
2021-02-01T21:00:20.000Z
|
2022-01-25T09:51:24.000Z
|
tests/test_api_docs.py
|
ourresearch/journalsdb
|
169feb9be684eac59f3294dccdb319eb10fe1958
|
[
"MIT"
] | 43
|
2021-04-28T00:20:53.000Z
|
2022-03-09T00:39:56.000Z
|
tests/test_api_docs.py
|
ourresearch/journalsdb
|
169feb9be684eac59f3294dccdb319eb10fe1958
|
[
"MIT"
] | null | null | null |
def test_api_docs(api_client):
    """The API docs page should load (following redirects) and name the API."""
    response = api_client.get("/apidocs", follow_redirects=True)
    assert response.status_code == 200
    assert b"JournalsDB API Docs" in response.data
| 33.6
| 58
| 0.72619
| 27
| 168
| 4.296296
| 0.703704
| 0.12069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021429
| 0.166667
| 168
| 4
| 59
| 42
| 0.807143
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
41168ac6ecf24cc5753a8751fad38c42503448ae
| 95,534
|
py
|
Python
|
trunk/MOPS_Train.py
|
n5iln/railmops
|
f7d3b446435b31bad8cddf343f18ca7efb9eac10
|
[
"Unlicense"
] | 1
|
2015-03-30T12:10:56.000Z
|
2015-03-30T12:10:56.000Z
|
trunk/MOPS_Train.py
|
n5iln/railmops
|
f7d3b446435b31bad8cddf343f18ca7efb9eac10
|
[
"Unlicense"
] | null | null | null |
trunk/MOPS_Train.py
|
n5iln/railmops
|
f7d3b446435b31bad8cddf343f18ca7efb9eac10
|
[
"Unlicense"
] | null | null | null |
'''
Train Class
Model Operations Processing System. Copyright Brian Fairbairn 2009-2010. Licenced under the EUPL.
You may not use this work except in compliance with the Licence. You may obtain a copy of the
Licence at http://ec.europa.eu/idabc/eupl or as attached with this application (see Licence file).
Unless required by applicable law or agreed to in writing, software distributed under the Licence
is distributed on an 'AS IS' basis WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed
or implied. See the Licence governing permissions and limitations under the Licence.
Changes:
Rev 1 Added new routines Z040_trimop, Z041_trimop_shutdown and Z042_trimops_update.
Changes to update routines to reference these modules
Fixed failure on terminating unscheduled trains (sql change)
Corrected sql data input to read correct variable in XCARXT (remove car)
Removed import of unused module re. Removed unused variables.
Print consist only printed full list; check added PRCONS
Changed validation on time on REPORT (minutes)
Unused variables removed
Corrected bad reporting of trains running in reverse direction
Added Next station & time to TRAINS enquiry
Added Haulage power to LICONS enquiry
Only expected trains displayed for LINEUP
EST removed from LINEUP enquiry; not used
Strip added to commodity when checking for empty/loaded in LINEUP
Amended count on block to reflect use of word additional
Amended TTRAIN to correctly terminate trains running in 'opposite' direction
Amended REPORT to remove publication of loco details
Amended TTRAIN to handle termination of Unscheduled trains
Corrected sql getting schedule id in TTRAIN
Prevented termination of a train that has already been terminated
Added Ready trains to TRAINS enquiry
TRAINS amended to show a flag if the train terminates
Direction added as field and filer on LINEUP enquiry
Distinguishes between passenger, loaded and empty cars on lineup
Removed FROM station from LINEUP as direction was already given
Added Passenger Car reporting to LICONS
TRAINS enquiry changed to sort by next action time
'''
import MOPS_Element
class cTrains(MOPS_Element.cElement):
"""Trains define a locomotive and cars normally running against a schedule (although it is
    possible to run a train without a schedule). Trains have a code, type (Scheduled, Extra or
Unscheduled) and optionally the schedule that they are running against.
"""
    def utrain(self, message, Params):
        """Create an Unscheduled train (UTRAIN command).

            schedule       train code    locomotive
            Unscheduled -  required      required

        The departing station is taken from the locomotive. An Unscheduled
        train initially has no Running entries as the route is not known.
        ``Params`` is passed through to the triMOPS extract update.
        """
        if self.show_access(message, 'UTRAIN train;^locomotive^', 'N') != 0:
            return
        #train id ------------------------------------------------------------------------------
        train_id, rc = self.extract_field(message, 0, 'TRAIN ID')
        if rc > 1:
            return
        #check to see if a train is already running against this train id
        data = (train_id,)
        sql = 'select id from train where train = ?'
        count, dummy = self.db_read(sql, data)
        if count < 0:
            return
        if count != 0:
            print('* THIS TRAIN ID ALREADY USED BY ANOTHER TRAIN')
            return
        #check to see if a schedule is already running against this train id
        data = (train_id,)
        sql = 'select id from schedule where schedule = ?'
        count, dummy = self.db_read(sql, data)
        if count < 0:
            return
        if count != 0:
            print('* THIS TRAIN ID ALREADY ON USE ON A SCHEDULE')
            return
        #loco -----------------------------------------------------------------------
        loco, rc = self.extract_field(message, 1, 'LOCO ID')
        if rc > 1:
            return
        # station != '' excludes locos already attached to a train
        t = (loco, '')
        sql = 'select locomotive.station, locotype.oper_mode, locomotive.is_powered ' +\
              'from locomotive, locotype ' +\
              'where locomotive.loco = ? and locomotive.locotype = locotype.locotype ' +\
              'and locomotive.station != ?'
        count, ds_loco = self.db_read(sql, t)
        if count < 0:
            return
        if count == 0:
            print('* LOCOMOTIVE CODE DOES NOT EXIST OR IS ALREADY ON A TRAIN')
            return
        else:
            # last row wins; loco codes are presumably unique -- TODO confirm
            for row in ds_loco:
                at_station = row[0]
                oper_mode = row[1]
                powered = row[2]
        if oper_mode == 'S':
            print('* NOT ALLOWED TO CREATE TRAIN WITH SLAVE/DUMMY LOCOMOTIVE')
            return
        if powered != 'P':
            print('* LOCOMOTIVE IS NOT AVAILABLE TO ALLOCATE TO A TRAIN')
            return
        #carry out the update =====================================================
        t = (train_id, 'U', at_station, 'R', '')
        sql = 'insert into train values (null, ?, ?, ?, ?, ?)'
        if self.db_update(sql, t) != 0:
            return
        # NOTE(review): this update keys on the loco code (etrain/strain key
        # on locomotive.id) -- confirm both paths are intended
        t = ('', 0, train_id, loco)
        sql = 'update locomotive set station = ?, place_id = ?, train = ? where loco = ?'
        if self.db_update(sql, t) != 0:
            return
        #update extract to triMOPS
        self.Z042_trimops_update (Params, train_id, 0, False) # Rev 1
        print('NEW UNSCHEDULED TRAIN:' + train_id + ' LOCO:' + loco + 'CURRENT STATION:' + at_station)
        return
    def etrain(self, message, Params):
        """Create an Extra train running against an existing schedule (ETRAIN).

            schedule    train code    locomotive
            Extra -     required      required      required

        The originating station is taken from the schedule. Running entries
        are copied from the schedule's Timing entries, using the same
        running times as the schedule the Extra runs against.
        """
        if self.show_access(message,'ETRAIN schedule;train;^locomotive^', 'N') != 0:
            return
        #schedule id ------------------------------------------------------------------------------
        schedule, rc = self.extract_field(message, 0, 'SCHEDULE ID')
        if rc > 1:
            return
        #check to see if the schedule id exists and is in the correct status
        t = (schedule, )
        sql = 'select id, status, orig_station from schedule where schedule = ?'
        count, ds_schedules = self.db_read(sql, t)
        if count < 0:
            return
        if count == 0:
            print('* SCHEDULE ID DOES NOT EXIST')
            return
        else:
            for row in ds_schedules:
                status = row[1]
                station = row[2]
        if not (status == 'R' or status == 'A'):
            print('* CAN ONLY RUN EXTRAS AGAINST ACTIVE OR RUNNING SCHEDULES')
            return
        #train id ------------------------------------------------------------------------------
        train_id, rc = self.extract_field(message, 1, 'TRAIN ID')
        if rc > 1:
            return
        t = (train_id,)
        #check to see if a train is already running against this train id
        sql = 'select id from train where train = ?'
        count, dummy = self.db_read(sql, t)
        if count < 0:
            return
        if count != 0:
            print('* THIS TRAIN ID ALREADY USED BY ANOTHER TRAIN')
            return
        if schedule == train_id:
            print('* SCHEDULE ID AND TRAIN ID CANNOT BE THE SAME')
            return
        else:
            # the extra's id must not clash with any existing schedule id
            t = (train_id, )
            sql = 'select id, status from schedule where schedule = ?'
            count, ds_schedules = self.db_read(sql, t)
            if count < 0:
                return
            if count > 0:
                print('* EXTRA ID ALREADY EXISTS AS A SCHEDULE ID')
                return
        #loco -----------------------------------------------------------------------
        loco, rc = self.extract_field(message, 2, 'LOCO ID')
        if rc > 1:
            return
        # station != '' excludes locos already attached to a train
        t = (loco, '')
        sql = 'select locomotive.station, locotype.oper_mode, locomotive.is_powered, ' +\
              'locomotive.id ' +\
              'from locomotive, locotype ' +\
              'where locomotive.loco = ? and locomotive.locotype = locotype.locotype ' +\
              'and locomotive.station != ?'
        count, ds_loco = self.db_read(sql, t)
        if count < 0:
            return
        if count == 0:
            print('* LOCOMOTIVE CODE DOES NOT EXIST OR IS ALREADY ON A TRAIN')
            return
        else:
            for row in ds_loco:
                at_station = row[0]
                oper_mode = row[1]
                powered = row[2]
                loco_id = row[3]
        if oper_mode == 'S':
            print('* NOT ALLOWED TO CREATE TRAIN WITH SLAVE/DUMMY LOCOMOTIVE')
            return
        if powered != 'P':
            print('* LOCOMOTIVE IS NOT AVAILABLE TO ALLOCATE TO A TRAIN')
            return
        if station != at_station:
            print('* LOCOMOTIVE NOT AT DEPARTURE STATION FOR SCHEDULED TRAIN')
            return
        #carry out the update =====================================================
        t = (train_id, 'E', station, 'R', schedule)
        sql = 'insert into train values (null, ?, ?, ?, ?, ?)'
        if self.db_update(sql, t) != 0:
            return
        t = ('', 0, train_id, loco_id)
        sql = 'update locomotive set station = ?, place_id = ?, train = ? where id = ?'
        if self.db_update(sql, t) != 0:
            return
        # copy the schedule's Timing entries into Running for this train
        t = (schedule,)
        sql = 'select section, depart_station, arrive_station, planned_depart, planned_arrive ' +\
              'from timings where schedule = ? order by section'
        count, ds_schedule = self.db_read(sql, t)
        if count < 0:
            return
        for row in ds_schedule:
            t2 = (train_id, row[0], row[1], row[2], row[3], row[4], '', '')
            sql = 'insert into running values (null, ?, ?, ?, ?, ?, ?, ?, ?)'
            if self.db_update(sql, t2) != 0:
                return
        #update extract to triMOPS
        self.Z042_trimops_update (Params, train_id, schedule, False) # Rev 1
        print('EXTRA TRAIN:' + schedule + ' LOCO:' + loco + 'CURRENT STATION:' + at_station)
        return
    def strain(self, message, Params):
        """Create a Scheduled train (STRAIN command).

            schedule     train code    locomotive
            Scheduled -  required      optional      required

        The departure station is taken from the schedule; if no train code
        is given the schedule id doubles as the train id. Running entries
        are copied from the schedule's Timing entries.

        NOTE(review): if the schedule field is blank, ``station`` and
        ``schedule_id`` are never assigned and the later references would
        raise NameError -- presumably extract_field guarantees a non-blank
        schedule here; verify upstream.
        """
        if self.show_access(message, 'STRAIN schedule;(train);^locomotive^', 'N') != 0:
            return
        #schedule id ------------------------------------------------------------------------------
        schedule, rc = self.extract_field(message, 0, 'SCHEDULE ID')
        if rc > 1:
            return
        if schedule != '':
            #check to see if the schedule id exists and is in the correct status
            t = (schedule, 'A')
            sql = 'select id, status, orig_station from schedule where schedule = ? and status = ?'
            count, ds_schedules = self.db_read(sql, t)
            if count < 0:
                return
            if count == 0:
                print('* SCHEDULE ID DOES NOT EXIST OR IS NOT IN ACTIVE STATUS')
                return
            else:
                for row in ds_schedules:
                    schedule_id = row[0]
                    station = row[2]
        #train id ------------------------------------------------------------------------------
        # NOTE(review): ltrain tests rc == 0 for the same optional field;
        # confirm which return code marks "value supplied"
        value, rc = self.extract_field(message, 1, '')
        if rc == 1:
            train_id= value
        else:
            train_id = ''
        if train_id == '':
            train_id = schedule
        #check to see if a train is already running against this train id
        data = (train_id,)
        sql = 'select id from train where train = ?'
        count, dummy = self.db_read(sql, data)
        if count < 0:
            return
        if count != 0:
            print('* THIS TRAIN ID ALREADY USED BY ANOTHER TRAIN')
            return
        #loco -----------------------------------------------------------------------
        loco, rc = self.extract_field(message, 2, 'LOCO ID')
        if rc > 1:
            return
        # station != '' excludes locos already attached to a train
        t = (loco, '')
        sql = 'select locomotive.station, locotype.oper_mode, locomotive.is_powered, ' +\
              'locomotive.id ' +\
              'from locomotive, locotype ' +\
              'where locomotive.loco = ? and locomotive.locotype = locotype.locotype ' +\
              'and locomotive.station != ?'
        count, ds_loco = self.db_read(sql, t)
        if count < 0:
            return
        if count == 0:
            print('* LOCOMOTIVE CODE DOES NOT EXIST OR IS ALREADY ON A TRAIN')
            return
        else:
            for row in ds_loco:
                at_station = row[0]
                oper_mode = row[1]
                powered = row[2]
                loco_id = row[3]
        if oper_mode == 'S':
            print('* NOT ALLOWED TO CREATE TRAIN WITH SLAVE/DUMMY LOCOMOTIVE')
            return
        if powered != 'P':
            print('* LOCOMOTIVE IS NOT AVAILABLE TO ALLOCATE TO A TRAIN')
            return
        if station != at_station:
            print('* LOCOMOTIVE NOT AT DEPARTURE STATION FOR SCHEDULED TRAIN')
            return
        #carry out the update =====================================================
        t = (train_id, 'S', station, 'R', schedule)
        sql = 'insert into train values (null, ?, ?, ?, ?, ?)'
        if self.db_update(sql, t) != 0:
            return
        # the schedule moves from Active to Running
        t = ('R', schedule_id)
        sql = 'update schedule set status = ? where id = ?'
        if self.db_update(sql, t) != 0:
            return
        t = ('', 0, train_id, loco_id)
        sql = 'update locomotive set station = ?, place_id = ?, train = ? where id = ?'
        if self.db_update(sql, t) != 0:
            return
        # copy the schedule's Timing entries into Running for this train
        t = (schedule,)
        sql = 'select section, depart_station, arrive_station, planned_depart, planned_arrive ' +\
              'from timings where schedule = ? order by section'
        count, ds_schedule = self.db_read(sql, t)
        if count < 0:
            return
        for row in ds_schedule:
            t2 = (train_id, row[0], row[1], row[2], row[3], row[4], '', '')
            sql = 'insert into running values (null, ?, ?, ?, ?, ?, ?, ?, ?)'
            if self.db_update(sql, t2) != 0:
                return
        #update extract to triMOPS
        self.Z042_trimops_update (Params, train_id, schedule, False) # Rev 1
        print("NEW TRAIN: " + schedule + " LOCO: " + loco + " CURRENT STATION: " + at_station)
        return
    def ltrain(self, message, Flash, Params):
        """Create a train starting at a later origin on its schedule (LTRAIN).

            type            schedule    train code    locomotive
            Scheduled - S   required    optional      required
            Extra     - E   required    required      required

        The locomotive's current station selects the schedule section the
        train starts from; Running entries are copied from that section
        onward. A flash message announces the late-origin train.
        """
        if self.show_access(message, 'LTRAIN type[E/S];schedule;(train);^locomotive^'
                            , 'N') != 0:
            return
        #train type---------------------------------------------------------------------------------
        train_type, rc = self.extract_field(message, 0, 'TRAIN TYPE')
        if rc > 1:
            return
        if not (train_type == 'S' or train_type == 'E'):
            print('* TRAIN TYPE MUST BE (S)CHEDULED OR (E)XTRA')
            return
        #schedule id ------------------------------------------------------------------------------
        schedule, rc = self.extract_field(message, 1, 'SCHEDULE ID')
        if rc > 1:
            return
        if schedule != '':
            #check to see if the schedule id exists and is in the correct status
            t = (schedule, )
            sql = 'select id, status from schedule where schedule = ?'
            count, ds_schedules = self.db_read(sql, t)
            if count < 0:
                return
            if count == 0:
                print('* SCHEDULE ID DOES NOT EXIST')
                return
            else:
                for row in ds_schedules:
                    schedule_id = row[0]
                    status = row[1]
            if not (status == 'A' or status == 'R'):
                print('* STATUS MUST BE IN ACTIVE OR RUNNING STATUS')
                return
        #train id ------------------------------------------------------------------------------
        # NOTE(review): strain tests rc == 1 for the same optional field;
        # confirm which return code marks "value supplied"
        value, rc = self.extract_field(message, 2, '')
        if rc == 0:
            train_id= value
        else:
            train_id = ''
        if train_id == '' and train_type == 'E':
            print('* TRAIN ID REQUIRED FOR EXTRA TRAIN')
            return
        if train_id == '':
            train_id = schedule
        #check to see if a train is already running against this train id
        data = (train_id,)
        sql = 'select id from train where train = ?'
        count, dummy = self.db_read(sql, data)
        if count < 0:
            return
        if count != 0:
            print('* THIS TRAIN ID ALREADY USED BY ANOTHER TRAIN')
            return
        #check to see if an extra train is using another schedule
        if train_type == 'E':
            data = (train_id,)
            sql = 'select id from schedule where schedule = ?'
            count, dummy = self.db_read(sql, data)
            if count < 0:
                return
            if count != 0:
                print('* THIS TRAIN ID ALREADY USED BY A SCHEDULE')
                return
        #loco -----------------------------------------------------------------------
        loco, rc = self.extract_field(message, 3, 'LOCO ID')
        if rc > 1:
            return
        # station != '' excludes locos already attached to a train
        t = (loco, '')
        sql = 'select locomotive.station, locotype.oper_mode, locomotive.is_powered, ' +\
              'locomotive.id ' +\
              'from locomotive, locotype ' +\
              'where locomotive.loco = ? and locomotive.locotype = locotype.locotype ' +\
              'and locomotive.station != ?'
        count, ds_loco = self.db_read(sql, t)
        if count < 0:
            return
        if count == 0:
            print('* LOCOMOTIVE CODE DOES NOT EXIST OR IS ALREADY ON A TRAIN')
            return
        else:
            for row in ds_loco:
                station = row[0]
                oper_mode = row[1]
                powered = row[2]
                loco_id = row[3]
        if oper_mode == 'S':
            print('* NOT ALLOWED TO CREATE TRAIN WITH SLAVE/DUMMY LOCOMOTIVE')
            return
        if powered != 'P':
            print('* LOCOMOTIVE IS NOT AVAILABLE TO ALLOCATE TO A TRAIN')
            return
        # the loco's station must be a departure point somewhere on the schedule
        data = (schedule, station)
        sql = 'select section, depart_station from timings where schedule = ? ' +\
              'and depart_station = ? order by section'
        count, ds_schedule = self.db_read(sql, data)
        if count < 0:
            return
        if count == 0:
            print('* DEPARTING STATION NOT ON SCHEDULE')
            return
        else:
            # last matching section wins if the station appears more than once
            for row in ds_schedule:
                start_section = row[0]
        #carry out the update =====================================================
        t = (train_id, train_type, station, 'R', schedule)
        sql = 'insert into train values (null, ?, ?, ?, ?, ?)'
        if self.db_update(sql, t) != 0:
            return
        t = ('R', schedule_id)
        sql = 'update schedule set status = ? where id = ?'
        if self.db_update(sql, t) != 0:
            return
        t = ('', 0, train_id, loco_id)
        sql = 'update locomotive set station = ?, place_id = ?, train = ? where id = ?'
        if self.db_update(sql, t) != 0:
            return
        # copy Timing entries from the starting section onward into Running
        t = (schedule, start_section)
        sql = 'select section, depart_station, arrive_station, planned_depart, planned_arrive ' +\
              'from timings where schedule = ? and section >= ? order by section'
        count, ds_schedule = self.db_read(sql, t)
        if count < 0:
            return
        for row in ds_schedule:
            t2 = (train_id, row[0], row[1], row[2], row[3], row[4], '', '')
            sql = 'insert into running values (null, ?, ?, ?, ?, ?, ?, ?, ?)'
            if self.db_update(sql, t2) != 0:
                return
        message = ('LATE ORIGIN TRAIN:' + train_id + ' LOCO:' + loco + ' STARTING STATION:' + station)
        print(message)
        Flash.Z003_generate_flash_message(message, Params)
        #update extract to triMOPS
        self.Z042_trimops_update (Params, train_id, schedule, False) # Rev 1
        return
    def lineup(self, message, Params):
        """Show trains on their way to a given station (LINEUP enquiry).

        For each expected train the report shows: train code, estimated
        arrival, origin, destination, class, direction, and counts of
        locos, passenger cars, loaded cars and empty cars plus train
        weight and length. An optional direction argument filters the
        list; output is paged every 20 lines ('x' aborts).
        """
        if self.show_access(message, 'LINEUP ^station^;(direction)', 'N') != 0:
            return
        #station-----------------------------------------------------------------------
        station, rc = self.extract_field(message, 0, 'STATION')
        if rc > 1:
            return
        data = (station,)
        sql = 'select short_name from station where station = ?'
        count, ds_stations = self.db_read(sql, data)
        if count < 0:
            return
        if count == 0:
            print('* STATION DOES NOT EXIST')
            return
        else:
            for row in ds_stations:
                station_name = row[0]
        #direction---------------------------------------------------------------------
        value, rc = self.extract_field(message, 1, '')
        if rc == 0:
            direction = value
        else:
            direction = '*'    # '*' means "all directions"
        print('LINE UP ENQUIRY FOR ' + station + ' ' + station_name)
        # build the column titles
        titles = self.x_field('TRAIN=', 6) + ' ' +\
                 self.x_field('DUE=', 4) + ' ' +\
                 self.x_field('ORIGIN===', self.staxsize) + ' ' +\
                 self.x_field('DEST=====', self.staxsize) + ' ' +\
                 self.x_field('C', 1) + ' ' +\
                 self.x_field('D', 1) + ' ' +\
                 self.x_field('#LOCO', 5) + ' ' +\
                 self.x_field('#PASS', 5) + ' ' +\
                 self.x_field('#LOAD', 5) + ' ' +\
                 self.x_field('#MTY=', 5) + ' ' +\
                 self.x_field('WGHT=', 5) + ' ' +\
                 self.x_field('LNGTH', 5)
        # act_arrive = '' restricts the join to trains not yet arrived here
        data = (station, '')
        sql = 'select running.train, running.depart_station, running.est_depart, ' +\
              'running.act_depart, running.est_arrive, train.schedule, ' +\
              'schedule.orig_station, schedule.dest_station, schedule.class, ' +\
              'schedule.direction ' +\
              'from running, train, schedule ' +\
              'where running.arrive_station = ? and ' +\
              'running.train = train.train and train.schedule = schedule.schedule ' +\
              'and running.act_arrive = ? order by running.est_arrive' #Rev 1
        count, ds_timings = self.db_read(sql, data)
        if count < 0:
            return
        line_count = 0
        counter = 0
        for row in ds_timings:
            train_id = row[0]
            depart = row[1]
            est_arrive = row[4]
            orig_station = row[6]
            dest_station = row[7]
            sched_class = row[8]
            schedule_direction = row[9] #Rev 1
            loco_count = 0
            pass_count = 0
            load_count = 0
            empty_count = 0
            weight_count = 0
            length_count = 0
            # accumulate loco weight/length for this train
            data = (train_id,)
            sql = 'select locotype.length, locomotive.weight from locomotive, locotype ' +\
                  'where locomotive.train = ? ' +\
                  'and locomotive.locotype = locotype.locotype'
            loco_count, ds_locos = self.db_read(sql, data)
            if loco_count < 0:
                return
            for lrow in ds_locos:
                weight_count = weight_count + lrow[1]
                length_count = length_count + lrow[0]
            pass_count = 0
            # accumulate car weight/length and classify each car
            sql = 'select cartype.length, cartype.unladen_weight, ' +\
                  'car.weight_loaded, car.commodity, car.carclass ' +\
                  'from car, cartype ' +\
                  'where car.train = ? ' +\
                  'and car.cartype = cartype.cartype'
            car_count, ds_cars = self.db_read(sql, data)
            if car_count < 0:
                return
            for crow in ds_cars:
                weight_count = weight_count + crow[1] + crow[2]
                length_count = length_count + crow[0]
                carclass = crow[4] #Rev 1
                # cars of the PASSENGER class count separately; otherwise a
                # blank commodity marks an empty car
                passenger = Params.get_param_value('PASSENGER', 'XXX') #Rev 1
                if passenger == carclass: #Rev 1
                    pass_count = pass_count + 1 #Rev 1
                else:
                    if crow[3].strip() == '': #Rev 1
                        empty_count = empty_count + 1
                    else:
                        load_count = load_count + 1
            if line_count == 0:
                print(titles)
            if (direction == '*') or (schedule_direction == direction): #Rev 1
                print(self.x_field(train_id, 6) + " " +
                      self.x_field(est_arrive, 4) + " " +
                      self.x_field(orig_station, self.staxsize) + " " +
                      self.x_field(dest_station, self.staxsize) + " " +
                      self.x_field(sched_class, 1) + " " +
                      self.x_field(schedule_direction, 1) + " " +
                      self.x_field(loco_count, 5, 'R') + " " +
                      self.x_field(pass_count, 5, 'R') + " " +
                      self.x_field(load_count, 5, 'R') + " " +
                      self.x_field(empty_count, 5, 'R') + " " +
                      self.x_field(weight_count, 5, 'R') + " " +
                      self.x_field(length_count, 5, 'R'))
                counter = counter + 1
                line_count = line_count + 1
                if line_count > 20:
                    line_count = 0
                    reply = raw_input('+')
                    if reply == 'x':
                        break
        print(' ** END OF DATA: ' + str(counter) + ' RECORDS DISPLAYED **')
        return
def alocot(self, message):
"""Allocate a locomotive to a train. The train must already exist, and be reported as
being at the same place as the locomotive.
"""
if self.show_access(message, 'ALOCOT ^locomotive^;^train^', 'N') != 0:
return
#loco---------------------------------------------------------------------------------------
loco, rc = self.extract_field(message, 0, 'LOCOMOTIVE')
if rc > 1:
return
t = (loco,)
sql = 'select station, is_powered, id from locomotive where loco = ?'
count, ds_station = self.db_read(sql, t)
if count < 0:
return
if count == 0:
print('* LOCOMOTIVE CODE DOES NOT EXIST')
return
else:
for row in ds_station:
loco_at_station = row[0]
power = row[1]
loco_id = row[2]
if not (power == 'U' or power == 'P'):
print('* LOCOMOTIVE IS NOT ABLE TO BE MOVED (FUELLING/MAINTENANCE)')
return
#train reference ---------------------------------------------------------------------------
train, rc = self.extract_field(message, 1, 'TRAIN REFERENCE')
if rc > 1:
return
#check that the train exists and get the station
t = (train,)
sql = 'select id, station from train where train = ?'
count, ds_trains = self.db_read(sql, t)
if count < 0:
return
if count == 0:
print('* TRAIN DOES NOT EXIST')
return
else:
for row in ds_trains:
station = row[1]
if station != loco_at_station:
print('* LOCOMOTIVE NOT AT SAME STATION AS CURRENT STATION OF TRAIN')
return
#carry out the update ----------------------------------------------------------------------
t = ('', 0, train, loco_id)
sql = 'update locomotive set station = ?, place_id = ?, train = ? ' +\
'where id = ?'
if self.db_update(sql, t) != 0:
return
print('LOCOMOTIVE '+ loco+ ' ATTACHED TO TRAIN: '+ train)
return
def xlocot(self, message):
"""Removes a locomotive from a train. The train must be at a station.
"""
if self.show_access(message, 'XLOCOT ^locomotive^', 'N') != 0:
return
#loco---------------------------------------------------------------------------------------
loco, rc = self.extract_field(message, 0, 'LOCOMOTIVE')
if rc > 1:
return
t = (loco,)
sql = 'select train, id from locomotive where loco = ?'
count, ds_loco = self.db_read(sql, t)
if count < 0:
return
if count == 0:
print('* LOCOMOTIVE CODE DOES NOT EXIST')
return
else:
for row in ds_loco:
train = row[0]
loco_id = row[1]
if train == '':
print('* LOCOMOTIVE IS NOT ON A TRAIN')
return
#check that the train exists and get the station
t = (train,)
sql = 'select id, station from train where train = ?'
count, ds_trains = self.db_read(sql, t)
if count < 0:
return
if count == 0:
print('* TRAIN DOES NOT EXIST')
return
else:
for row in ds_trains:
station = row[1]
if station == '':
print('* TRAIN IS NOT AT A STATION')
return
#carry out the update ----------------------------------------------------------------------
t = (station, '', loco_id)
sql = 'update locomotive set station = ?, train = ? where id = ?'
if self.db_update(sql, t) != 0:
return
print('LOCOMOTIVE '+ loco+ ' DETACHED FROM TRAIN: '+ train+ ' AND NOW AT '+ station)
return
def acarxt(self, message):
    """Attach a car (and any block it belongs to) to a train.

    The train must already exist and be reported at the same station
    as the car; a car that is in maintenance cannot be moved.
    """
    if self.show_access(message, 'ACARXT car;^train^', 'N') != 0:
        return
    #car --------------------------------------------------------------------------------------
    car, rc = self.extract_field(message, 0, 'CAR')
    if rc > 1:
        return
    sql = 'select station, time_in_maint, id, block from car where car = ?'
    count, car_rows = self.db_read(sql, (car,))
    if count < 0:
        return
    if count == 0:
        print('* CAR CODE DOES NOT EXIST')
        return
    for car_row in car_rows:
        car_at_station, maint, car_id, block = car_row[0], car_row[1], car_row[2], car_row[3]
    if maint != 0:
        print('* CAR IS IN MAINTENANCE AND CANNOT BE MOVED')
        return
    #train reference ---------------------------------------------------------------------------
    train, rc = self.extract_field(message, 1, 'TRAIN REFERENCE')
    if rc > 1:
        return
    #the train must exist and be at the same station as the car
    count, train_rows = self.db_read('select id, station from train where train = ?', (train,))
    if count < 0:
        return
    if count == 0:
        print('* TRAIN DOES NOT EXIST')
        return
    for train_row in train_rows:
        station = train_row[1]
    if station != car_at_station:
        print('* CAR NOT AT SAME STATION AS CURRENT STATION OF TRAIN')
        return
    #apply the update: link the car to the train and clear its standing location ----------------
    sql = 'update car set station = ?, place_id = ?, train = ? where id = ?'
    if self.db_update(sql, ('', 0, train, car_id)) != 0:
        return
    #a car that belongs to a block drags the whole block onto the train with it
    if block != '':
        sql = 'update car set station = ?, place_id = ?, train = ? where block = ?'
        if self.db_update(sql, ('', 0, train, block)) != 0:
            return
    print('CAR ' + car + ' ATTACHED TO TRAIN: ' + train)
    if block != '':
        count, dummy = self.db_read('select car from car where block = ?', (block,))
        if count < 0:
            return
        print(count-1, ' ADDITIONAL CARS ADDED TO TRAIN AS PART OF BLOCK')
    return
def xcarxt(self, message):
    """Detach a car (and any block it belongs to) from its train.

    The car must be on a train, and that train must be standing at a
    station; the car - plus the rest of its block, if any - is then
    placed at that station.
    """
    if self.show_access(message, 'XCARXT car', 'N') != 0:
        return
    #car---------------------------------------------------------------------------------------
    car, rc = self.extract_field(message, 0, 'CAR')
    if rc > 1:
        return
    count, car_rows = self.db_read('select train, id, block from car where car = ?', (car,))
    if count < 0:
        return
    if count == 0:
        print('* CAR NUMBER DOES NOT EXIST')
        return
    for car_row in car_rows:
        train, car_id, block = car_row[0], car_row[1], car_row[2]
    if train == '':
        print('* CAR IS NOT ON A TRAIN')
        return
    #the train must exist and be standing at a station
    count, train_rows = self.db_read('select id, station from train where train = ?', (train,))
    if count < 0:
        return
    if count == 0:
        print('* TRAIN DOES NOT EXIST')
        return
    for train_row in train_rows:
        station = train_row[1]
    if station == '':
        print('* TRAIN IS NOT AT A STATION')
        return
    #apply the update: clear the train link and drop the car at the station ---------------------
    if self.db_update('update car set station = ?, train = ? where id = ?', (station, '', car_id)) != 0:
        return
    #a car that belongs to a block drags the whole block off the train with it
    if block != '':
        if self.db_update('update car set station = ?, train = ? where block = ?', (station, '', block)) != 0:
            return
    print('CAR '+ car+ ' DETACHED FROM TRAIN: '+ train+ ' AND NOW AT '+ station)
    if block != '':
        count, dummy = self.db_read('select car from car where block = ?', (block,))
        if count < 0:
            return
        print(count-1, ' ADDITIONAL CARS DETACHED FROM TRAIN AS PART OF BLOCK')
    return
def report(self, message, Params):
    """Report a train arriving at, departing from, or running through a station.

    Processing depends on the train type: for an unscheduled train ('U') the
    running/timings record may need to be created or updated as there is no
    pre-built schedule information; scheduled/extra trains only update an
    existing timings record (which must already exist).

    Fixes applied in this revision:
      * the station-not-found branch previously printed the train error
        message ('* TRAIN CODE DOES NOT EXIST'); it now reports the station
      * the bare except around the time validation is narrowed to ValueError
    """
    if self.show_access(message, 'REPORT train;update[DEP/ARR/THR];^station^;(time)','N') != 0:
        return
    #train id ------------------------------------------------------------------------------
    train, rc = self.extract_field(message, 0, 'TRAIN ID')
    if rc > 0:
        return
    t = (train,)
    sql = 'select train, type from train where train = ?'
    count, ds_trains = self.db_read(sql, t)
    if count < 0:
        return
    if count == 0:
        print('* TRAIN CODE DOES NOT EXIST')
        return
    for row in ds_trains:
        train_type = row[1]
    #arrival/depart ----------------------------------------------------------------------------
    action, rc = self.extract_field(message, 1, 'ACTION')
    if rc > 0:
        return
    if action not in ('ARR', 'DEP', 'THR'):
        print('* INDICATE WHETHER THIS IS AN ARRIVAL (arr), DEPARTURE (dep) OR THROUGH (thr) REPORT')
        return
    #station -----------------------------------------------------------------------------------
    station, rc = self.extract_field(message, 2, 'STATION')
    if rc > 0:
        return
    t = (station,)
    sql = 'select long_name from station where station = ?'
    count, ds_stations = self.db_read(sql, t)
    if count < 0:
        return
    if count == 0:
        #bug fix: this branch used to print the train error message
        print('* STATION CODE DOES NOT EXIST')
        return
    for row in ds_stations:
        station_name = row[0]
    #act_time: use the supplied time, or default to the current session time (HHMM) ------------
    value, rc = self.extract_field(message, 3, '')
    if rc == 0:
        act_time = value
    else:
        act_time = Params.get_time()
        act_time = act_time[0:2] + act_time[3:5]
    if len(act_time) != 4:
        print('* TIME MUST BE IN HHMM FORMAT')
        return
    hours = act_time[0:2]
    mins = act_time[2:4]
    try:
        if int(hours) > 23:
            print('* HOURS MUST BE IN RANGE 00-23')
            return
        if int(mins) > 59:
            print('* MINUTES MUST BE IN RANGE 00-59')
            return
    except ValueError:
        #narrowed from a bare except: only non-numeric input is expected here
        print('* TIME MUST BE ENTERED IN HOURS AND MINUTES')
        return
    #validate the update will be good ----------------------------------------------------------
    #scheduled/extra trains must already have an un-actioned timings record to update
    if train_type != 'U':
        if action == "ARR" or action == 'THR':
            data = (train, station, '')
            sql = 'select est_arrive from running ' +\
                  'where train = ? and arrive_station = ? and act_arrive = ?'
            count, ds_timings = self.db_read(sql, data)
            if count < 0:
                return
            if count == 0:
                print('NO TIMINGS RECORD TO UPDATE')
                return
        if action == "DEP" or action == 'THR':
            data = (train, station, '')
            sql = 'select est_depart from running ' +\
                  'where train = ? and depart_station = ? and act_depart = ?'
            count, ds_timings = self.db_read(sql, data)
            if count < 0:
                return
            if count == 0:
                print('NO TIMINGS RECORD TO UPDATE')
                return
    # update the database ----------------------------------------------------------------------
    if train_type == 'U':
        #unscheduled trains build their running records as they are reported:
        #find the most recent (highest id) record for the train
        last_id = 0
        data = (train, )
        sql = 'select id from running where train = ? order by id'
        rec_count, ds_timings = self.db_read(sql, data)
        if rec_count < 0:
            return
        if rec_count > 0:
            for row in ds_timings:
                last_id = row[0]
        #if its an arrive or thru we need to update the latest record
        if action == 'ARR' or action == 'THR':
            data = (station, act_time, last_id)
            sql = 'update running set arrive_station = ?, act_arrive = ? where id = ?'
            if self.db_update(sql, data) != 0:
                return
        #if its a depart or a thru we need to enter a new record
        if action == 'DEP' or action == 'THR':
            last_id = last_id + 1
            data = (train, last_id, station, '', '', '', act_time, '')
            sql = 'insert into running values (null, ?, ?, ?, ?, ?, ?, ?, ?)'
            if self.db_update(sql, data) != 0:
                return
    else:
        if action == 'ARR' or action == 'THR':
            data = (act_time, train, station)
            sql = 'update running set act_arrive = ? where train = ? and arrive_station = ?'
            if self.db_update(sql, data) != 0:
                return
            if action == 'ARR':
                #an arrival also parks the train at the station
                data = (station, train)
                sql = 'update train set station = ? where train = ?'
                if self.db_update(sql, data) != 0:
                    return
        if action == 'DEP' or action == 'THR':
            data = (act_time, train, station)
            sql = 'update running set act_depart = ? where train = ? and depart_station = ?'
            if self.db_update(sql, data) != 0:
                return
            #locomotives on a departed train are no longer standing at a station
            data = ('', train)
            sql = 'update locomotive set station = ? where train = ?'
            if self.db_update(sql, data) != 0:
                return
    #update extract to triMOPS
    data = (train,)
    sql = 'select schedule from train where train = ?'
    count, ds_schedule = self.db_read(sql, data)
    if count < 0:
        return
    for row in ds_schedule:
        schedule = row[0]
    self.Z042_trimops_update (Params, train, schedule, True)
    if action == 'DEP':
        print(train + ' DEPARTED ' + station + " " + station_name + ' AT ' + act_time)
    if action == 'ARR':
        print(train + ' ARRIVED ' + station + " " + station_name + ' AT ' + act_time)
    if action == 'THR':
        print(train + ' THRU ' + station + " " + station_name + ' AT ' + act_time)
    return
def ttrain(self, message, Flash, Params):
    """Terminate a train.

    The train status is set to completed ('C'), its rolling stock is placed
    at the current (last reported) station of the train, and a scheduled
    train's schedule is set to C - Complete.  Termination before the
    scheduled destination must be confirmed with /YES and raises a flash
    message.

    Fixes applied in this revision:
      * an unknown train code previously fell through and crashed with a
        NameError on schedule_id; it is now reported and rejected
      * direction/default_dir and arr_station/arr_time are initialised so
        empty route/running reads cannot raise NameError
      * the flash text no longer shadows the 'message' parameter
    """
    if self.show_access(message, 'TTRAIN train;(confirm[/YES])','N') != 0:
        return
    #train id ------------------------------------------------------------------------------
    train, rc = self.extract_field(message, 0, 'TRAIN ID')
    if rc > 0:
        return
    #terminates early: optional /YES confirmation --------------------------------------------
    value, rc = self.extract_field(message, 1, '')
    if rc == 0:
        early = value
        if early != 'YES':
            early = ''
    else:
        early = ''
    schedule_dest = ''
    schedule = ''
    data = (train,)
    sql = 'select schedule.dest_station, schedule.id, train.type, schedule.orig_station, schedule.schedule, ' +\
          'train.status from train left outer join schedule on train.schedule = schedule.schedule ' +\
          'where train.train = ?' #Rev 1
    count, ds_schedule = self.db_read(sql, data)
    if count < 0:
        return
    if count == 0:
        #guard added: previously an unknown train crashed on undefined schedule_id
        print('* TRAIN CODE DOES NOT EXIST')
        return
    for row in ds_schedule:
        schedule_dest = row[0]
        schedule_id = row[1]
        train_type = row[2]
        schedule = row[4]
        train_status = str(row[5]) #Rev 1
    if train_status == 'C': #Rev 1
        print('* TRAIN HAS ALREADY BEEN TERMINATED') #Rev 1
        return #Rev 1
    #get the schedule direction and the route default so we know which way to
    #read the running records (Rev 1)
    direction = 'x'
    default_dir = 'x'
    data = (schedule_id,) #Rev 1 start
    sql = 'select schedule.direction, route.default_direction from route, schedule ' +\
          'where schedule.route = route.route and schedule.id = ?' #Rev 1
    count, ds_routes = self.db_read(sql, data)
    if count < 0:
        return
    for routes in ds_routes:
        direction = routes[0]
        default_dir = routes[1] #Rev 1 end
    if train_type == 'S':
        #scheduled trains: mark the schedule itself complete
        data = ('C', schedule_id)
        sql = 'update schedule set status = ? where id = ?'
        if self.db_update(sql, data) != 0:
            return
    if train_type == 'U': #Rev 1
        direction = 'x'
        default_dir = 'x'
    #find the last reported arrival: read the running records in timings order,
    #reversed when the train runs against the route default direction (Rev 1)
    arr_station = ''
    arr_time = ''
    data = (train,)
    if direction == default_dir: #Rev 1
        sql = 'select arrive_station, act_arrive from running where train = ? order by timings'
    else: #Rev 1
        sql = 'select arrive_station, act_arrive from running where train = ? order by timings desc' #Rev 1
    count, ds_running = self.db_read(sql, data)
    if count < 0:
        return
    for row in ds_running:
        arr_station = row[0]
        arr_time = row[1]
    if arr_time == '':
        print('* TRAIN NEEDS TO BE REPORTED AT STATION BEFORE TERMINATION')
        return
    if arr_station != schedule_dest:
        if early != 'YES':
            print('* CONFIRM EARLY TERMINATION OF TRAIN')
            return
        else:
            #renamed from 'message' to avoid shadowing the command parameter
            flash_text = ('EARLY TERMINATION OF TRAIN: ' + train + ' AT: ' + arr_station)
            Flash.Z003_generate_flash_message(flash_text, Params)
    data = ('C', train)
    sql = 'update train set status = ? where train = ?'
    if self.db_update(sql, data) != 0:
        return
    #place the rolling stock at the termination station and free it of the train
    data = (arr_station, '', train)
    sql = 'update locomotive set station = ?, train = ? where train = ?'
    if self.db_update(sql, data) != 0:
        return
    sql = 'update car set station = ?, train = ? where train = ?'
    if self.db_update(sql, data) != 0:
        return
    #update extract to triMOPS # Rev 1
    self.Z042_trimops_update (Params, train, schedule, True)
    print('TRAIN '+ train+ ' TERMINATED AT '+ arr_station)
    return
def Z040_trimop(self, Params):
    """write out a session start set of records for TriMOPS. This includes an initialiser
    record which will tell TriMOPS to reset all existing schedule train and running
    records. This starter module also includes all station detail.

    Record layout: every line is a 2-character record type, the session date
    and time from Params, then fixed-width fields built with self.x_field.
    Record types written here: 00 reset header, 10 station, 20 schedule,
    30 train, 40 running.  Nothing is written unless the TRIMOPS parameter
    is set to YES.
    """
    #see if we want to process the messages
    process_trimops = Params.get_param_value('TRIMOPS', 'NO')
    if process_trimops != 'YES':
        return
    #append the updated details to the history file
    filename = self.directory + 'exMOPStrains.txt'
    with open(filename, "a") as f:
        #header record (type 00) - tells TriMOPS to reset its data
        f.write('00' + Params.get_date() + Params.get_time() + 'RESET RECORD' + '\n')
        #current list of stations (type 10 records)
        sql = 'select station, short_name, long_name, area, stationtype, alias from station'
        count, ds_station = self.db_read(sql, '')
        if count < 0:
            return
        for row in ds_station:
            station = row[0]
            short_name = row[1]
            long_name = row[2]
            area = row[3]
            station_type = row[4]
            alias = row[5]
            record_data = self.x_field(station, 10) +\
                self.x_field(short_name, 8) +\
                self.x_field(long_name, 30) +\
                self.x_field(area, 10) +\
                self.x_field(station_type, 10) +\
                self.x_field(alias, 10)
            f.write('10' + Params.get_date() + Params.get_time() + record_data + '\n')
        #current list of schedules, excluding inactive ('I') ones (type 20 records)
        data = 'I'
        sql = 'select schedule, route, name, class, status, run_days, ' +\
              'orig_station, dest_station, direction from schedule ' +\
              'where status != ?'
        count, ds_schedules = self.db_read(sql, data)
        if count < 0:
            return
        for row in ds_schedules:
            schedule = row[0]
            route = row[1]
            name = row[2]
            sch_class = row[3]
            status = row[4]
            run_days = row[5]
            orig = row[6]
            dest = row[7]
            direction = row[8]
            record_data = self.x_field(schedule, 5) +\
                self.x_field(route, 10) +\
                self.x_field(name, 30) +\
                self.x_field(sch_class, 1) +\
                self.x_field(status, 1) +\
                self.x_field(run_days, 8) +\
                self.x_field(orig, 10) +\
                self.x_field(dest, 10) +\
                self.x_field(direction, 1)
            f.write('20' + Params.get_date() + Params.get_time() + record_data + '\n')
        #current list of trains (type 30 records) - comment corrected; this
        #section reads the train table, not the schedules
        sql = 'select train, type, station, status, schedule from train'
        count, ds_trains = self.db_read(sql, '')
        if count < 0:
            return
        for row in ds_trains:
            train = row[0]
            traintype = row[1]
            station = row[2]
            status = row[3]
            schedule = row[4]
            record_data = self.x_field(train, 5) +\
                self.x_field(traintype, 1) +\
                self.x_field(station, 10) +\
                self.x_field(status, 1) +\
                self.x_field(schedule, 5)
            f.write('30' + Params.get_date() + Params.get_time() + record_data + '\n')
        #current state of running (type 40 records)
        sql = 'select train, depart_station, arrive_station, ' +\
              'est_depart, est_arrive, act_depart, act_arrive from running'
        count, ds_runnings = self.db_read(sql, '')
        if count < 0:
            return
        for row in ds_runnings:
            train = row[0]
            departs = row[1]
            arrives = row[2]
            est_depart = row[3]
            est_arrive = row[4]
            act_depart = row[5]
            act_arrive = row[6]
            record_data = self.x_field(train, 5) +\
                self.x_field(departs, 10) +\
                self.x_field(arrives, 10) +\
                self.x_field(est_depart, 4) +\
                self.x_field(est_arrive, 4) +\
                self.x_field(act_depart, 4) +\
                self.x_field(act_arrive, 4)
            f.write('40' + Params.get_date() + Params.get_time() + record_data + '\n')
    return
def Z041_trimop_shutdown(self, Params):
    """Append an end-of-session marker record (type 99) to the TriMOPS
    extract file.
    """
    #only produce the extract when the TRIMOPS parameter is switched on
    if Params.get_param_value('TRIMOPS', 'NO') != 'YES':
        return
    #append the shutdown record to the history file
    trimops_file = self.directory + 'exMOPStrains.txt'
    with open(trimops_file, "a") as extract:
        extract.write('99' + Params.get_date() + Params.get_time() + 'MOPS SHUTDOWN' + '\n')
    return
def Z042_trimops_update(self, Params, train, schedule, update_schedule):
    """ creates an update record for triMOPS

    Appends update records for one train to the TriMOPS extract file:
    type 21 for the schedule, type 31 for the train (including a count of
    passenger-mode cars on it), and - when update_schedule is true - one
    type 41 record per running entry.  Nothing is written unless the
    TRIMOPS parameter is set to YES.

    NOTE(review): the loop bodies below rebind the 'train', 'schedule',
    'sql' and 'data' names as they go; the original order of statements is
    significant and is preserved exactly.
    """
    #see if we want to process the messages
    process_trimops = Params.get_param_value('TRIMOPS', 'NO')
    if process_trimops != 'YES':
        return
    #append the updated details to the history file
    filename = self.directory + 'exMOPStrains.txt'
    with open(filename, "a") as f:
        #schedule being updated (type 21 record); silently skipped if the
        #schedule does not exist (e.g. an unscheduled train)
        data = (schedule, )
        sql = 'select schedule, route, name, class, status, run_days, ' +\
              'orig_station, dest_station, direction from schedule ' +\
              'where schedule = ?'
        count, ds_schedules = self.db_read(sql, data)
        if count > 0:
            for row in ds_schedules:
                schedule = row[0]
                route = row[1]
                name = row[2]
                sch_class = row[3]
                status = row[4]
                run_days = row[5]
                orig = row[6]
                dest = row[7]
                direction = row[8]
                record_data = self.x_field(schedule, 5) +\
                    self.x_field(route, 10) +\
                    self.x_field(name, 30) +\
                    self.x_field(sch_class, 1) +\
                    self.x_field(status, 1) +\
                    self.x_field(run_days, 8) +\
                    self.x_field(orig, 10) +\
                    self.x_field(dest, 10) +\
                    self.x_field(direction, 1)
                f.write('21' + Params.get_date() + Params.get_time() + record_data + '\n')
        #train being updated (type 31 record)
        data = (train, )
        sql = 'select train, type, station, status, schedule from train where train = ?'
        count, ds_trains = self.db_read(sql, data)
        if count > 0:
            for row in ds_trains:
                train = row[0]
                traintype = row[1]
                station = row[2]
                status = row[3]
                schedule = row[4]
                #count the passenger-mode ('P') cars attached to this train
                data = ('P', train)
                sql = 'select cartype.oper_mode from cartype, car where car.cartype = cartype.cartype ' +\
                      'and cartype.oper_mode = ? and car.train = ?'
                pass_cars, dummy = self.db_read(sql, data)
                if pass_cars < 0:
                    return
                record_data = self.x_field(train, 5) +\
                    self.x_field(traintype, 1) +\
                    self.x_field(station, 10) +\
                    self.x_field(status, 1) +\
                    self.x_field(schedule, 5) +\
                    self.x_field(pass_cars, 5, 'R')
                f.write('31' + Params.get_date() + Params.get_time() + record_data + '\n')
        #running info being updated (type 41 records), one per running entry
        if update_schedule:
            data = (train, )
            sql = 'select train, depart_station, arrive_station, ' +\
                  'est_depart, est_arrive, act_depart, act_arrive from running where train = ?'
            count, ds_runnings = self.db_read(sql, data)
            if count > 0:
                for row in ds_runnings:
                    train = row[0]
                    departs = row[1]
                    arrives = row[2]
                    est_depart = row[3]
                    est_arrive = row[4]
                    act_depart = row[5]
                    act_arrive = row[6]
                    record_data = self.x_field(train, 5) +\
                        self.x_field(departs, 10) +\
                        self.x_field(arrives, 10) +\
                        self.x_field(est_depart, 4) +\
                        self.x_field(est_arrive, 4) +\
                        self.x_field(act_depart, 4) +\
                        self.x_field(act_arrive, 4)
                    f.write('41' + Params.get_date() + Params.get_time() + record_data + '\n')
    return
def trains(self, message):
    """reports a train arriving or departing a station. processing depends on whether it is
    an unscheduled train or a scheduled/extra train. other train
    types need only update an existing timings record (which must exist)

    Lists all non-completed trains, optionally filtered by route and/or
    direction, showing for each its last reporting point and its next
    expected station.  Output is sorted by next-expected-time / direction /
    train, and paged 23 lines at a time ('x' at the prompt stops the list).
    """
    if self.show_access(message, 'TRAINS (^route^);(direction[N/S/E/W/U/D])','R') != 0:
        return
    #work out the various parameters: both filters default to '*' (match all)
    value, rc = self.extract_field(message, 0, '')
    if rc == 0:
        filter_route = value
    else:
        filter_route = '*'
    value, rc = self.extract_field(message, 1, '')
    if rc == 0:
        filter_dir = value
    else:
        filter_dir = '*'
    # build the column titles
    titles = self.x_field('TRAIN=', 6) + ' ' +\
             self.x_field('C', 1) + ' ' +\
             self.x_field('TYPE', 4) + ' ' +\
             self.x_field('ROUTE', self.routsize) + ' ' +\
             self.x_field('SCHEDULE', self.schdsize) + ' ' +\
             self.x_field('D', 1) + ' ' +\
             self.x_field('ORIGINATING', self.staxsize) + ' ' +\
             self.x_field('DESTINATION', self.staxsize) + ' ' +\
             self.x_field(' ', 3) + ' ' +\
             self.x_field('LAST STATION', self.staxsize) + ' ' +\
             self.x_field('DUE.', 4) + ' ' +\
             self.x_field('ACT.', 4) + ' ' +\
             self.x_field('NEXT STATION', self.staxsize) + ' ' +\
             self.x_field('EXP.', 4)
    default_dir = 'x'
    #all trains that are not completed ('C'); schedule columns are NULL for
    #unscheduled trains because of the left outer join
    data = ('C',)
    sql = 'select train.train, train.type, train.station, train.schedule, ' +\
          'schedule.class, schedule.route, schedule.orig_station,' +\
          'schedule.dest_station, schedule.direction ' +\
          'from train left outer join schedule on train.schedule = schedule.schedule ' +\
          'where train.status != ? order by schedule.orig_station' #Rev 1
    count, ds_trains = self.db_read(sql, data)
    if count < 0:
        return
    counter = 0
    extract = {}
    for row in ds_trains:
        train = row[0]
        train_type = row[1]
        schedule = row[3]
        train_class = row[4]
        train_route = row[5]
        orig = row[6]
        dest = row[7]
        direction = row[8]
        #normalise the NULLs left by the outer join for unscheduled trains
        if train_class == None:
            train_class = ''
        if train_route == None:
            train_route = ''
        if orig == None:
            orig = ''
        if dest == None:
            dest = ''
        if direction == None:
            direction = ''
        else: #Rev 1 start
            #scheduled train: fetch the route's default direction so we know
            #which way to read its running records
            sql = 'select route.default_direction from route, schedule ' +\
                  'where schedule.route = route.route ' +\
                  'and schedule.schedule = ?'
            data = (schedule,)
            count, ds_route = self.db_read(sql, data)
            if count < 0:
                return
            for route in ds_route:
                default_dir = route[0] #Rev 1 end
        if ((filter_route == train_route) or (filter_route == '*')):
            if ((filter_dir == direction) or (filter_dir == '*')):
                counter = counter + 1
                if train_type == 'S':
                    p_train_type = 'SCHD'
                elif train_type == 'E':
                    p_train_type = 'XTRA'
                else:
                    p_train_type = 'UNSC'
                sql = 'select '
                last_report = ' '
                last_station = ' '
                last_est = ' '
                last_act = ' '
                thru_train = ' '
                data = (train,)
                #read the running records forwards, or backwards when the
                #train runs against the route default direction
                if (direction == default_dir) or (default_dir == 'x'):
                    sql = 'select depart_station, arrive_station, est_depart, est_arrive, ' +\
                          'act_depart, act_arrive from running where train = ? order by timings'
                else: #Rev 1 else clause added
                    sql = 'select depart_station, arrive_station, est_depart, est_arrive, ' +\
                          'act_depart, act_arrive from running where train = ? order by timings DESC'
                t_count, ds_run = self.db_read(sql, data)
                if t_count < 0:
                    return
                #walk the running records to find the last actual report and
                #the next expected event; '+' marks a through (non-stop) call
                next_item = 'Departure'
                next_station = ''
                next_est = ' '
                terminates = ''
                thru_train = ' '
                for t_row in ds_run:
                    terminates = ' ' #Rev 1
                    dep_station = t_row[0]
                    arr_station = t_row[1]
                    est_depart = t_row[2]
                    est_arrive = t_row[3]
                    act_depart = t_row[4]
                    act_arrive = t_row[5]
                    if next_item == 'Departure':
                        next_station = 'DEP:'
                        next_est = est_depart
                        next_item = ''
                    if next_item == 'Arrival':
                        if next_est != est_depart:
                            thru_train = '+'
                        next_item = ''
                    if act_depart != '':
                        last_report = 'DEP'
                        last_station = dep_station
                        last_est = est_depart
                        last_act = act_depart
                        next_station = arr_station
                        next_est = est_arrive
                        next_item = 'Arrival'
                    if act_arrive != '':
                        last_report = 'ARR'
                        last_station = arr_station
                        last_est = est_arrive
                        last_act = act_arrive
                        next_item = 'Departure'
                if next_item == 'Departure': #Rev 1
                    next_station = ' ' #Rev 1
                    next_est = ' ' #Rev 1
                #next_est doubles as the leading part of the sort key below
                next_est = next_est + terminates + thru_train #Rev 1
                extract[next_est + direction + train] = self.x_field(train, 6) + ' ' +\
                    self.x_field(train_class,1) + ' ' +\
                    self.x_field(p_train_type,4) + ' ' +\
                    self.x_field(train_route, self.routsize) + ' ' +\
                    self.x_field(schedule, self.schdsize) + ' ' +\
                    self.x_field(direction, 1) + ' ' +\
                    self.x_field(orig, self.staxsize) + ' ' +\
                    self.x_field(dest, self.staxsize) + ' ' +\
                    self.x_field(last_report, 3) + ' ' +\
                    self.x_field(last_station, self.staxsize) + ' ' +\
                    self.x_field(last_est, 4) + ' ' +\
                    self.x_field(last_act, 4) + ' ' +\
                    self.x_field(next_station, self.staxsize) + ' ' +\
                    self.x_field(next_est, 6)
    #sort by the composite key (expected time, direction, train) and page
    data_sort = list(extract.keys())
    data_sort.sort()
    line_count = 0
    for x in data_sort:
        if line_count == 0:
            print(titles)
        print(extract[x])
        line_count = line_count + 1
        if line_count > 22:
            line_count = 0
            reply = raw_input('+')
            if reply == 'x':
                break
    print(' ** END OF DATA **')
    return
def licons(self, message, Params):
    """lists a consist for a train: first current train details, then a summary of
    train information. If a Full list is required then show details of individual cars

    The summary totals weight/length over locomotives and cars and counts
    passenger, loaded and empty cars.  With the 'F' option the individual
    locomotives and then the individual cars are listed, paged 21 lines at
    a time ('x' at the prompt stops the list).
    """
    if self.show_access(message, 'LICONS train;(report[F])','R') != 0:
        return
    #work out the various parameters
    train_id, rc = self.extract_field(message, 0, 'TRAIN ID')
    if rc > 1:
        return
    value, rc = self.extract_field(message, 1, '')
    if rc == 0:
        full_consist = value
    else:
        full_consist = '*'
    # build the column titles for the car detail list
    col_of_lenc = self.railsize + self.carxsize + self.staxsize + self.classize + self.commsize + 24
    titles = self.x_field('----------', self.railsize) + ' ' +\
             self.x_field('CAR=======', self.carxsize) + ' ' +\
             self.x_field('DESTINATION', self.staxsize) + ' ' +\
             self.x_field('========', 8) + ' ' +\
             self.x_field('CUSTOMER', 10) + ' ' +\
             self.x_field('BLOCK ', 6) + ' ' +\
             self.x_field('CLASS=====', self.classize) + ' ' +\
             self.x_field('COMMODITY=', self.commsize) + ' ' +\
             self.x_field('-----', 5) + ' ' +\
             self.x_field('-----', 5)
    #get the train information; schedule columns are NULL for unscheduled
    #trains because of the left outer join
    sql = 'select train.train, train.type, train.station, train.schedule, ' +\
          'schedule.class, schedule.route, schedule.orig_station,' +\
          'schedule.dest_station, schedule.direction ' +\
          'from train ' +\
          'left outer join schedule on train.schedule = schedule.schedule ' +\
          'where train.train = ?'
    data = (train_id,)
    count, ds_trains = self.db_read(sql, data)
    if count < 0:
        return
    #return the train information and print a summary
    counter = 0
    line_count = 0
    for row in ds_trains:
        train = row[0]
        train_type = row[1]
        station = row[2]
        schedule = row[3]
        train_class = row[4]
        train_route = row[5]
        orig = row[6]
        dest = row[7]
        direction = row[8]
        #normalise the NULLs left by the outer join
        if schedule == None:
            schedule = 'N/A'
        if train_class == None:
            train_class = 'N/A'
        if train_route == None:
            train_route = 'N/A'
        if orig == None:
            orig = 'N/A'
        if dest == None:
            dest = 'N/A'
        if direction == None:
            direction = 'N/A'
        counter = counter + 1
        if train_type == 'S':
            p_train_type = 'SCHD'
        elif train_type == 'E':
            p_train_type = 'XTRA'
        else:
            p_train_type = 'USCD'
        #go to the schedule and get the last reporting points
        last_report = ' '
        last_est = ' '
        last_act = ' '
        data = (train,)
        sql = 'select depart_station, arrive_station, est_depart, est_arrive, ' +\
              'act_depart, act_arrive from running where train = ? order by timings'
        t_count, ds_run = self.db_read(sql, data)
        if t_count < 0:
            return
        for t_row in ds_run:
            act_depart = t_row[4]
            act_arrive = t_row[5]
            if act_depart != '':
                last_report = 'DEP'
                last_est = t_row[2]
                last_act = t_row[4]
            if act_arrive != '':
                last_report = 'ARR'
                last_est = t_row[3]
                last_act = t_row[5]
        #short names for the origin, destination and current station
        short_orig = 'N/A'
        data = (orig,)
        sql = 'select short_name from station where station = ?'
        s_count, ds_stax = self.db_read(sql, data)
        if s_count < 0:
            return
        for s_row in ds_stax:
            short_orig = s_row[0]
        short_dest = 'N/A'
        data = (dest,)
        sql = 'select short_name from station where station = ?'
        s_count, ds_stax = self.db_read(sql, data)
        if s_count < 0:
            return
        for s_row in ds_stax:
            short_dest = s_row[0]
        short_station = 'N/A'
        data = (station,)
        sql = 'select short_name from station where station = ?'
        s_count, ds_stax = self.db_read(sql, data)
        if s_count < 0:
            return
        for s_row in ds_stax:
            short_station = s_row[0]
        print(' TRAIN:' + train + ' CLASS:' + train_class + ' TYPE:' + p_train_type +
              ' ROUTE:' + train_route + ' SCHEDULE:' + schedule +
              ' DIRECTION:' + direction)
        print('ORIG:' + orig + short_orig + ' DEST:' + dest + short_dest)
    #get a summary of the train information: total weight/length and car counts
    weight_count = 0
    length_count = 0
    load_count = 0
    empty_count = 0
    loco_hauls = 0 #Rev 1
    data = (train_id,)
    sql = 'select locotype.length, locomotive.weight, locotype.haulage from locomotive, locotype ' +\
          'where locomotive.train = ? ' +\
          'and locomotive.locotype = locotype.locotype' #Rev 1
    loco_count, ds_locos = self.db_read(sql, data)
    if loco_count < 0:
        return
    for lrow in ds_locos:
        weight_count = weight_count + lrow[1]
        length_count = length_count + lrow[0]
        loco_hauls = loco_hauls + lrow[2] #Rev 1
    sql = 'select cartype.length, cartype.unladen_weight, ' +\
          'car.weight_loaded, car.commodity, car.carclass ' +\
          'from car, cartype ' +\
          'where car.train = ? ' +\
          'and car.cartype = cartype.cartype'
    car_count, ds_cars = self.db_read(sql, data)
    if car_count < 0:
        return
    pass_count = 0
    for crow in ds_cars:
        car_class = crow[4]
        car_commodity = crow[3]
        weight_count = weight_count + crow[1] + crow[2]
        length_count = length_count + crow[0]
        #no commodity: either a passenger car (class matches the PASSENGER
        #parameter) or an empty freight car
        if car_commodity.strip() == '': #Rev 1
            passenger = Params.get_param_value('PASSENGER', 'XXX') #Rev 1
            if passenger == car_class: #Rev 1
                pass_count = pass_count + 1 #Rev 1
            else: #Rev 1
                empty_count = empty_count + 1
        else:
            load_count = load_count + 1
    print(last_report + ' AT: ' + station + short_station + ' DUE: ' + last_est + ' ACT: ' + last_act)
    print(' LOCOS: ' + str(loco_count) + ' PASS: ' + str(pass_count) + ' LOAD: ' + str(load_count) + ' MTY: ' + str(empty_count) +
          ' LEN: ' + str(length_count) + ' HAUL: ' + str(loco_hauls) + 'WGHT:' + str(weight_count)) #Rev 1
    #summary only unless the Full report option was requested
    if full_consist != 'F':
        return
    # build the column titles for the locomotive detail list; addin pads the
    # locomotive lines so their length/weight columns line up with the cars'
    col_of_lenl = self.railsize + self.locosize + self.staxsize + 28
    coldiff = col_of_lenc - col_of_lenl
    if coldiff > 0:
        addin = col_of_lenc - col_of_lenl - 1
    print(self.x_field('RAILROAD==', self.railsize) + ' ' +\
          self.x_field('LOCO======', self.locosize) + ' ' +\
          self.x_field('POWER=', 6) + ' ' +\
          self.x_field('TYPE==', 6) + ' ' +\
          self.x_field('HAULS', 5, 'R') + ' ' +\
          self.x_field('MODE==', 6) + ' ' +\
          self.x_field('FUEL==', 5) + ' ' +\
          self.x_field('HOME======', self.staxsize) + ' ' +\
          self.x_field(' ', addin),
          self.x_field('LNGTH', 5) + ' ' +\
          self.x_field('WGHT=', 5) )
    #detail wanted, so get the loco details
    data = (train_id,)
    sql = 'select locomotive.railroad, locomotive.loco, ' +\
          'locomotive.is_powered, locotype.power_type, locotype.haulage, ' +\
          'locotype.oper_mode, locotype.length, locomotive.weight, locomotive.fuel, ' +\
          'locomotive.home_station ' +\
          'from locomotive, locotype ' +\
          'where locomotive.train = ? ' +\
          'and locomotive.locotype = locotype.locotype'
    loco_count, ds_locos = self.db_read(sql, data)
    if loco_count < 0:
        return
    for lrow in ds_locos:
        #decode the power state, power plant and operating mode flags
        if lrow[2] == 'P':
            powerstate = ' '
        else:
            powerstate = 'UNPWRD'
        if lrow[3] == 'D':
            plant = 'DIESEL'
        elif lrow[3] == 'E':
            plant = 'ELCTRC'
        elif lrow[3] == 'S':
            plant = 'STEAM '
        else:
            plant = ' '
        if lrow[5] == 'I':
            mode = ' '
        elif lrow[5] == 'D':
            mode = 'SLAVE '
        elif lrow[5] == 'M':
            mode = 'M.UNIT'
        else:
            mode = 'OTHER '
        print(self.x_field(lrow[0], self.railsize) + " " +
              self.x_field(lrow[1], self.locosize) + " " +
              self.x_field(powerstate, 6) + " " +
              self.x_field(plant, 6) + " " +
              self.x_field(lrow[4], 5, 'R') + " " +
              self.x_field(mode,6) + " " +
              self.x_field(lrow[8], 5, 'R') + " " +
              self.x_field(lrow[9], self.staxsize) + " " +
              self.x_field(' ', addin) + " " +
              self.x_field(lrow[6], 5, 'R') + " " +
              self.x_field(lrow[7], 5, 'R'))
    #get the car details, in block/waybill-destination order
    data = (train_id,)
    sql = 'select car.railroad, car.car, place.station, ' +\
          'station.short_name, waybill.destination, ' +\
          'car.block, car.carclass, car.commodity, ' +\
          'cartype.length, cartype.unladen_weight, ' +\
          'car.weight_loaded ' +\
          'from car, cartype ' +\
          'left outer join waybill on car.carorder = waybill.id ' +\
          'left outer join place on waybill.destination = place.industry ' +\
          'left outer join station on place.station = station.station ' +\
          'where car.train = ? ' +\
          'and car.cartype = cartype.cartype ' +\
          'order by place.industry, car.block, car.railroad, car.car'
    car_count, ds_cars = self.db_read(sql, data)
    if car_count < 0:
        return
    line_count = 0
    for crow in ds_cars:
        railroad = crow[0]
        car = crow[1]
        if line_count == 0:
            print(titles)
        #normalise NULLs from the outer joins (car may have no waybill)
        station = crow[2]
        if station == None:
            station = ''
        stax_name = crow[3]
        if stax_name == None:
            stax_name = ''
        waybill_dest = crow[4]
        if waybill_dest == None:
            waybill_dest = ''
        car_block = crow[5]
        car_class = crow[6]
        car_commodity = crow[7]
        car_length = crow[8]
        weight = crow[9] + crow[10]
        print(self.x_field(railroad, self.railsize) + " " +
              self.x_field(car, self.carxsize) + " " +
              self.x_field(station, self.staxsize) + " " +
              self.x_field(stax_name, 8) + " " +
              self.x_field(waybill_dest, 10) + " " +
              self.x_field(car_block, 6) + " " +
              self.x_field(car_class, self.classize) + " " +
              self.x_field(car_commodity, self.commsize) + " " +
              self.x_field(car_length, 5, 'R') + " " +
              self.x_field(weight, 5, 'R'))
        line_count = line_count + 1
        if line_count > 20:
            line_count = 0
            reply = raw_input('+')
            if reply == 'x':
                break
    print(' ** END OF DATA **')
    return
def prcons(self, message, Params):
if self.show_access(message, 'LICONS train;(report[F])','R') != 0:
return
#work out the various parameters
train_id, rc = self.extract_field(message, 0, 'TRAIN ID')
if rc > 1:
return
value, rc = self.extract_field(message, 1, '')
if rc == 0:
full_consist = value
else:
full_consist = '*'
# build the column titles
col_of_lenc = self.railsize + self.carxsize + self.staxsize + self.classize + self.commsize + 24
titles = self.x_field('RAILROAD==', self.railsize) + ' ' +\
self.x_field('CAR=======', self.carxsize) + ' ' +\
self.x_field('DESTINATION', self.staxsize) + ' ' +\
self.x_field('========', 8) + ' ' +\
self.x_field('CUSTOMER', 10) + ' ' +\
self.x_field('BLOCK ', 6) + ' ' +\
self.x_field('CLASS=====', self.classize) + ' ' +\
self.x_field('COMMODITY=', self.commsize) + ' ' +\
self.x_field('LNGTH', 5) + ' ' +\
self.x_field('WGHT=', 5)
#print attributes
self.temp = {}
row_no = 0
#get the train information
sql = 'select train.train, train.type, train.station, train.schedule, ' +\
'schedule.class, schedule.route, schedule.orig_station,' +\
'schedule.dest_station, schedule.direction ' +\
'from train ' +\
'left outer join schedule on train.schedule = schedule.schedule ' +\
'where train.train = ?'
data = (train_id,)
count, ds_trains = self.db_read(sql, data)
if count < 0:
return
#return the train information and print a summary
counter = 0
for row in ds_trains:
train = row[0]
train_type = row[1]
station = row[2]
schedule = row[3]
train_class = row[4]
train_route = row[5]
orig = row[6]
dest = row[7]
direction = row[8]
if schedule == None:
schedule = 'N/A'
if train_class == None:
train_class = 'N/A'
if train_route == None:
train_route = 'N/A'
if orig == None:
orig = 'N/A'
if dest == None:
dest = 'N/A'
if direction == None:
direction = 'N/A'
counter = counter + 1
if train_type == 'S':
p_train_type = 'SCHD'
elif train_type == 'E':
p_train_type = 'XTRA'
else:
p_train_type = 'USCD'
#go to the schedule and get the last reporting points
last_report = ' '
last_est = ' '
last_act = ' '
data = (train,)
sql = 'select depart_station, arrive_station, est_depart, est_arrive, ' +\
'act_depart, act_arrive from running where train = ? order by timings'
t_count, ds_run = self.db_read(sql, data)
if t_count < 0:
return
for t_row in ds_run:
act_depart = t_row[4]
act_arrive = t_row[5]
if act_depart != '':
last_report = 'DEP'
last_est = t_row[2]
last_act = t_row[4]
if act_arrive != '':
last_report = 'ARR'
last_est = t_row[3]
last_act = t_row[5]
short_orig = 'N/A'
data = (orig,)
sql = 'select short_name from station where station = ?'
s_count, ds_stax = self.db_read(sql, data)
if s_count < 0:
return
for s_row in ds_stax:
short_orig = s_row[0]
short_dest = 'N/A'
data = (dest,)
sql = 'select short_name from station where station = ?'
s_count, ds_stax = self.db_read(sql, data)
if s_count < 0:
return
for s_row in ds_stax:
short_dest = s_row[0]
short_station = 'N/A'
data = (station,)
sql = 'select short_name from station where station = ?'
s_count, ds_stax = self.db_read(sql, data)
if s_count < 0:
return
for s_row in ds_stax:
short_station = s_row[0]
print_line = ('TRAIN:' + train + ' ' +
'CLASS:' + train_class + ' ' +
'TYPE:' + p_train_type + ' ' +
'ROUTE:' + train_route + ' ' +
'SCHEDULE:' + schedule + ' ' +
'DIRECTION:' + direction)
row_no = row_no + 1
self.temp[row_no] = print_line
print_line = ('ORIG:' + orig+ ' ' +
short_orig+ ' ' +
'DEST:' + dest+ ' ' +
short_dest+ ' ' +
last_report+ ' ' +
'AT:' + station + ' ' +
short_station)
row_no = row_no + 1
self.temp[row_no] = print_line
#get a summary of the train information
weight_count = 0
length_count = 0
load_count = 0
empty_count = 0
data = (train_id,)
sql = 'select locotype.length, locomotive.weight from locomotive, locotype ' +\
'where locomotive.train = ? ' +\
'and locomotive.locotype = locotype.locotype'
loco_count, ds_locos = self.db_read(sql, data)
if loco_count < 0:
return
for lrow in ds_locos:
weight_count = weight_count + lrow[1]
length_count = length_count + lrow[0]
sql = 'select cartype.length, cartype.unladen_weight, ' +\
'car.weight_loaded, car.commodity ' +\
'from car, cartype ' +\
'where car.train = ? ' +\
'and car.cartype = cartype.cartype'
car_count, ds_cars = self.db_read(sql, data)
if car_count < 0:
return
for crow in ds_cars:
weight_count = weight_count + crow[1] + crow[2]
length_count = length_count + crow[0]
if row[3] == '':
empty_count = empty_count + 1
else:
load_count = load_count + 1
print_line = ('DUE:' + last_est + ' ' +
'ACT:' + last_act + ' ' +
'LOCOS:' + str(loco_count) + ' ' +
'LOAD:' + str(load_count) + ' ' +
'MTY:' + str(empty_count) + ' ' +
'L:' + str(length_count) + ' ' +
'W:' + str(weight_count))
row_no = row_no + 1
self.temp[row_no] = print_line
print_line = ' '
row_no = row_no + 1
self.temp[row_no] = print_line
if full_consist != 'F': #Rev 1
return
# build the column titles
col_of_lenl = self.railsize + self.locosize + self.staxsize + 28
coldiff = col_of_lenc - col_of_lenl
if coldiff > 0:
addin = col_of_lenc - col_of_lenl - 1
print_line = (self.x_field('RAILROAD==', self.railsize) + ' ' +\
self.x_field('LOCO======', self.locosize) + ' ' +\
self.x_field('POWER=', 6) + ' ' +\
self.x_field('TYPE==', 6) + ' ' +\
self.x_field('HAULS', 5, 'R') + ' ' +\
self.x_field('MODE==', 6) + ' ' +\
self.x_field('FUEL==', 5) + ' ' +\
self.x_field('HOME======', self.staxsize) + ' ' +\
self.x_field(' ', addin) + ' ' +\
self.x_field('LNGTH', 5) + ' ' +\
self.x_field('WGHT=', 5) )
row_no = row_no + 1
self.temp[row_no] = print_line
#detail wanted, so get the loco details
data = (train_id,)
sql = 'select locomotive.railroad, locomotive.loco, ' +\
'locomotive.is_powered, locotype.power_type, locotype.haulage, ' +\
'locotype.oper_mode, locotype.length, locomotive.weight, locomotive.fuel, ' +\
'locomotive.home_station ' +\
'from locomotive, locotype ' +\
'where locomotive.train = ? ' +\
'and locomotive.locotype = locotype.locotype'
loco_count, ds_locos = self.db_read(sql, data)
if loco_count < 0:
return
for lrow in ds_locos:
if lrow[2] == 'P':
powerstate = ' '
else:
powerstate = 'UNPWRD'
if lrow[3] == 'D':
plant = 'DIESEL'
elif lrow[3] == 'E':
plant = 'ELCTRC'
elif lrow[3] == 'S':
plant = 'STEAM '
else:
plant = ' '
if lrow[5] == 'I':
mode = ' '
elif lrow[5] == 'D':
mode = 'SLAVE '
elif lrow[5] == 'M':
mode = 'M.UNIT'
else:
mode = 'OTHER '
print_line = (self.x_field(lrow[0], self.railsize) + ' ' +
self.x_field(lrow[1], self.locosize) + ' ' +
self.x_field(powerstate, 6) + ' ' +
self.x_field(plant, 6) + ' ' +
self.x_field(lrow[4], 5, 'R') + ' ' +
self.x_field(mode,6) + ' ' +
self.x_field(lrow[8], 5, 'R') + ' ' +
self.x_field(lrow[9], self.staxsize) + ' ' +
self.x_field(' ', addin) + ' ' +
self.x_field(lrow[6], 5, 'R') + ' ' +
self.x_field(lrow[7], 5, 'R'))
row_no = row_no + 1
self.temp[row_no] = print_line
print_line = ' '
row_no = row_no + 1
self.temp[row_no] = print_line
print_line = titles
row_no = row_no + 1
self.temp[row_no] = print_line
#get the car details
data = (train_id,)
sql = 'select car.railroad, car.car, place.station, ' +\
'station.short_name, waybill.destination, ' +\
'car.block, car.carclass, car.commodity, ' +\
'cartype.length, cartype.unladen_weight, ' +\
'car.weight_loaded ' +\
'from car, cartype ' +\
'left outer join waybill on car.carorder = waybill.id ' +\
'left outer join place on waybill.destination = place.industry ' +\
'left outer join station on place.station = station.station ' +\
'where car.train = ? ' +\
'and car.cartype = cartype.cartype ' +\
'order by place.industry, car.block, car.railroad, car.car'
car_count, ds_cars = self.db_read(sql, data)
if car_count < 0:
return
for crow in ds_cars:
weight = crow[9] + crow[10]
print_line = (self.x_field(crow[0], self.railsize) + ' ' +
self.x_field(crow[1], self.carxsize) + ' ' +
self.x_field(crow[2], self.staxsize) + ' ' +
self.x_field(crow[3], 8) + ' ' +
self.x_field(crow[4], 10) + ' ' +
self.x_field(crow[5], 6) + ' ' +
self.x_field(crow[6], self.classize) + ' ' +
self.x_field(crow[7], self.commsize) + ' ' +
self.x_field(crow[8], 5, 'R') + ' ' +
self.x_field(weight, 5, 'R'))
row_no = row_no + 1
self.temp[row_no] = print_line
#report the extracted data
self.print_report (titles = titles,
report_id = 'PRCONS',
report_name = 'CONSIST FOR TRAIN ' + train_id,
Params = Params)
return
| 42.215643
| 139
| 0.436117
| 9,648
| 95,534
| 4.18097
| 0.056385
| 0.022931
| 0.045862
| 0.022559
| 0.764193
| 0.728122
| 0.702613
| 0.684466
| 0.662775
| 0.641356
| 0
| 0.015689
| 0.44089
| 95,534
| 2,262
| 140
| 42.234306
| 0.739507
| 0.119549
| 0
| 0.727222
| 0
| 0.000562
| 0.189626
| 0.010574
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009561
| false
| 0.008436
| 0.000562
| 0
| 0.131046
| 0.060742
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
411ed9f3064ab0f16944c89f4cdd2752fd12e5e0
| 274
|
py
|
Python
|
classificador_lyra/procedencia/__init__.py
|
MinisterioPublicoRJ/classificador-lyra
|
da840cb4ea9f3298c1813a1e147f5651873d2762
|
[
"MIT"
] | 8
|
2018-10-24T13:33:31.000Z
|
2022-02-21T13:11:50.000Z
|
classificador_lyra/procedencia/__init__.py
|
adelsonsena/classificador-lyra
|
da840cb4ea9f3298c1813a1e147f5651873d2762
|
[
"MIT"
] | 3
|
2019-06-18T20:05:53.000Z
|
2021-06-01T22:51:55.000Z
|
classificador_lyra/procedencia/__init__.py
|
adelsonsena/classificador-lyra
|
da840cb4ea9f3298c1813a1e147f5651873d2762
|
[
"MIT"
] | 2
|
2019-05-24T12:38:27.000Z
|
2020-08-30T20:10:53.000Z
|
from .procedencia import (
ExtincaoPunibilidadeClassifier,
ImprocedenteClassifier,
ProcedenteClassifier,
classificadores
)
__all__ = [
"classificadores",
"ExtincaoPunibilidadeClassifier",
"ImprocedenteClassifier",
"ProcedenteClassifier",
]
| 18.266667
| 37
| 0.737226
| 12
| 274
| 16.5
| 0.666667
| 0.525253
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186131
| 274
| 14
| 38
| 19.571429
| 0.887892
| 0
| 0
| 0
| 0
| 0
| 0.317518
| 0.189781
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
f5b47fa889efecdbbc6fcb511b9522e53a0a6b17
| 213
|
py
|
Python
|
part_1_read_test_dat.py
|
colczr/cc_tools
|
4b367b75b695c66de57091e03ab9990d7e620226
|
[
"MIT"
] | null | null | null |
part_1_read_test_dat.py
|
colczr/cc_tools
|
4b367b75b695c66de57091e03ab9990d7e620226
|
[
"MIT"
] | null | null | null |
part_1_read_test_dat.py
|
colczr/cc_tools
|
4b367b75b695c66de57091e03ab9990d7e620226
|
[
"MIT"
] | null | null | null |
import cc_dat_utils

# Part 1: parse the test .dat file and dump the result to stdout.
input_dat_file = "data/pfgd_test.dat"

# cc_dat_utils.make_cc_data_from_dat() loads the file named above.
cc_data = cc_dat_utils.make_cc_data_from_dat(input_dat_file)
print(cc_data)
| 26.625
| 86
| 0.840376
| 43
| 213
| 3.674419
| 0.465116
| 0.094937
| 0.189873
| 0.177215
| 0.341772
| 0.341772
| 0.341772
| 0.341772
| 0
| 0
| 0
| 0.005155
| 0.089202
| 213
| 7
| 87
| 30.428571
| 0.809278
| 0.42723
| 0
| 0
| 0
| 0
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
f5b513dbe514976c3bd3faab8104566dee62e1dd
| 1,817
|
py
|
Python
|
test/test_radar_api.py
|
hi-artem/twistlock-py
|
9888e905f5b9d3cc00f9b84244588c0992f8e4f4
|
[
"RSA-MD"
] | null | null | null |
test/test_radar_api.py
|
hi-artem/twistlock-py
|
9888e905f5b9d3cc00f9b84244588c0992f8e4f4
|
[
"RSA-MD"
] | null | null | null |
test/test_radar_api.py
|
hi-artem/twistlock-py
|
9888e905f5b9d3cc00f9b84244588c0992f8e4f4
|
[
"RSA-MD"
] | null | null | null |
# coding: utf-8
"""
Prisma Cloud Compute API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 21.04.439
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import openapi_client
from openapi_client.api.radar_api import RadarApi # noqa: E501
from openapi_client.rest import ApiException
class TestRadarApi(unittest.TestCase):
    """RadarApi unit test stubs.

    Auto-generated by OpenAPI Generator: each test_* method is an empty
    placeholder for one Radar API endpoint; add request parameters and
    assertions when implementing a real test.
    """
    def setUp(self):
        # Fresh RadarApi client for every test case.
        self.api = openapi_client.api.radar_api.RadarApi()  # noqa: E501
    def tearDown(self):
        # Nothing to release; the client holds no per-test resources here.
        pass
    def test_api_v1_radar_container_clusters_get(self):
        """Test case for api_v1_radar_container_clusters_get

        """
        pass
    def test_api_v1_radar_container_get(self):
        """Test case for api_v1_radar_container_get

        """
        pass
    def test_api_v1_radar_container_namespaces_get(self):
        """Test case for api_v1_radar_container_namespaces_get

        """
        pass
    def test_api_v1_radar_host_get(self):
        """Test case for api_v1_radar_host_get

        """
        pass
    def test_api_v1_radar_serverless_get(self):
        """Test case for api_v1_radar_serverless_get

        """
        pass
    def test_api_v1_radar_serverless_progress_get(self):
        """Test case for api_v1_radar_serverless_progress_get

        """
        pass
    def test_api_v1_radar_serverless_scan_post(self):
        """Test case for api_v1_radar_serverless_scan_post

        """
        pass
    def test_api_v1_radar_serverless_stop_post(self):
        """Test case for api_v1_radar_serverless_stop_post

        """
        pass
if __name__ == '__main__':
unittest.main()
| 22.158537
| 124
| 0.681893
| 239
| 1,817
| 4.761506
| 0.271967
| 0.070299
| 0.140598
| 0.098418
| 0.595782
| 0.546573
| 0.479789
| 0.405097
| 0.232865
| 0
| 0
| 0.023948
| 0.241607
| 1,817
| 81
| 125
| 22.432099
| 0.801887
| 0.412218
| 0
| 0.321429
| 1
| 0
| 0.008024
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.357143
| false
| 0.321429
| 0.178571
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
f5b8fcfa324db0f808fb2e11cbe11dacc2489c42
| 444
|
py
|
Python
|
pyscheme/test/test_package.py
|
heidikira/startables-python
|
3a063daed38d6ffb48ea54aec2644addbf5c771b
|
[
"BSD-3-Clause"
] | 8
|
2019-09-05T11:01:52.000Z
|
2020-07-01T07:40:25.000Z
|
pyscheme/test/test_package.py
|
heidikira/startables-python
|
3a063daed38d6ffb48ea54aec2644addbf5c771b
|
[
"BSD-3-Clause"
] | 11
|
2019-10-17T10:26:27.000Z
|
2020-12-17T15:53:13.000Z
|
pyscheme/test/test_package.py
|
heidikira/startables-python
|
3a063daed38d6ffb48ea54aec2644addbf5c771b
|
[
"BSD-3-Clause"
] | 4
|
2019-09-26T07:03:11.000Z
|
2020-12-17T14:45:26.000Z
|
from pyscheme import parse_expression, make_root_environment
def test_main_use_case():
    """Defined expressions re-evaluate against the latest binding of
    their free variables (here zFoo depends on aPar)."""
    env = make_root_environment()
    env.define('zFoo', parse_expression('(+ aPar 7)'))
    env.define('aPar', 3)

    def ev(source):
        # Evaluate a source string in the root environment.
        return env.evaluate(parse_expression(source))

    assert ev('(- zFoo 1)') == 9
    env.define('aPar', 4)
    # Redefining aPar is picked up on the next evaluation of zFoo.
    assert ev('zFoo') == 11
    assert ev('zFoo - 1') == 10
    # TODO: Test notifications
| 27.75
| 61
| 0.693694
| 58
| 444
| 5.103448
| 0.465517
| 0.253378
| 0.182432
| 0.233108
| 0.381757
| 0.381757
| 0.256757
| 0
| 0
| 0
| 0
| 0.02681
| 0.15991
| 444
| 15
| 62
| 29.6
| 0.766756
| 0.054054
| 0
| 0
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0.066667
| 0.333333
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
eb012bb42e3e7e36c4ab9c36426a705046ff2919
| 23
|
py
|
Python
|
samtranslator/__init__.py
|
kavindasr/serverless-application-model
|
016646db01fcf2283a82446b49677fe94925dd27
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
samtranslator/__init__.py
|
kavindasr/serverless-application-model
|
016646db01fcf2283a82446b49677fe94925dd27
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
samtranslator/__init__.py
|
kavindasr/serverless-application-model
|
016646db01fcf2283a82446b49677fe94925dd27
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
__version__ = "1.43.0"
| 11.5
| 22
| 0.652174
| 4
| 23
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.130435
| 23
| 1
| 23
| 23
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
eb16ae370458f241ba4879a70ccf2c03e0062fb7
| 288
|
py
|
Python
|
StarSoup/__init__.py
|
bolverk/gaia_black_hole_binaries
|
966c35cf122e46923fcb8c0b4d9a6bbb66174751
|
[
"MIT"
] | 1
|
2018-06-30T01:15:10.000Z
|
2018-06-30T01:15:10.000Z
|
StarSoup/__init__.py
|
bolverk/gaia_black_hole_binaries
|
966c35cf122e46923fcb8c0b4d9a6bbb66174751
|
[
"MIT"
] | 5
|
2017-11-09T18:46:04.000Z
|
2018-02-16T16:53:52.000Z
|
StarSoup/__init__.py
|
bolverk/gaia_black_hole_binaries
|
966c35cf122e46923fcb8c0b4d9a6bbb66174751
|
[
"MIT"
] | null | null | null |
from fiducial import fiducial
from cook import cook
from whittle_gaia import whittle_gaia
from whittle_hipparcos import whittle_hipparcos
import CommonEnvelope
import EvolutionSequence
import NatalKicks
from PowerLawDistribution import PowerLawDistribution
from hurley import HurleyModel
| 28.8
| 53
| 0.895833
| 34
| 288
| 7.470588
| 0.382353
| 0.086614
| 0.173228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 288
| 9
| 54
| 32
| 0.984496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.