hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8d21693c2cfa1a74fa1fb40bc808d07ee22e1aae
| 27
|
py
|
Python
|
donkeypart_sombrero/__init__.py
|
basketwang/donkeypart_sombrero
|
71ac5f75a9619c49122a8c838437119c802e2ca5
|
[
"MIT"
] | 1
|
2020-01-04T23:27:14.000Z
|
2020-01-04T23:27:14.000Z
|
donkeypart_sombrero/__init__.py
|
basketwang/donkeypart_sombrero
|
71ac5f75a9619c49122a8c838437119c802e2ca5
|
[
"MIT"
] | null | null | null |
donkeypart_sombrero/__init__.py
|
basketwang/donkeypart_sombrero
|
71ac5f75a9619c49122a8c838437119c802e2ca5
|
[
"MIT"
] | 1
|
2019-05-11T23:39:57.000Z
|
2019-05-11T23:39:57.000Z
|
from .part import Sombrero
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8d3b23ed6ba1c7f945bc220d45870ce1ae02b3b1
| 82
|
py
|
Python
|
Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/parse/__init__.py
|
dendisuhubdy/Vitis-AI
|
524f65224c52314155dafc011d488ed30e458fcb
|
[
"Apache-2.0"
] | 1
|
2021-04-01T06:38:48.000Z
|
2021-04-01T06:38:48.000Z
|
Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/parse/__init__.py
|
dendisuhubdy/Vitis-AI
|
524f65224c52314155dafc011d488ed30e458fcb
|
[
"Apache-2.0"
] | null | null | null |
Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/parse/__init__.py
|
dendisuhubdy/Vitis-AI
|
524f65224c52314155dafc011d488ed30e458fcb
|
[
"Apache-2.0"
] | null | null | null |
from .trace_helper import *
from .parser import *
from .node_transformer import *
| 20.5
| 31
| 0.780488
| 11
| 82
| 5.636364
| 0.636364
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 82
| 3
| 32
| 27.333333
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a5f1f1037f1d681c37f4c6a726a070ab215139cb
| 763
|
py
|
Python
|
octicons16px/mark_github.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | 1
|
2021-01-28T06:47:39.000Z
|
2021-01-28T06:47:39.000Z
|
octicons16px/mark_github.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
octicons16px/mark_github.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
OCTICON_MARK_GITHUB = """
<svg class="octicon octicon-mark-github" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path></svg>
"""
| 152.6
| 731
| 0.605505
| 230
| 763
| 2
| 0.4
| 0.017391
| 0.026087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.490647
| 0.089122
| 763
| 4
| 732
| 190.75
| 0.171223
| 0
| 0
| 0
| 0
| 0.333333
| 0.961942
| 0.372703
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
93de13a79b4140d19b57c601b58eb601436d7482
| 356
|
py
|
Python
|
allennlp_models/syntax/constituency_parser/__init__.py
|
jens321/allennlp-models
|
cee3a7507cf8d15cd8520808bd9c6381369e868e
|
[
"Apache-2.0"
] | 1
|
2020-05-19T05:14:50.000Z
|
2020-05-19T05:14:50.000Z
|
allennlp_models/syntax/constituency_parser/__init__.py
|
jens321/allennlp-models
|
cee3a7507cf8d15cd8520808bd9c6381369e868e
|
[
"Apache-2.0"
] | null | null | null |
allennlp_models/syntax/constituency_parser/__init__.py
|
jens321/allennlp-models
|
cee3a7507cf8d15cd8520808bd9c6381369e868e
|
[
"Apache-2.0"
] | null | null | null |
from allennlp_models.syntax.constituency_parser.constituency_parser_model import (
SpanConstituencyParser,
)
from allennlp_models.syntax.constituency_parser.constituency_parser_predictor import (
ConstituencyParserPredictor,
)
from allennlp_models.syntax.constituency_parser.penn_tree_bank import (
PennTreeBankConstituencySpanDatasetReader,
)
| 35.6
| 86
| 0.865169
| 33
| 356
| 8.969697
| 0.454545
| 0.304054
| 0.182432
| 0.243243
| 0.547297
| 0.547297
| 0.405405
| 0.405405
| 0
| 0
| 0
| 0
| 0.08427
| 356
| 9
| 87
| 39.555556
| 0.907975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
93f5f6192d3c9986958c7769c0cf82aab9f413aa
| 26
|
py
|
Python
|
imgtda/locally_striped/__init__.py
|
rachellevanger/tda-image-analysis
|
5560e6ec9e2b7b74f91cad25a9cdefa1df172711
|
[
"MIT"
] | 6
|
2017-04-08T19:46:14.000Z
|
2019-11-12T04:43:10.000Z
|
imgtda/locally_striped/__init__.py
|
rachellevanger/tda-image-analysis
|
5560e6ec9e2b7b74f91cad25a9cdefa1df172711
|
[
"MIT"
] | 1
|
2019-08-23T21:20:03.000Z
|
2019-08-23T21:20:03.000Z
|
imgtda/locally_striped/__init__.py
|
rachellevanger/tda-image-analysis
|
5560e6ec9e2b7b74f91cad25a9cdefa1df172711
|
[
"MIT"
] | null | null | null |
from image import Image
| 6.5
| 23
| 0.769231
| 4
| 26
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 26
| 3
| 24
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f516f9b86a6a1180095b40fc5c51c2bd1522eb1c
| 133
|
py
|
Python
|
Florence/FunctionSpace/TwoDimensional/Quad/__init__.py
|
jdlaubrie/florence
|
830dca4a34be00d6e53cbec3007c10d438b27f57
|
[
"MIT"
] | 65
|
2017-08-04T10:21:13.000Z
|
2022-02-21T21:45:09.000Z
|
Florence/FunctionSpace/TwoDimensional/Quad/__init__.py
|
jdlaubrie/florence
|
830dca4a34be00d6e53cbec3007c10d438b27f57
|
[
"MIT"
] | 6
|
2018-06-03T02:29:20.000Z
|
2022-01-18T02:30:22.000Z
|
Florence/FunctionSpace/TwoDimensional/Quad/__init__.py
|
jdlaubrie/florence
|
830dca4a34be00d6e53cbec3007c10d438b27f57
|
[
"MIT"
] | 10
|
2018-05-30T09:44:10.000Z
|
2021-05-18T08:06:51.000Z
|
from .QuadLagrangeGaussLobatto import LagrangeGaussLobatto, GradLagrangeGaussLobatto
from .QuadLagrange import Lagrange, GradLagrange
| 66.5
| 84
| 0.902256
| 10
| 133
| 12
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067669
| 133
| 2
| 85
| 66.5
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f52e59e54c763c6e94e2adaa03491e143a00a435
| 25,105
|
py
|
Python
|
tests/test_integrations.py
|
nicolasramy/optimove-client
|
d492ddaa7b20493c1077308a2404994730f8c6cc
|
[
"MIT"
] | 3
|
2016-08-10T14:14:54.000Z
|
2016-09-01T05:48:03.000Z
|
tests/test_integrations.py
|
nicolasramy/optimove-client
|
d492ddaa7b20493c1077308a2404994730f8c6cc
|
[
"MIT"
] | 6
|
2016-08-23T13:03:29.000Z
|
2018-02-08T17:01:59.000Z
|
tests/test_integrations.py
|
nicolasramy/optimove-client
|
d492ddaa7b20493c1077308a2404994730f8c6cc
|
[
"MIT"
] | 3
|
2016-09-01T09:58:26.000Z
|
2019-10-16T13:27:58.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
import random
import string
import unittest
from six.moves.urllib.parse import parse_qs, urlparse
from optimove.client import Client
from optimove.constants import DEFAULT_URL
from optimove.integrations import Integrations
import responses
from tests.constants import HEADERS
from tests.helpers import login_callback, token_required
"""Callbacks"""
@token_required
def add_promotions_callback(request):
payload = json.loads(request.body)
resp_body = all([True if item['PromotionName'] is not None and item['PromoCode'] is not None else False
for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
@token_required
def get_promotions_callback(request):
resp_body = [
{'PromoCode': 'WB23', 'PromotionName': 'Welcome back Promo'},
{'PromoCode': 'NV10', 'PromotionName': 'New VIP 10% Discount'}
]
return 200, HEADERS['json'], json.dumps(resp_body)
@token_required
def delete_promotions_callback(request):
payload = json.loads(request.body)
resp_body = all([True if item['PromoCode'] is not None else False for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
@token_required
def add_channel_templates_callback(request):
payload = json.loads(request.body)
params = parse_qs(urlparse(request.url).query)
if params['ChannelID'][0] == '3':
resp_body = all([True if item['TemplateID'] is not None and item['TemplateName'] is not None else False
for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
else:
return 404, HEADERS['text'], 'Not Found'
@token_required
def get_channel_templates_callback(request):
params = parse_qs(urlparse(request.url).query)
if params['ChannelID'][0] == '3':
resp_body = [
{'TemplateID': 1, 'TemplateName': 'Welcome Back English'},
{'TemplateID': 2, 'TemplateName': 'Welcome Back Spanish'}
]
return 200, HEADERS['json'], json.dumps(resp_body)
else:
return 404, HEADERS['text'], 'Not Found'
@token_required
def delete_channel_templates_callback(request):
payload = json.loads(request.body)
resp_body = all([True if item['ChannelID'] is not None and item['TemplateID'] is not None else False
for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
@token_required
def add_channel_apps_callback(request):
payload = json.loads(request.body)
params = parse_qs(urlparse(request.url).query)
if params['ChannelID'][0] == '3':
resp_body = all([True if item['AppID'] is not None and item['AppName'] is not None else False
for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
else:
return 404, HEADERS['text'], 'Not Found'
@token_required
def delete_channel_apps_callback(request):
payload = json.loads(request.body)
resp_body = all([True if item['ChannelID'] is not None and item['AppID'] is not None else False
for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
@token_required
def update_campaign_metrics_callback(request):
payload = json.loads(request.body)
resp_body = all([True if item['ChannelID'] is not None
and item['CampaignID'] is not None
and item['TemplateID'] is not None
and item['MetricID'] is not None
and item['MetricValue'] is not None else False for item in payload])
return 200, HEADERS['json'], json.dumps(resp_body)
"""Tests"""
class TestIntegrations(unittest.TestCase):
@responses.activate
def test_add_promotions(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddPromotions',
callback=add_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.add_promotions({
'WB23': 'Welcome back Promo',
'NV10': 'New VIP 10% Discount',
})
self.assertTrue(data)
@responses.activate
def test_add_promotions_with_empty_promotions(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddPromotions',
callback=add_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.add_promotions, None)
@responses.activate
def test_add_promotions_overflow(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddPromotions',
callback=add_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
too_much_promotions = {}
for it in range(150):
promo_code = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(5)])
too_much_promotions[promo_code] = promo_code
self.assertRaises(Exception, client.integrations.add_promotions, too_much_promotions)
@responses.activate
def test_get_promotions(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.GET,
DEFAULT_URL + '/current/integrations/GetPromotions',
callback=get_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.get_promotions()
self.assertEqual(data, {
'WB23': 'Welcome back Promo',
'NV10': 'New VIP 10% Discount'
})
@responses.activate
def test_delete_promotions(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeletePromotions',
callback=delete_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.delete_promotions(['WB23', 'NV10'])
self.assertTrue(data)
@responses.activate
def test_delete_promotions_with_empty_promotions(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeletePromotions',
callback=delete_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.delete_promotions, None)
@responses.activate
def test_delete_promotions_overflow(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeletePromotions',
callback=delete_promotions_callback,
content_type='application/json'
)
client = Client('username', 'password')
too_much_promotions = []
for it in range(150):
promo_code = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(5)])
too_much_promotions.append(promo_code)
self.assertRaises(Exception, client.integrations.delete_promotions, too_much_promotions)
@responses.activate
def test_add_channel_templates(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddChannelTemplates',
callback=add_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.add_channel_templates(3, [
{'id': 1, 'name': 'Welcome Back English'},
{'id': 2, 'name': 'Welcome Back Spanish', 'app_id': 'app123'},
])
self.assertTrue(data)
@responses.activate
def test_add_channel_templates_with_empty_channel_id(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddChannelTemplates',
callback=add_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.add_channel_templates, None, [
{'id': 1, 'name': 'Welcome Back English'},
{'id': 2, 'name': 'Welcome Back Spanish', 'app_id': 'app123'},
])
@responses.activate
def test_add_channel_templates_overflow(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddChannelTemplates',
callback=add_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
too_much_channel_templates = []
for channel_template_id in range(150):
channel_template = {
'id': channel_template_id,
'name': ''.join([random.choice(string.ascii_uppercase + string.digits + ' ') for _ in range(50)])
}
if random.choice([True, False]):
channel_template['app_id'] = ''.join([random.choice(string.ascii_uppercase + string.digits)
for _ in range(5)])
too_much_channel_templates.append(channel_template)
self.assertRaises(Exception, client.integrations.add_channel_templates, 3, too_much_channel_templates)
@responses.activate
def test_get_channels_templates(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.GET,
DEFAULT_URL + '/current/integrations/GetChannelTemplates',
callback=get_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.get_channel_templates(3)
self.assertEqual(data, {
1: 'Welcome Back English',
2: 'Welcome Back Spanish'
})
@responses.activate
def test_get_channels_templates_with_empty_channel_id(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.GET,
DEFAULT_URL + '/current/integrations/GetChannelTemplates',
callback=get_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.get_channel_templates, None)
@responses.activate
def test_get_channels_templates_with_wrong_channel_id(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.GET,
DEFAULT_URL + '/current/integrations/GetChannelTemplates',
callback=get_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.get_channel_templates(4)
self.assertFalse(data)
@responses.activate
def test_delete_channel_templates(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeleteChannelTemplates',
callback=delete_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.delete_channel_templates([
{'channel_id': 3, 'template_id': 15},
{'channel_id': 4, 'template_id': 26}
])
self.assertTrue(data)
@responses.activate
def test_delete_channel_templates_with_empty_channel_templates(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeleteChannelTemplates',
callback=delete_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.delete_channel_templates, None)
@responses.activate
def test_delete_channel_templates_overflow(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeleteChannelTemplates',
callback=delete_channel_templates_callback,
content_type='application/json'
)
client = Client('username', 'password')
too_much_channel_templates = []
for channel_template_id in range(150):
channel_template = {
'template_id': channel_template_id,
'channel_id': random.randint(1, 4)
}
too_much_channel_templates.append(channel_template)
self.assertRaises(Exception, client.integrations.delete_channel_templates, too_much_channel_templates)
@responses.activate
def test_add_channel_apps(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddChannelApps',
callback=add_channel_apps_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.add_channel_apps(3, {
1: 'Bingo Mania',
2: 'Super Slots'
})
self.assertTrue(data)
@responses.activate
def test_add_channel_apps_with_empty_channel_id(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddChannelApps',
callback=add_channel_apps_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.add_channel_apps, None, {
1: 'Bingo Mania',
2: 'Super Slots'
})
@responses.activate
def test_add_channel_apps_overflow(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/AddChannelApps',
callback=add_channel_apps_callback,
content_type='application/json'
)
client = Client('username', 'password')
too_much_channel_apps = {}
for app_id in range(150):
too_much_channel_apps[app_id] = \
''.join([random.choice(string.ascii_uppercase + string.digits + ' ') for _ in range(50)])
self.assertRaises(Exception, client.integrations.add_channel_apps, 3, too_much_channel_apps)
@responses.activate
def test_delete_channel_apps(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeleteChannelApps',
callback=delete_channel_apps_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.delete_channel_apps([
{'channel_id': 3, 'app_id': 1},
{'channel_id': 3, 'app_id': 2}
])
self.assertTrue(data)
@responses.activate
def test_delete_channel_apps_with_empty_channel_apps(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeleteChannelApps',
callback=delete_channel_apps_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.delete_channel_apps, None)
@responses.activate
def test_delete_channel_apps_overflow(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/DeleteChannelApps',
callback=delete_channel_apps_callback,
content_type='application/json'
)
client = Client('username', 'password')
too_much_channel_apps = []
for channel_app_id in range(150):
channel_app = {
'app_id': channel_app_id,
'channel_id': random.randint(1, 4)
}
too_much_channel_apps.append(channel_app)
self.assertRaises(Exception, client.integrations.delete_channel_apps, too_much_channel_apps)
@responses.activate
def test_update_campaign_metrics(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/UpdateCampaignMetrics',
callback=update_campaign_metrics_callback,
content_type='application/json'
)
client = Client('username', 'password')
data = client.integrations.update_campaign_metrics([
{'channel_id': 3, 'campaign_id': 42, 'template_id': 8,
'metric': Integrations.METRIC_SENT, 'value': 925},
{'channel_id': 3, 'campaign_id': 42, 'template_id': 8,
'metric': Integrations.METRIC_DELIVERED, 'value': 809},
{'channel_id': 3, 'campaign_id': 42, 'template_id': 8,
'metric': Integrations.METRIC_OPENED, 'value': 250},
{'channel_id': 3, 'campaign_id': 42, 'template_id': 8,
'metric': Integrations.METRIC_CLICKED, 'value': 122},
{'channel_id': 3, 'campaign_id': 42, 'template_id': 8,
'metric': Integrations.METRIC_UNSUBSCRIBED, 'value': 11}
])
self.assertTrue(data)
@responses.activate
def test_update_campaign_metrics_with_empty_metrics(self):
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/general/login',
callback=login_callback,
content_type='application/json'
)
responses.add_callback(
responses.POST,
DEFAULT_URL + '/current/integrations/UpdateCampaignMetrics',
callback=update_campaign_metrics_callback,
content_type='application/json'
)
client = Client('username', 'password')
self.assertRaises(Exception, client.integrations.update_campaign_metrics, None)
@responses.activate
def test_update_campaign_metrics_overflow(self):
    """Sending more metric entries than the API accepts must raise.

    Builds 25 campaigns x 5 metric types = 125 entries, which is expected
    to exceed the batch limit enforced by ``update_campaign_metrics``.
    """
    responses.add_callback(
        responses.POST,
        DEFAULT_URL + '/current/general/login',
        callback=login_callback,
        content_type='application/json'
    )
    responses.add_callback(
        responses.POST,
        DEFAULT_URL + '/current/integrations/UpdateCampaignMetrics',
        callback=update_campaign_metrics_callback,
        content_type='application/json'
    )
    client = Client('username', 'password')
    all_metrics = (
        Integrations.METRIC_SENT,
        Integrations.METRIC_DELIVERED,
        Integrations.METRIC_OPENED,
        Integrations.METRIC_CLICKED,
        Integrations.METRIC_UNSUBSCRIBED,
    )
    too_much_metrics = []
    for campaign_id in range(25):
        # Same random template per campaign, mirroring the original fixture.
        template_id = random.randint(1, 99)
        for metric in all_metrics:
            too_much_metrics.append({
                'channel_id': 3,
                'campaign_id': campaign_id,
                'template_id': template_id,
                'metric': metric,
                # randrange(1000) is the idiomatic form of choice(range(1000))
                'value': random.randrange(1000),
            })
    self.assertRaises(Exception, client.integrations.update_campaign_metrics, too_much_metrics)
| 34.532325
| 113
| 0.613902
| 2,469
| 25,105
| 5.989874
| 0.069664
| 0.034485
| 0.067618
| 0.098046
| 0.920887
| 0.89695
| 0.884576
| 0.853405
| 0.801474
| 0.776794
| 0
| 0.010733
| 0.287433
| 25,105
| 726
| 114
| 34.57989
| 0.815976
| 0.000836
| 0
| 0.694215
| 0
| 0
| 0.16076
| 0.061303
| 0
| 0
| 0
| 0
| 0.041322
| 1
| 0.056198
| false
| 0.041322
| 0.019835
| 0
| 0.097521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f55446b80bc28b5b7ce98b699369d283e3d26628
| 20
|
py
|
Python
|
torch/for_onnx/__init__.py
|
wenhaopeter/read_pytorch_code
|
491f989cd918cf08874dd4f671fb7f0142a0bc4f
|
[
"Intel",
"X11"
] | 206
|
2020-11-28T22:56:38.000Z
|
2022-03-27T02:33:04.000Z
|
torch/for_onnx/__init__.py
|
wenhaopeter/read_pytorch_code
|
491f989cd918cf08874dd4f671fb7f0142a0bc4f
|
[
"Intel",
"X11"
] | 19
|
2020-12-09T23:13:14.000Z
|
2022-01-24T23:24:08.000Z
|
torch/for_onnx/__init__.py
|
wenhaopeter/read_pytorch_code
|
491f989cd918cf08874dd4f671fb7f0142a0bc4f
|
[
"Intel",
"X11"
] | 33
|
2020-02-18T16:15:48.000Z
|
2022-03-24T15:12:05.000Z
|
from .onnx import *
| 10
| 19
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1947f94158d290361b61cb811111e70d702f5f97
| 2,358
|
py
|
Python
|
main_app/migrations/0026_auto_20210225_1533.py
|
AlexGeniusMan/ARTWAY-PROJECT
|
0430cf2359e3b78ef4eb25466e5871ab2ff2bfcd
|
[
"Apache-2.0"
] | 1
|
2020-12-12T13:07:34.000Z
|
2020-12-12T13:07:34.000Z
|
main_app/migrations/0026_auto_20210225_1533.py
|
AlexGeniusMan/ARTWAY-PROJECT
|
0430cf2359e3b78ef4eb25466e5871ab2ff2bfcd
|
[
"Apache-2.0"
] | null | null | null |
main_app/migrations/0026_auto_20210225_1533.py
|
AlexGeniusMan/ARTWAY-PROJECT
|
0430cf2359e3b78ef4eb25466e5871ab2ff2bfcd
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.5 on 2021-02-25 12:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop ``artifact.video`` and add five symmetric link name/value pairs."""

    dependencies = [
        ('main_app', '0025_auto_20210225_1427'),
    ]

    # The ten AddField operations differ only in their index (1..5), so they
    # are generated; the resulting operations list is identical to spelling
    # each one out by hand.
    operations = (
        [
            migrations.RemoveField(
                model_name='artifact',
                name='video',
            ),
        ]
        + [
            migrations.AddField(
                model_name='artifact',
                name='link_name_%d' % index,
                field=models.CharField(blank=True, max_length=50,
                                       verbose_name='Название ссылки #%d' % index),
            )
            for index in range(1, 6)
        ]
        + [
            migrations.AddField(
                model_name='artifact',
                name='link_value_%d' % index,
                field=models.CharField(blank=True, max_length=1000,
                                       verbose_name='Ссылка #%d' % index),
            )
            for index in range(1, 6)
        ]
    )
| 34.676471
| 97
| 0.579729
| 252
| 2,358
| 5.206349
| 0.194444
| 0.075457
| 0.14253
| 0.176067
| 0.848323
| 0.848323
| 0.848323
| 0.848323
| 0.472561
| 0.472561
| 0
| 0.048943
| 0.298134
| 2,358
| 67
| 98
| 35.19403
| 0.743807
| 0.019084
| 0
| 0.52459
| 1
| 0
| 0.161835
| 0.009952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016393
| 0
| 0.065574
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2716441dc44bfb6ffd60fa0e02e2a39d4c9c274f
| 208
|
py
|
Python
|
charts/openedx/settings/partials/common_test.py
|
thphuong/sunasteriskrnd-helm-charts
|
7c6e70a2f437c85549129139ee7c994b34eda481
|
[
"MIT"
] | null | null | null |
charts/openedx/settings/partials/common_test.py
|
thphuong/sunasteriskrnd-helm-charts
|
7c6e70a2f437c85549129139ee7c994b34eda481
|
[
"MIT"
] | null | null | null |
charts/openedx/settings/partials/common_test.py
|
thphuong/sunasteriskrnd-helm-charts
|
7c6e70a2f437c85549129139ee7c994b34eda481
|
[
"MIT"
] | 1
|
2022-03-08T18:14:03.000Z
|
2022-03-08T18:14:03.000Z
|
# TODO clean this up
import os
# Set before the star-import below: presumably the ..test settings module
# reads this variable at import time — TODO confirm.
os.environ["EDXAPP_TEST_MONGO_HOST"] = "mongodb"
from ..test import *
# Fix MongoDb connection credentials
# NOTE(review): DOC_STORE_CONFIG appears to come from the star-import above;
# clearing user/password disables auth for the test store — verify.
DOC_STORE_CONFIG["user"] = None
DOC_STORE_CONFIG["password"] = None
| 20.8
| 48
| 0.759615
| 30
| 208
| 5.033333
| 0.733333
| 0.10596
| 0.18543
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129808
| 208
| 9
| 49
| 23.111111
| 0.834254
| 0.254808
| 0
| 0
| 0
| 0
| 0.269737
| 0.144737
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
27667d22e5a4427ebad3c576df55aaf00a294946
| 3,034
|
py
|
Python
|
gwlfe/MultiUse_Fxns/Erosion/AvStreamBankNSum.py
|
mudkipmaster/gwlf-e
|
9e058445537dd32d1916f76c4b73ca64261771cd
|
[
"Apache-2.0"
] | null | null | null |
gwlfe/MultiUse_Fxns/Erosion/AvStreamBankNSum.py
|
mudkipmaster/gwlf-e
|
9e058445537dd32d1916f76c4b73ca64261771cd
|
[
"Apache-2.0"
] | 6
|
2018-07-24T22:46:28.000Z
|
2018-07-29T19:13:09.000Z
|
gwlfe/MultiUse_Fxns/Erosion/AvStreamBankNSum.py
|
mudkipmaster/gwlf-e
|
9e058445537dd32d1916f76c4b73ca64261771cd
|
[
"Apache-2.0"
] | 1
|
2018-07-24T18:22:01.000Z
|
2018-07-24T18:22:01.000Z
|
from numpy import sum as npsum
from numpy import zeros
from gwlfe.Output.Loading.StreamBankN_1 import StreamBankN_1
from gwlfe.Output.Loading.StreamBankN_1 import StreamBankN_1_f
def AvStreamBankNSum(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0, CNP_0, Imper,
                     ISRR, ISRA, CN, UnsatStor_0, KV, PcntET, DayHrs, MaxWaterCap, SatStor_0, RecessionCoef, SeepCoef,
                     Qretention, PctAreaInfil, n25b, Landuse, TileDrainDensity, PointFlow, StreamWithdrawal,
                     GroundWithdrawal, NumAnimals, AvgAnimalWt, StreamFlowVolAdj, SedAFactor_0, AvKF, AvSlope,
                     SedAAdjust, StreamLength, n42b, n46c, n85d, AgLength, n42, n54, n85, UrbBankStab, SedNitr,
                     BankNFrac, n69c, n45, n69):
    """Sum the multi-year average monthly stream-bank nitrogen loads.

    Accumulates StreamBankN_1's per-year, per-month output into a 12-slot
    monthly average (dividing each contribution by NYrs), then returns the
    sum of those 12 monthly averages as a single scalar.

    All parameters are forwarded positionally to StreamBankN_1; their order
    here must match that function's signature exactly. NOTE(review): n46c,
    n54, n85, n85d appear in this signature but only n46c/n42 are forwarded
    (and in trailing positions) — confirm against StreamBankN_1.
    """
    # Monthly accumulator: index 0..11 = Jan..Dec (presumed — verify upstream).
    AvStreamBankN = zeros(12)
    for Y in range(NYrs):
        for i in range(12):
            # Each year's monthly value contributes 1/NYrs to the average.
            AvStreamBankN[i] += \
                StreamBankN_1(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area,
                              CNI_0, AntMoist_0, Grow_0, CNP_0, Imper, ISRR, ISRA, CN,
                              UnsatStor_0, KV, PcntET, DayHrs, MaxWaterCap, SatStor_0,
                              RecessionCoef, SeepCoef, Qretention, PctAreaInfil, n25b, Landuse,
                              TileDrainDensity, PointFlow, StreamWithdrawal, GroundWithdrawal,
                              NumAnimals, AvgAnimalWt, StreamFlowVolAdj, SedAFactor_0, AvKF,
                              AvSlope, SedAAdjust, StreamLength, n42b, AgLength,
                              UrbBankStab, SedNitr, BankNFrac, n69c, n45, n69, n46c, n42)[Y][i] / NYrs
    return sum(AvStreamBankN)
def AvStreamBankNSum_f(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0, CNP_0,
                       Imper, ISRR, ISRA, CN, UnsatStor_0, KV, PcntET, DayHrs, MaxWaterCap, SatStor_0, RecessionCoef,
                       SeepCoef, Qretention, PctAreaInfil, n25b, Landuse, TileDrainDensity, PointFlow, StreamWithdrawal,
                       GroundWithdrawal, NumAnimals, AvgAnimalWt, StreamFlowVolAdj, SedAFactor_0, AvKF, AvSlope,
                       SedAAdjust, StreamLength, n42b, n46c, n85d, AgLength, n42, n54, n85, UrbBankStab, SedNitr,
                       BankNFrac, n69c, n45, n69):
    """Vectorized counterpart of AvStreamBankNSum.

    Delegates to StreamBankN_1_f and collapses its entire output with
    numpy's sum, dividing by NYrs — equivalent to the explicit
    per-year/per-month loop in AvStreamBankNSum, assuming StreamBankN_1_f
    returns the same (year, month) values — TODO confirm.
    """
    return npsum(StreamBankN_1_f(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0,
                                 CNP_0, Imper, ISRR, ISRA, CN, UnsatStor_0, KV, PcntET, DayHrs, MaxWaterCap, SatStor_0,
                                 RecessionCoef, SeepCoef, Qretention, PctAreaInfil, n25b, Landuse, TileDrainDensity,
                                 PointFlow, StreamWithdrawal, GroundWithdrawal, NumAnimals, AvgAnimalWt,
                                 StreamFlowVolAdj,
                                 SedAFactor_0, AvKF, AvSlope, SedAAdjust, StreamLength, n42b, AgLength,
                                 UrbBankStab, SedNitr, BankNFrac, n69c, n45, n69, n46c, n42)) / NYrs
| 72.238095
| 120
| 0.613052
| 301
| 3,034
| 6.043189
| 0.255814
| 0.039582
| 0.037383
| 0.054975
| 0.882903
| 0.882903
| 0.882903
| 0.882903
| 0.882903
| 0.825728
| 0
| 0.052331
| 0.307185
| 3,034
| 41
| 121
| 74
| 0.813035
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.111111
| 0.027778
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
27a250650ed09e49662be8f1bf444fcfd2eade3f
| 10,172
|
py
|
Python
|
inpval/inpval_ut.py
|
BizTheDad/inpval
|
d83f5518bb8fa52fe626ac609049d418041baf80
|
[
"MIT"
] | null | null | null |
inpval/inpval_ut.py
|
BizTheDad/inpval
|
d83f5518bb8fa52fe626ac609049d418041baf80
|
[
"MIT"
] | null | null | null |
inpval/inpval_ut.py
|
BizTheDad/inpval
|
d83f5518bb8fa52fe626ac609049d418041baf80
|
[
"MIT"
] | null | null | null |
import unittest
from re import fullmatch
from inpval_formats import Formats, UnknownOptionError
class TestPatternMatching(unittest.TestCase):
    """Tests for the ``Formats`` pattern helpers in ``inpval_formats``."""

    def test_exceptions(self):
        """Unknown option names must raise UnknownOptionError."""
        with self.assertRaises(UnknownOptionError):
            Formats.matches_format('wegw', 'input string')
            # NOTE(review): if the line above raises (as expected), this
            # second call is never executed — it is effectively untested.
            Formats.get_option_pattern('asdf')

    #
    # The fragment tests will do the bounds testing. All other match testing for
    # dates and times will be one random True and one random False test.
    #
    def test_frags(self):
        """Exercise boundary values for every regex fragment in Formats."""
        # days_2: two-digit day of month, 01..31.
        self.assertTrue(fullmatch(Formats.dt_frags['days_2'], '01', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_2'], '31', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_2'], '10', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_2'], '29', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['days_2'], '00', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['days_2'], '32', 0))
        # days_1_or_2: day of month with optional leading zero.
        self.assertTrue(fullmatch(Formats.dt_frags['days_1_or_2'], '1', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_1_or_2'], '01', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_1_or_2'], '31', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_1_or_2'], '10', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['days_1_or_2'], '29', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['days_1_or_2'], '0', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['days_1_or_2'], '00', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['days_1_or_2'], '32', 0))
        # dt12_2: two-digit 12-hour clock value, 01..12.
        self.assertTrue(fullmatch(Formats.dt_frags['dt12_2'], '01', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['dt12_2'], '12', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['dt12_2'], '00', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['dt12_2'], '13', 0))
        # time24_2: two-digit 24-hour clock value, 00-padded, 01..23 tested.
        self.assertTrue(fullmatch(Formats.dt_frags['time24_2'], '01', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['time24_2'], '09', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['time24_2'], '10', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['time24_2'], '19', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['time24_2'], '20', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['time24_2'], '23', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['time24_2'], '1', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['time24_2'], '24', 0))
        # dt12_1_or_2: 12-hour clock value with optional leading zero.
        self.assertTrue(fullmatch(Formats.dt_frags['dt12_1_or_2'], '1', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['dt12_1_or_2'], '01', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['dt12_1_or_2'], '12', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['dt12_1_or_2'], '0', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['dt12_1_or_2'], '00', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['dt12_1_or_2'], '13', 0))
        # d_delims: accepted date delimiters are '.', '/', '-'.
        self.assertTrue(fullmatch(Formats.dt_frags['d_delims'], '.', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['d_delims'], '/', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['d_delims'], '-', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['d_delims'], '1', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['d_delims'], ' ', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['d_delims'], '*', 0))
        # years_2 / years_4: exact-width year fields.
        self.assertTrue(fullmatch(Formats.dt_frags['years_2'], '11', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['years_2'], '1', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['years_2'], '111', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['years_4'], '1111', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['years_4'], '111', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['years_4'], '11111', 0))
        # num60_2: two-digit minutes/seconds field, 00..59.
        self.assertTrue(fullmatch(Formats.dt_frags['num60_2'], '00', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['num60_2'], '59', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['num60_2'], '6', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['num60_2'], '591', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['num60_2'], '-1', 0))
        # t_delims: accepted time delimiters are ':' and space.
        self.assertTrue(fullmatch(Formats.dt_frags['t_delims'], ':', 0))
        self.assertTrue(fullmatch(Formats.dt_frags['t_delims'], ' ', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['t_delims'], '.', 0))
        self.assertFalse(fullmatch(Formats.dt_frags['t_delims'], '/', 0))
        # ip_255: one IPv4 octet, 0..255, no leading zeros.
        self.assertTrue(fullmatch(Formats.ip_frags['ip_255'], "0", 0))
        self.assertTrue(fullmatch(Formats.ip_frags['ip_255'], "1", 0))
        self.assertTrue(fullmatch(Formats.ip_frags['ip_255'], "200", 0))
        self.assertTrue(fullmatch(Formats.ip_frags['ip_255'], "249", 0))
        self.assertTrue(fullmatch(Formats.ip_frags['ip_255'], "255", 0))
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], "-1", 0))
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], "00", 0))
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], "000", 0))
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], "256", 0))
        # NOTE(review): duplicate of the assertion above.
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], "256", 0))
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], "0001", 0))
        self.assertFalse(fullmatch(Formats.ip_frags['ip_255'], '', 0))

    def test_date_4_formats(self):
        """Four-character month/day dates, with and without delimiters."""
        self.assertTrue(Formats.matches_format('-date_4md', '0101'))
        self.assertFalse(Formats.matches_format('-date_4md', '0132'))
        self.assertTrue(Formats.matches_format('-date_4md_d', '1.23'))
        self.assertFalse(Formats.matches_format('-date_4md_d', '1229'))

    def test_date_6_formats(self):
        """Six-character mdy/dmy dates, with and without delimiters."""
        self.assertTrue(Formats.matches_format('-date_6mdy', '013100'))
        self.assertFalse(Formats.matches_format('-date_6mdy', '32434'))
        self.assertTrue(Formats.matches_format('-date_6mdy_d', '1.23.00'))
        self.assertFalse(Formats.matches_format('-date_6mdy_d', '122988'))
        self.assertTrue(Formats.matches_format('-date_6dmy', '301100'))
        self.assertFalse(Formats.matches_format('-date_6dmy', '32434'))
        self.assertTrue(Formats.matches_format('-date_6dmy_d', '30-11.00'))
        self.assertFalse(Formats.matches_format('-date_6dmy_d', '01-3214'))

    def test_date_8_formats(self):
        """Eight-character mdy dates with four-digit years."""
        self.assertTrue(Formats.matches_format('-date_8mdy', '01310001'))
        self.assertFalse(Formats.matches_format('-date_8mdy', '01230000'))
        self.assertTrue(Formats.matches_format('-date_8mdy_d', '10.10.1001'))
        self.assertFalse(Formats.matches_format('-date_8mdy_d', '01/30/1'))

    def test_date_9_formats(self):
        """Month-name dates such as 'jan-6-2441'."""
        self.assertTrue(Formats.matches_format('-date_9mdy_d', 'jan-6-2441'))
        self.assertTrue(Formats.matches_format('-date_9mdy_d', 'feb.10.1001'))
        self.assertFalse(Formats.matches_format('-date_9mdy_d', 'feds.6.3252'))
        self.assertFalse(Formats.matches_format('-date_9mdy_d', 'dec.6.311'))

    def test_word_format(self):
        """Word = letters, digits, underscores; no spaces or symbols."""
        self.assertTrue(Formats.matches_format('-word', 'aliugaugyLgUKyyu'))
        self.assertTrue(Formats.matches_format('-word', '2732fsgdgs_'))
        self.assertFalse(Formats.matches_format('-word', 'wo rd'))
        self.assertFalse(Formats.matches_format('-word', '^%$&djdrjd%'))
        self.assertFalse(Formats.matches_format('-word', 'word^'))

    def test_time_formats(self):
        """12/24-hour times, with and without a seconds field."""
        self.assertTrue(Formats.matches_format('-time_12', '12:43 AM'))
        self.assertFalse(Formats.matches_format('-time_12', '2:51 Ao'))
        self.assertTrue(Formats.matches_format('-time_12_s', '12:43:45 AM'))
        self.assertFalse(Formats.matches_format('-time_12_s', '1:25:253 P'))
        self.assertTrue(Formats.matches_format('-time_24', '15:43'))
        self.assertFalse(Formats.matches_format('-time_24', '24:09'))
        self.assertTrue(Formats.matches_format('-time_24_s', '17:43:45'))
        self.assertFalse(Formats.matches_format('-time_24_s', '09:73:13 PM'))

    def test_email_format(self):
        """Email addresses: local@domain, no spaces or stray symbols."""
        self.assertTrue(Formats.matches_format('-email', 'justyhg.egife@hotmail.com'))
        self.assertTrue(Formats.matches_format('-email', 'HUGEHG.EGFUI@gasdg.com'))
        self.assertFalse(Formats.matches_format('-email', 'HUGEHG. EGFUI@gasdg.com'))
        self.assertFalse(Formats.matches_format('-email', '&%^%HUGE(HG.EGFUI@gasdg.com'))
        self.assertFalse(Formats.matches_format('-email', 'HUGEHG.EGFUIgasdg.com'))

    def test_ip4_format(self):
        """Plain IPv4 addresses — no port, no CIDR suffix."""
        self.assertTrue(Formats.matches_format('-ip4', '0.0.0.0'))
        self.assertTrue(Formats.matches_format('-ip4', '1.1.1.1'))
        self.assertTrue(Formats.matches_format('-ip4', '255.255.255.255'))
        self.assertFalse(Formats.matches_format('-ip4', '98.46.141.255:1000'))
        self.assertFalse(Formats.matches_format('-ip4', '12.6.1.4/24'))
        self.assertFalse(Formats.matches_format('-ip4', '256.0.0.1'))
        self.assertFalse(Formats.matches_format('-ip4', 'sag.14.f31t1'))
        self.assertFalse(Formats.matches_format('-ip4', '1531'))

    def test_ip4_range_format(self):
        """IPv4 with a /N suffix; a :port is still rejected."""
        self.assertTrue(Formats.matches_format('-ip4_range', '0.0.0.0/0'))
        self.assertTrue(Formats.matches_format('-ip4_range', '0.0.0.0/255'))
        self.assertFalse(Formats.matches_format('-ip4_range', '98.46.141.255:1000'))
        self.assertFalse(Formats.matches_format('-ip4_range', '256.0.0.1'))
        self.assertFalse(Formats.matches_format('-ip4_range', 'sag.14.f31t1'))
        self.assertFalse(Formats.matches_format('-ip4_range', '1531'))

    def test_url_format(self):
        """URLs need a scheme or www. prefix; bare domains are rejected."""
        self.assertFalse(Formats.matches_format('-url', 'espn.com'))
        self.assertTrue(Formats.matches_format('-url', 'www.espn.com'))
        self.assertTrue(Formats.matches_format('-url', 'https://espn.com'))
        self.assertTrue(Formats.matches_format('-url', 'http://url.com'))
        self.assertFalse(Formats.matches_format('-url', '213459aav876va'))
        self.assertFalse(Formats.matches_format('-url', ';287t8c;.com'))
        self.assertFalse(Formats.matches_format('-url', '141.iyiuyiu@hg'))
# Script entry point: run the suite with per-test result lines.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| 61.277108
| 89
| 0.673417
| 1,358
| 10,172
| 4.818115
| 0.124448
| 0.050436
| 0.183402
| 0.186306
| 0.865964
| 0.865964
| 0.812777
| 0.65016
| 0.53584
| 0.339905
| 0
| 0.073903
| 0.148643
| 10,172
| 166
| 90
| 61.277108
| 0.68164
| 0.013862
| 0
| 0.013793
| 0
| 0
| 0.172933
| 0.009474
| 0
| 0
| 0
| 0
| 0.862069
| 1
| 0.082759
| false
| 0
| 0.02069
| 0
| 0.110345
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7e04fa24660f6a5ae0d144822791bf69960e4d5e
| 178
|
py
|
Python
|
hall/__init__.py
|
jorenham/p-y
|
6ebe8b49f8e061fa1c0f4425a6e4919777574cac
|
[
"BSD-3-Clause"
] | 3
|
2021-07-16T05:43:08.000Z
|
2021-07-25T18:48:50.000Z
|
hall/__init__.py
|
jorenham/p-y
|
6ebe8b49f8e061fa1c0f4425a6e4919777574cac
|
[
"BSD-3-Clause"
] | 1
|
2021-12-15T15:37:05.000Z
|
2021-12-15T15:37:05.000Z
|
hall/__init__.py
|
jorenham/p-y
|
6ebe8b49f8e061fa1c0f4425a6e4919777574cac
|
[
"BSD-3-Clause"
] | null | null | null |
from ._core import Distribution, RandomVar
from .analysis import *
from .backend import *
from .continuous import *
from .discrete import *
from .stats import *
mp_configure()
| 17.8
| 42
| 0.764045
| 22
| 178
| 6.090909
| 0.545455
| 0.298507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157303
| 178
| 9
| 43
| 19.777778
| 0.893333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fda5aff29d3023ba48ccd1cf805fd713dbcff8a4
| 3,089
|
py
|
Python
|
app/local_system.py
|
rayrayy/pirasite
|
bd5c5b48baed731a591dd94aae0dc822bdd59eb1
|
[
"MIT"
] | 1
|
2021-06-24T15:43:33.000Z
|
2021-06-24T15:43:33.000Z
|
app/local_system.py
|
rayrayy/pirasite
|
bd5c5b48baed731a591dd94aae0dc822bdd59eb1
|
[
"MIT"
] | null | null | null |
app/local_system.py
|
rayrayy/pirasite
|
bd5c5b48baed731a591dd94aae0dc822bdd59eb1
|
[
"MIT"
] | 2
|
2021-06-24T15:43:13.000Z
|
2021-08-29T13:31:59.000Z
|
import logging
import subprocess
import time
# import boot
logger = logging.getLogger(__name__)
class Error(Exception):
    """Base class for errors raised by this module."""
    pass
class ShutdownError(Error):
    """Raised when launching the boot/shutdown helper script fails."""
    pass
def shutdown():
    """Shut down the system by running the boot helper once.

    NOTE(review): despite the name, this passes mode="boot" and runs
    app/boot.sh — confirm that script performs the actual shutdown.
    """
    logger.info('Shutting down system')
    return _exec_shutdown(mode = "boot")
def restart():
    """Reboot the system: runs the boot helper twice with a pause between."""
    logger.info('Rebooting system')
    return _exec_shutdown(mode = "reboot")
def _run_boot_script():
    """Launch ``app/boot.sh`` under bash and log its output.

    Raises:
        ShutdownError: if spawning the script raises CalledProcessError.
    """
    try:
        result = subprocess.Popen(
            'source app/boot.sh',
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            shell=True,
            executable='/bin/bash',
        )
    except subprocess.CalledProcessError as e:
        raise ShutdownError(e) from e
    if result.stdout:
        logger.info(result.stdout.read())
    # stderr is redirected into stdout above, so result.stderr is None and
    # this branch never logs; kept for parity with the original code.
    if result.stderr:
        logger.info(result.stderr.read())


def _exec_shutdown(mode):
    """Run the boot helper script according to *mode*.

    Args:
        mode: "reboot" runs the script twice with a 5 s pause between
            (mirroring the original duplicated code); "boot" runs it once.
            Any other value is a no-op.

    Returns:
        True unconditionally once all requested runs complete.
    """
    # The three identical Popen/log stanzas in the original are collapsed
    # into _run_boot_script(); behavior per mode is unchanged.
    if mode == "reboot":
        _run_boot_script()
        time.sleep(5)
        _run_boot_script()
    if mode == "boot":
        _run_boot_script()
    return True
| 35.102273
| 64
| 0.456458
| 256
| 3,089
| 5.457031
| 0.222656
| 0.103078
| 0.068719
| 0.051539
| 0.815319
| 0.775233
| 0.775233
| 0.775233
| 0.775233
| 0.775233
| 0
| 0.000597
| 0.45743
| 3,089
| 88
| 65
| 35.102273
| 0.832936
| 0.15021
| 0
| 0.690909
| 0
| 0
| 0.052571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054545
| false
| 0.036364
| 0.054545
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fdabfd7d70a9c96cf318bb6925b51ca4ef1e6a28
| 21
|
py
|
Python
|
pdflu/__init__.py
|
sdahdah/pdflu
|
17ce6b7bba7b3519d85f77856b6b27afd8909afa
|
[
"MIT"
] | null | null | null |
pdflu/__init__.py
|
sdahdah/pdflu
|
17ce6b7bba7b3519d85f77856b6b27afd8909afa
|
[
"MIT"
] | null | null | null |
pdflu/__init__.py
|
sdahdah/pdflu
|
17ce6b7bba7b3519d85f77856b6b27afd8909afa
|
[
"MIT"
] | null | null | null |
from .pdflu import *
| 10.5
| 20
| 0.714286
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fdbe3c3277aee6b4c038d45d7252e99935d880a0
| 68
|
py
|
Python
|
src/web/users/models/__init__.py
|
fossabot/SIStema
|
1427dda2082688a9482c117d0e24ad380fdc26a6
|
[
"MIT"
] | 5
|
2018-03-08T17:22:27.000Z
|
2018-03-11T14:20:53.000Z
|
src/web/users/models/__init__.py
|
fossabot/SIStema
|
1427dda2082688a9482c117d0e24ad380fdc26a6
|
[
"MIT"
] | 263
|
2018-03-08T18:05:12.000Z
|
2022-03-11T23:26:20.000Z
|
src/web/users/models/__init__.py
|
fossabot/SIStema
|
1427dda2082688a9482c117d0e24ad380fdc26a6
|
[
"MIT"
] | 6
|
2018-03-12T19:48:19.000Z
|
2022-01-14T04:58:52.000Z
|
from .profile import *
from .user import *
from .user_list import *
| 17
| 24
| 0.735294
| 10
| 68
| 4.9
| 0.5
| 0.408163
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 68
| 3
| 25
| 22.666667
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
fdd96af019b0dbb8a74dbdfcb2a13b90fc3d09a3
| 168
|
py
|
Python
|
ps_signal/interfaces/gui/__init__.py
|
golgor/ps_signal
|
0dc93bd3a88d30778968eb102435ffc8cb69a5fb
|
[
"MIT"
] | 1
|
2021-08-02T22:46:34.000Z
|
2021-08-02T22:46:34.000Z
|
ps_signal/interfaces/gui/__init__.py
|
golgor/ps-signal
|
0dc93bd3a88d30778968eb102435ffc8cb69a5fb
|
[
"MIT"
] | 7
|
2020-07-04T13:14:12.000Z
|
2020-07-07T12:27:30.000Z
|
ps_signal/interfaces/gui/__init__.py
|
golgor/ps_signal
|
0dc93bd3a88d30778968eb102435ffc8cb69a5fb
|
[
"MIT"
] | null | null | null |
"""Package that implements a CLI. Import structure will make the entry point
of this package to :func:`.gui.run_gui`. Currently not implemented.
"""
from .gui import *
| 33.6
| 76
| 0.75
| 26
| 168
| 4.807692
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14881
| 168
| 4
| 77
| 42
| 0.874126
| 0.839286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e300d083babedde05449f6b3494b462276c3d8ad
| 21
|
py
|
Python
|
hmap/layout/__init__.py
|
HiDiHlabs/hmap
|
5cd05ee2fec8087b8a08fc73e2c86135e5f2183e
|
[
"BSD-3-Clause"
] | null | null | null |
hmap/layout/__init__.py
|
HiDiHlabs/hmap
|
5cd05ee2fec8087b8a08fc73e2c86135e5f2183e
|
[
"BSD-3-Clause"
] | null | null | null |
hmap/layout/__init__.py
|
HiDiHlabs/hmap
|
5cd05ee2fec8087b8a08fc73e2c86135e5f2183e
|
[
"BSD-3-Clause"
] | null | null | null |
from . import layout
| 10.5
| 20
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e328ae1cefc1819c907d2401a32808ff4f82fd5e
| 201
|
py
|
Python
|
ctd_processing/data_delivery.py
|
sharksmhi/ctd_processing
|
616df4cd7ed626b678622448a08a0356086a8a3f
|
[
"MIT"
] | null | null | null |
ctd_processing/data_delivery.py
|
sharksmhi/ctd_processing
|
616df4cd7ed626b678622448a08a0356086a8a3f
|
[
"MIT"
] | null | null | null |
ctd_processing/data_delivery.py
|
sharksmhi/ctd_processing
|
616df4cd7ed626b678622448a08a0356086a8a3f
|
[
"MIT"
] | null | null | null |
def create_sensorinfo_file(directory):
    """Create a sensor-info file in *directory*. Placeholder: not implemented."""
    pass
def create_metadata_file(directory):
    """Create a metadata file in *directory*. Placeholder: not implemented."""
    pass
def create_delivery_note_file(directory):
    """Create a delivery-note file in *directory*. Placeholder: not implemented."""
    pass
def create_data_delivery(directory):
    """Assemble a data delivery in *directory*. Placeholder: not implemented."""
    pass
| 11.166667
| 41
| 0.751244
| 25
| 201
| 5.68
| 0.4
| 0.253521
| 0.359155
| 0.422535
| 0.549296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189055
| 201
| 17
| 42
| 11.823529
| 0.871166
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
e371d204322c0a8934361c70c7e6502cf384fbbc
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/shellingham/nt.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/shellingham/nt.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/shellingham/nt.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/9b/a2/7a/4aec32a95565c574fd05cfbd17fd71f93e6ed2008516b4401762c891e8
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.427083
| 0
| 96
| 1
| 96
| 96
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8b5616131930d46bbe1e87bce780a80ccf2daef4
| 58
|
py
|
Python
|
code1/archs/segmentation/baselines/__init__.py
|
nickpezzotti1/IIC3
|
9edd27b28d2bddec4c3979ea4591380f5c411c8e
|
[
"MIT"
] | 61
|
2019-04-05T17:52:24.000Z
|
2022-01-26T13:08:31.000Z
|
code1/archs/segmentation/baselines/__init__.py
|
nickpezzotti1/IIC3
|
9edd27b28d2bddec4c3979ea4591380f5c411c8e
|
[
"MIT"
] | 1
|
2020-07-11T12:16:31.000Z
|
2020-07-14T18:47:29.000Z
|
code1/archs/segmentation/baselines/__init__.py
|
nickpezzotti1/IIC3
|
9edd27b28d2bddec4c3979ea4591380f5c411c8e
|
[
"MIT"
] | 29
|
2019-04-19T19:47:44.000Z
|
2021-12-20T05:51:43.000Z
|
from .net10a_doersch import *
from .net10a_isola import *
| 19.333333
| 29
| 0.793103
| 8
| 58
| 5.5
| 0.625
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.137931
| 58
| 2
| 30
| 29
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8bdab30547f627c7ae91b96022a56486c8e33dbf
| 150
|
py
|
Python
|
nanome/_internal/_network/_commands/_serialization/_file/_deprecated/__init__.py
|
nanome-ai/nanome-plugin-api
|
f2ce6a5e3123ee7449a90c2659f3891124289f4a
|
[
"MIT"
] | 3
|
2020-07-02T13:08:27.000Z
|
2021-11-24T14:32:53.000Z
|
nanome/_internal/_network/_commands/_serialization/_file/_deprecated/__init__.py
|
nanome-ai/nanome-plugin-api
|
f2ce6a5e3123ee7449a90c2659f3891124289f4a
|
[
"MIT"
] | 11
|
2020-09-14T17:01:47.000Z
|
2022-02-18T04:00:52.000Z
|
nanome/_internal/_network/_commands/_serialization/_file/_deprecated/__init__.py
|
nanome-ai/nanome-plugin-api
|
f2ce6a5e3123ee7449a90c2659f3891124289f4a
|
[
"MIT"
] | 5
|
2020-08-12T16:30:03.000Z
|
2021-12-06T18:04:23.000Z
|
from . import *
# classes
from ._directory_request import _DirectoryRequest
from ._file_request import _FileRequest
from ._file_save import _FileSave
| 25
| 49
| 0.84
| 18
| 150
| 6.5
| 0.555556
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 150
| 5
| 50
| 30
| 0.886364
| 0.046667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
473535ce927f28faa53b0fd8d0f7f06aaff2f5a6
| 86
|
py
|
Python
|
silver/tests/api/utils/path.py
|
atkinsond/silver
|
7e88db324ea7380dbc1b03cf18911a614a51e2b3
|
[
"Apache-2.0"
] | null | null | null |
silver/tests/api/utils/path.py
|
atkinsond/silver
|
7e88db324ea7380dbc1b03cf18911a614a51e2b3
|
[
"Apache-2.0"
] | null | null | null |
silver/tests/api/utils/path.py
|
atkinsond/silver
|
7e88db324ea7380dbc1b03cf18911a614a51e2b3
|
[
"Apache-2.0"
] | 1
|
2021-02-26T10:52:11.000Z
|
2021-02-26T10:52:11.000Z
|
def absolute_url(relative_url):
return 'http://testserver{}'.format(relative_url)
| 28.666667
| 53
| 0.755814
| 11
| 86
| 5.636364
| 0.727273
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 54
| 43
| 0.794872
| 0
| 0
| 0
| 0
| 0
| 0.22093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
474b296d6be02c9dde0b5820d709546fe62f40d8
| 389
|
py
|
Python
|
abupy/MLBu/ABuMLApi.py
|
luqin/firefly
|
2e5ab17f2d20deb3c68c927f6208ea89db7c639d
|
[
"MIT"
] | 1
|
2019-05-28T05:54:42.000Z
|
2019-05-28T05:54:42.000Z
|
abupy/MLBu/ABuMLApi.py
|
momantang/cobrass
|
f11435d4836aa29078a3cd4beb4ca88967300c84
|
[
"Apache-2.0"
] | 9
|
2020-03-24T16:45:25.000Z
|
2022-03-11T23:40:51.000Z
|
abupy/MLBu/ABuMLApi.py
|
luqin/firefly
|
2e5ab17f2d20deb3c68c927f6208ea89db7c639d
|
[
"MIT"
] | 1
|
2021-09-08T17:39:58.000Z
|
2021-09-08T17:39:58.000Z
|
from __future__ import absolute_import
# noinspection PyUnresolvedReferences
from .ABuMLBinsCs import *
# noinspection PyUnresolvedReferences
from .ABuMLExecute import *
# noinspection PyUnresolvedReferences
from .ABuMLGrid import *
# noinspection PyUnresolvedReferences
from .ABuML import EMLFitType
# noinspection PyUnresolvedReferences
from .ABuMLPd import BtcBigWaveClf, ClosePredict
| 29.923077
| 48
| 0.858612
| 33
| 389
| 9.969697
| 0.424242
| 0.516717
| 0.577508
| 0.534954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105398
| 389
| 12
| 49
| 32.416667
| 0.945402
| 0.460154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
47560b676571477a512e719e797fb07320253470
| 190
|
py
|
Python
|
gitgetpm/commands/__init__.py
|
awesmubarak/gitget
|
35b193c120b3c26ba55f972f67a7b18d158f0077
|
[
"MIT"
] | 6
|
2019-09-29T04:11:04.000Z
|
2021-01-22T10:27:51.000Z
|
gitgetpm/commands/__init__.py
|
abactel/git-get
|
35b193c120b3c26ba55f972f67a7b18d158f0077
|
[
"MIT"
] | 6
|
2017-12-04T20:40:59.000Z
|
2019-08-13T20:43:22.000Z
|
gitgetpm/commands/__init__.py
|
awesmubarak/gitget
|
35b193c120b3c26ba55f972f67a7b18d158f0077
|
[
"MIT"
] | 2
|
2017-12-06T06:58:41.000Z
|
2019-09-09T15:46:27.000Z
|
from .doctor import *
from .edit import *
from .help import *
from .install import *
from .list import *
from .move import *
from .remove import *
from .setup import *
from .update import *
| 19
| 22
| 0.715789
| 27
| 190
| 5.037037
| 0.407407
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189474
| 190
| 9
| 23
| 21.111111
| 0.883117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
47637041698bd95afaef70860169e4627e57f2cb
| 81
|
py
|
Python
|
TryPack/packtry.py
|
EranOvadia/TryPack
|
9dde1f19f2533b234cc099787529f3a98952f5a3
|
[
"MIT"
] | null | null | null |
TryPack/packtry.py
|
EranOvadia/TryPack
|
9dde1f19f2533b234cc099787529f3a98952f5a3
|
[
"MIT"
] | null | null | null |
TryPack/packtry.py
|
EranOvadia/TryPack
|
9dde1f19f2533b234cc099787529f3a98952f5a3
|
[
"MIT"
] | null | null | null |
class Packtry(object):
@staticmethod
def print():
print('print')
| 16.2
| 22
| 0.592593
| 8
| 81
| 6
| 0.75
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.271605
| 81
| 4
| 23
| 20.25
| 0.813559
| 0
| 0
| 0
| 0
| 0
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
47a1912e3f43b2a5189937b67dffbe1a9d22ddde
| 1,879
|
py
|
Python
|
tests/integration/operators_test/abs_sign_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | 61
|
2020-07-06T17:11:46.000Z
|
2022-03-12T14:42:51.000Z
|
tests/integration/operators_test/abs_sign_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | 1
|
2021-02-25T01:30:29.000Z
|
2021-11-09T11:13:14.000Z
|
tests/integration/operators_test/abs_sign_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | 6
|
2020-07-15T12:33:13.000Z
|
2021-11-07T06:55:00.000Z
|
# Copyright (c) 2019 Graphcore Ltd. All rights reserved.
import numpy as np
import popart
import torch
import pytest
from op_tester import op_tester
def test_abs_training(op_tester):
d1 = np.random.rand(10).astype(np.float32)
# random numbers is range -3,3
d1 = 6 * d1 - 3
# Make sure we have a 0 case
d1[3] = 0
def init_builder(builder):
i1 = builder.addInputTensor(d1)
o = builder.aiOnnx.abs([i1], "test_abs")
builder.addOutputTensor(o)
return [
o,
popart.reservedGradientPrefix() + o,
popart.reservedGradientPrefix() + i1,
]
def reference(ref_data):
t1 = torch.tensor(d1, requires_grad=True)
out = torch.abs(t1)
d__o = ref_data.getOutputTensorGrad(0)
out.backward(torch.tensor(d__o))
return [out, d__o, t1.grad]
op_tester.setPatterns(['AbsGradOp'], enableRuntimeAsserts=False)
op_tester.run(init_builder, reference, 'train')
def test_sign_training(op_tester):
d1 = np.random.rand(10).astype(np.float32)
# random numbers is range -3,3
d1 = 6 * d1 - 3
# Make sure we have a 0 case
d1[3] = 0
def init_builder(builder):
i1 = builder.addInputTensor(d1)
o = builder.aiOnnx.sign([i1], "test_sign")
builder.addOutputTensor(o)
return [
o,
popart.reservedGradientPrefix() + o,
popart.reservedGradientPrefix() + i1,
]
def reference(ref_data):
t1 = torch.tensor(d1, requires_grad=True)
out = torch.sign(t1)
d__o = ref_data.getOutputTensorGrad(0)
out.backward(torch.tensor(d__o))
return [out, d__o, t1.grad]
op_tester.setPatterns(['AbsGradOp', 'ZerosLikeOpPattern'],
enableRuntimeAsserts=False)
op_tester.run(init_builder, reference, 'train')
| 26.842857
| 68
| 0.617882
| 237
| 1,879
| 4.746835
| 0.291139
| 0.056889
| 0.103111
| 0.032
| 0.830222
| 0.830222
| 0.830222
| 0.830222
| 0.830222
| 0.721778
| 0
| 0.03804
| 0.272485
| 1,879
| 69
| 69
| 27.231884
| 0.784931
| 0.088345
| 0
| 0.666667
| 0
| 0
| 0.036907
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 1
| 0.125
| false
| 0
| 0.104167
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
47bbba99175b43bfd7630d6111d41b4987fa207b
| 105
|
py
|
Python
|
YikYak_Frequency/test.py
|
dpeters1/YakFrequency
|
f9dd504ed0dfc3667bddf046643d5feb1c9add91
|
[
"MIT"
] | null | null | null |
YikYak_Frequency/test.py
|
dpeters1/YakFrequency
|
f9dd504ed0dfc3667bddf046643d5feb1c9add91
|
[
"MIT"
] | null | null | null |
YikYak_Frequency/test.py
|
dpeters1/YakFrequency
|
f9dd504ed0dfc3667bddf046643d5feb1c9add91
|
[
"MIT"
] | null | null | null |
from sort import sortYaks
print sortYaks(1459656000, 1462665600, 0, 6, 0, 24, False, False, "Carleton")
| 26.25
| 77
| 0.742857
| 15
| 105
| 5.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 0.142857
| 105
| 3
| 78
| 35
| 0.588889
| 0
| 0
| 0
| 0
| 0
| 0.07619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
47ce7c1f5e1be2c76e135661f88e47037970c415
| 68
|
py
|
Python
|
FormulaBasedMaterials/__init__.py
|
CMMAi/FormulaBasedMaterials
|
57b3b46d123011ee4e829f2e70f02ad27845d6f4
|
[
"MIT"
] | null | null | null |
FormulaBasedMaterials/__init__.py
|
CMMAi/FormulaBasedMaterials
|
57b3b46d123011ee4e829f2e70f02ad27845d6f4
|
[
"MIT"
] | null | null | null |
FormulaBasedMaterials/__init__.py
|
CMMAi/FormulaBasedMaterials
|
57b3b46d123011ee4e829f2e70f02ad27845d6f4
|
[
"MIT"
] | null | null | null |
from .SingleFormulaBasedMaterial import SingleFormulaBasedMaterial
| 34
| 67
| 0.911765
| 4
| 68
| 15.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073529
| 68
| 1
| 68
| 68
| 0.984127
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9a3d3b3c97ef2834b1a567731d42efe1ce55d461
| 37
|
py
|
Python
|
fold/plugins/config/__init__.py
|
modora/fold
|
c2eded4480cf715794b8f0585df7dba2cc1348f3
|
[
"MIT"
] | null | null | null |
fold/plugins/config/__init__.py
|
modora/fold
|
c2eded4480cf715794b8f0585df7dba2cc1348f3
|
[
"MIT"
] | null | null | null |
fold/plugins/config/__init__.py
|
modora/fold
|
c2eded4480cf715794b8f0585df7dba2cc1348f3
|
[
"MIT"
] | null | null | null |
from .common import ConfigFilePlugin
| 18.5
| 36
| 0.864865
| 4
| 37
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d04e3200c74441c3ec00627d9980ece3cb425fdc
| 253
|
py
|
Python
|
src/editor/utils/__init__.py
|
rehmanx/PandaEditor
|
125c79605fd46a045201e5ff6a88709764ac104f
|
[
"MIT"
] | null | null | null |
src/editor/utils/__init__.py
|
rehmanx/PandaEditor
|
125c79605fd46a045201e5ff6a88709764ac104f
|
[
"MIT"
] | null | null | null |
src/editor/utils/__init__.py
|
rehmanx/PandaEditor
|
125c79605fd46a045201e5ff6a88709764ac104f
|
[
"MIT"
] | null | null | null |
import editor.utils.property as EdProperty
import editor.utils.math as common_maths
from editor.utils.objectData import ObjectData
from editor.utils.directoryWatcher import DirWatcher
from editor.utils.exceptionHandler import try_execute, try_execute_1
| 42.166667
| 68
| 0.873518
| 35
| 253
| 6.2
| 0.485714
| 0.253456
| 0.207373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00431
| 0.083004
| 253
| 5
| 69
| 50.6
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d059895907fafc2af0e161a1d3a498356539462a
| 269
|
py
|
Python
|
backend/swpp/models.py
|
2019-swpp-8/swpp
|
62a4c6bc4ae44efb3f19c95c1e4b17a5d77ff12b
|
[
"MIT"
] | 7
|
2019-04-13T05:20:26.000Z
|
2019-06-17T02:38:39.000Z
|
backend/swpp/models.py
|
2019-swpp-8/swpp
|
62a4c6bc4ae44efb3f19c95c1e4b17a5d77ff12b
|
[
"MIT"
] | 68
|
2019-04-18T07:05:05.000Z
|
2021-05-08T13:22:36.000Z
|
backend/swpp/models.py
|
2019-swpp-8/swpp
|
62a4c6bc4ae44efb3f19c95c1e4b17a5d77ff12b
|
[
"MIT"
] | 1
|
2019-04-19T06:24:19.000Z
|
2019-04-19T06:24:19.000Z
|
from swpp.model.user import User
from swpp.model.profile import Profile
from swpp.model.tutor import Tutor
from swpp.model.lecture import Lecture
from swpp.model.request import Request
from swpp.model.times import Times
from swpp.model.notification import Notification
| 33.625
| 48
| 0.843866
| 42
| 269
| 5.404762
| 0.261905
| 0.246696
| 0.400881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104089
| 269
| 7
| 49
| 38.428571
| 0.941909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d0740861deac65aac2bcecbc06468210d45d6e68
| 5,660
|
py
|
Python
|
repository/test/data_repository_test.py
|
MobilityData/mobility-database-interface
|
c6eb62b09e4784219c1d02e9f7cb88f77beaa2d8
|
[
"Apache-2.0"
] | 4
|
2021-03-12T10:40:47.000Z
|
2022-01-11T10:56:53.000Z
|
repository/test/data_repository_test.py
|
MobilityData/mobility-database-interface
|
c6eb62b09e4784219c1d02e9f7cb88f77beaa2d8
|
[
"Apache-2.0"
] | 181
|
2021-03-09T15:27:51.000Z
|
2022-01-31T15:25:28.000Z
|
repository/test/data_repository_test.py
|
MobilityData/mobility-database-interface
|
c6eb62b09e4784219c1d02e9f7cb88f77beaa2d8
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase, mock
from unittest.mock import MagicMock
from repository.data_repository import DataRepository
from representation.gtfs_representation import GtfsRepresentation
class DataRepositoryTest(TestCase):
def test_data_repository_initializing_repository_should_return_instance(self):
under_test = DataRepository()
self.assertIsInstance(under_test, DataRepository)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_adding_none_dataset_key_should_raise_exception(
self, mock_representation
):
self.assertRaises(TypeError, DataRepository, None, mock_representation)
def test_data_repository_adding_none_dataset_representation_should_raise_exception(
self,
):
mock_dataset_key = MagicMock()
self.assertRaises(TypeError, DataRepository, mock_dataset_key, None)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_adding_representation_with_valid_parameters_should_return_none(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
under_test = DataRepository().add_dataset_representation(
mock_dataset_key, mock_representation
)
self.assertIsNone(under_test)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_print_dataset_representations_should_return_none(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
under_test = DataRepository().print_all_dataset_representations()
self.assertIsNone(under_test)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_print_dataset_representation_with_non_existent_key_should_return_none(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
mock_dataset_key.__str__.return_value = "test_key"
test_dataset_key = "non_existent_test_key"
data_repository = DataRepository()
data_repository.add_dataset_representation(
str(mock_dataset_key), mock_representation
)
under_test = data_repository.print_dataset_representation(test_dataset_key)
self.assertIsNone(under_test)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_print_dataset_representation_with_existent_key_should_return_none(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
mock_dataset_key.__str__.return_value = "test_key"
test_dataset_key = "test_key"
data_repository = DataRepository()
data_repository.add_dataset_representation(
str(mock_dataset_key), mock_representation
)
under_test = data_repository.print_dataset_representation(test_dataset_key)
self.assertIsNone(under_test)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_get_dataset_representations_should_return_representations(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
mock_dataset_key.__str__.return_value = "test_key"
test_dataset_representations = {"test_key": mock_representation}
data_repository = DataRepository()
data_repository.add_dataset_representation(
str(mock_dataset_key), mock_representation
)
under_test = data_repository.dataset_representations
self.assertEqual(under_test, test_dataset_representations)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_get_dataset_representation_with_non_existent_key_should_return_none(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
mock_dataset_key.__str__.return_value = "test_key"
test_dataset_key = "non_existent_test_key"
data_repository = DataRepository()
data_repository.add_dataset_representation(
str(mock_dataset_key), mock_representation
)
under_test = data_repository.dataset_representations.get(test_dataset_key)
self.assertIsNone(under_test)
@mock.patch("representation.gtfs_representation.GtfsRepresentation")
def test_data_repository_get_dataset_representation_with_existent_key_should_return_none(
self, mock_representation
):
mock_representation.__class__ = GtfsRepresentation
mock_dataset_key = MagicMock()
mock_dataset_key.__class__ = str
mock_dataset_key.__str__.return_value = "test_key"
test_dataset_key = "test_key"
test_dataset_representation = mock_representation
data_repository = DataRepository()
data_repository.dataset_representations[
str(mock_dataset_key)
] = mock_representation
under_test = data_repository.dataset_representations[test_dataset_key]
self.assertEqual(under_test, test_dataset_representation)
| 41.313869
| 99
| 0.754417
| 576
| 5,660
| 6.795139
| 0.079861
| 0.091978
| 0.096576
| 0.053654
| 0.805825
| 0.783342
| 0.765457
| 0.73301
| 0.73301
| 0.73301
| 0
| 0
| 0.188163
| 5,660
| 136
| 100
| 41.617647
| 0.851795
| 0
| 0
| 0.666667
| 0
| 0
| 0.09364
| 0.082332
| 0
| 0
| 0
| 0
| 0.087719
| 1
| 0.087719
| false
| 0
| 0.035088
| 0
| 0.131579
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d08a59f08e584d6c451a2f12dc473707d9e3345a
| 1,730
|
py
|
Python
|
tests/datastructures/test_sorted_linkedlist.py
|
deniscostadsc/becoming-a-better-programmer
|
ec993d494ef5e9272429cc4d2ed1e7ffbe8e1bfd
|
[
"MIT"
] | 1
|
2020-05-04T11:28:35.000Z
|
2020-05-04T11:28:35.000Z
|
tests/datastructures/test_sorted_linkedlist.py
|
deniscostadsc/becoming-a-better-programmer
|
ec993d494ef5e9272429cc4d2ed1e7ffbe8e1bfd
|
[
"MIT"
] | null | null | null |
tests/datastructures/test_sorted_linkedlist.py
|
deniscostadsc/becoming-a-better-programmer
|
ec993d494ef5e9272429cc4d2ed1e7ffbe8e1bfd
|
[
"MIT"
] | null | null | null |
import pytest
from datastructures import SortedLinkedList
@pytest.fixture
def sorted_linked_list():
return SortedLinkedList()
def test_sorted_linked_list_size_is_initially_zero(sorted_linked_list):
assert len(sorted_linked_list) == 0
def test_insert_item_to_sorted_linked_list(sorted_linked_list):
sorted_linked_list.insert(1)
assert len(sorted_linked_list) == 1
assert pytest.helpers.equal_items([1], sorted_linked_list)
def test_insert_items_to_sorted_linked_list(sorted_linked_list):
sorted_linked_list.insert(1)
sorted_linked_list.insert(2)
sorted_linked_list.insert(3)
assert len(sorted_linked_list) == 3
assert pytest.helpers.equal_items([1, 2, 3], sorted_linked_list)
def test_insert_reservely_sorted_items_to_sorted_linked_list(
sorted_linked_list,
):
sorted_linked_list.insert(3)
sorted_linked_list.insert(2)
sorted_linked_list.insert(1)
assert len(sorted_linked_list) == 3
assert pytest.helpers.equal_items([1, 2, 3], sorted_linked_list)
def test_insert_unsorted_items_to_sorted_linked_list(sorted_linked_list,):
sorted_linked_list.insert(1)
sorted_linked_list.insert(3)
sorted_linked_list.insert(2)
assert len(sorted_linked_list) == 3
assert pytest.helpers.equal_items([1, 2, 3], sorted_linked_list)
def test_remove_item_from_sorted_linked_list(sorted_linked_list):
sorted_linked_list.insert(1)
sorted_linked_list.insert(3)
sorted_linked_list.insert(2)
assert len(sorted_linked_list) == 3
assert pytest.helpers.equal_items([1, 2, 3], sorted_linked_list)
sorted_linked_list.remove(0)
assert len(sorted_linked_list) == 2
assert pytest.helpers.equal_items([2, 3], sorted_linked_list)
| 27.460317
| 74
| 0.780347
| 259
| 1,730
| 4.772201
| 0.119691
| 0.38835
| 0.517799
| 0.231392
| 0.815534
| 0.736246
| 0.675566
| 0.675566
| 0.675566
| 0.656958
| 0
| 0.024032
| 0.134104
| 1,730
| 62
| 75
| 27.903226
| 0.801068
| 0
| 0
| 0.525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.325
| 1
| 0.175
| false
| 0
| 0.05
| 0.025
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ef8d57a40e7b5b45a8e9d43389d14eead8af09d7
| 200
|
py
|
Python
|
ClassToPass.py
|
smckee6192/MockyThingy
|
c1f0e9daf6ca4ff32b15cef346a5a4ec95fe60ef
|
[
"MIT"
] | null | null | null |
ClassToPass.py
|
smckee6192/MockyThingy
|
c1f0e9daf6ca4ff32b15cef346a5a4ec95fe60ef
|
[
"MIT"
] | 8
|
2015-03-25T04:02:27.000Z
|
2015-04-26T01:08:15.000Z
|
ClassToPass.py
|
smckee6192/MockyThingy
|
c1f0e9daf6ca4ff32b15cef346a5a4ec95fe60ef
|
[
"MIT"
] | null | null | null |
__author__ = 'Shane'
class ClassToPass:
def __init__(self, int1=int(), int2=int()):
self.int1 = int1
self.int2 = int2
def gimmeTheSum(self, a, b) -> int:
return a + b
| 22.222222
| 47
| 0.575
| 26
| 200
| 4.115385
| 0.538462
| 0.149533
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042254
| 0.29
| 200
| 9
| 48
| 22.222222
| 0.711268
| 0
| 0
| 0
| 0
| 0
| 0.024876
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.142857
| 0
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
560021c21ea27de9c2476bb7645cc12ae7375146
| 38
|
py
|
Python
|
starfish/core/experiment/builder/test/factories/__init__.py
|
haoxusci/starfish
|
d7bd856024c75f2ce41504406f2a663566c3814b
|
[
"MIT"
] | 164
|
2018-03-21T21:52:56.000Z
|
2022-03-23T17:14:39.000Z
|
starfish/core/experiment/builder/test/factories/__init__.py
|
lbgbox/starfish
|
0e879d995d5c49b6f5a842e201e3be04c91afc7e
|
[
"MIT"
] | 1,728
|
2018-03-15T23:16:09.000Z
|
2022-03-12T00:09:18.000Z
|
starfish/core/experiment/builder/test/factories/__init__.py
|
lbgbox/starfish
|
0e879d995d5c49b6f5a842e201e3be04c91afc7e
|
[
"MIT"
] | 66
|
2018-03-25T17:21:15.000Z
|
2022-01-16T09:17:11.000Z
|
from .unique_tiles import unique_data
| 19
| 37
| 0.868421
| 6
| 38
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ef2dea74bb7a0f769bd7e80a992c6a4a897862aa
| 397
|
py
|
Python
|
tests/__init__.py
|
ghuls/weblogo
|
7eab5d1b8a8ec38786fa426af84bd77950835524
|
[
"MIT"
] | 108
|
2015-08-21T10:39:22.000Z
|
2022-03-04T22:10:49.000Z
|
tests/__init__.py
|
ghuls/weblogo
|
7eab5d1b8a8ec38786fa426af84bd77950835524
|
[
"MIT"
] | 60
|
2015-07-21T22:55:52.000Z
|
2022-03-24T21:20:00.000Z
|
tests/__init__.py
|
ghuls/weblogo
|
7eab5d1b8a8ec38786fa426af84bd77950835524
|
[
"MIT"
] | 40
|
2015-08-04T00:18:23.000Z
|
2021-12-30T13:41:54.000Z
|
# TODO: replace with direct calls to pkg_resources
from weblogo.utils import resource_filename, resource_stream, resource_string
def data_string(name):
return resource_string(__name__, "data/" + name, __file__)
def data_stream(name):
return resource_stream(__name__, "data/" + name, __file__)
def data_filename(name):
return resource_filename(__name__, "data/" + name, __file__)
| 26.466667
| 77
| 0.763224
| 51
| 397
| 5.27451
| 0.411765
| 0.078067
| 0.200743
| 0.178439
| 0.171004
| 0.171004
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141058
| 397
| 14
| 78
| 28.357143
| 0.788856
| 0.120907
| 0
| 0
| 0
| 0
| 0.043228
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ef6437c22e5f0446699a12b73b040c3754cb6902
| 46
|
py
|
Python
|
src/vulnpy/wsgi/__init__.py
|
davidaustinarcher/vulnpy
|
692703dae701197fd42ae7fc5a9d52f05a501550
|
[
"MIT"
] | 7
|
2021-03-23T17:40:45.000Z
|
2022-03-14T16:07:27.000Z
|
src/vulnpy/wsgi/__init__.py
|
davidaustinarcher/vulnpy
|
692703dae701197fd42ae7fc5a9d52f05a501550
|
[
"MIT"
] | 27
|
2020-06-29T13:35:45.000Z
|
2022-01-21T07:10:55.000Z
|
src/vulnpy/wsgi/__init__.py
|
davidaustinarcher/vulnpy
|
692703dae701197fd42ae7fc5a9d52f05a501550
|
[
"MIT"
] | 14
|
2020-07-26T18:23:16.000Z
|
2022-03-09T13:44:53.000Z
|
from .app import vulnerable_app # noqa: F401
| 23
| 45
| 0.76087
| 7
| 46
| 4.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 0.173913
| 46
| 1
| 46
| 46
| 0.815789
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ef654431bc8c5c0367cb05676c7b6812c7f8bfb5
| 2,985
|
py
|
Python
|
dagbldr/tests/test_full_feedforward.py
|
dagbldr/dagbldr
|
3bfad37f4e425cfcc1fded91dbce733cf44c6cfd
|
[
"BSD-3-Clause"
] | 21
|
2015-07-23T05:07:17.000Z
|
2016-11-01T23:20:52.000Z
|
dagbldr/tests/test_full_feedforward.py
|
dagbldr/dagbldr
|
3bfad37f4e425cfcc1fded91dbce733cf44c6cfd
|
[
"BSD-3-Clause"
] | 39
|
2015-07-10T19:16:07.000Z
|
2017-03-31T14:00:01.000Z
|
dagbldr/tests/test_full_feedforward.py
|
dagbldr/dagbldr
|
3bfad37f4e425cfcc1fded91dbce733cf44c6cfd
|
[
"BSD-3-Clause"
] | 7
|
2015-07-10T17:38:28.000Z
|
2019-04-23T09:34:06.000Z
|
import numpy as np
import theano
from theano import tensor
from dagbldr.datasets import minibatch_iterator
from dagbldr.utils import convert_to_one_hot
from dagbldr import get_params, del_shared
from dagbldr.nodes import linear, softmax
from dagbldr.nodes import categorical_crossentropy
from dagbldr.optimizers import sgd
from dagbldr.datasets import load_digits
# Common between tests
digits = load_digits()
X = digits["data"].astype("float32")
y = digits["target"]
n_classes = len(set(y))
y = convert_to_one_hot(y, n_classes).astype("float32")
def test_feedforward_classifier():
del_shared()
minibatch_size = 100
random_state = np.random.RandomState(1999)
X_sym = tensor.fmatrix()
y_sym = tensor.fmatrix()
l1_o = linear([X_sym], [X.shape[1]], proj_dim=20, name='l1',
random_state=random_state)
y_pred = softmax([l1_o], [20], proj_dim=n_classes, name='out',
random_state=random_state)
cost = categorical_crossentropy(y_pred, y_sym).mean()
params = list(get_params().values())
grads = theano.grad(cost, params)
learning_rate = 0.001
opt = sgd(params, learning_rate)
updates = opt.updates(params, grads)
fit_function = theano.function([X_sym, y_sym], [cost], updates=updates,
mode="FAST_COMPILE")
cost_function = theano.function([X_sym, y_sym], [cost],
mode="FAST_COMPILE")
train_itr = minibatch_iterator([X, y], minibatch_size, axis=0)
valid_itr = minibatch_iterator([X, y], minibatch_size, axis=0)
X_train, y_train = next(train_itr)
X_train, y_train = next(train_itr)
X_valid, y_valid = next(valid_itr)
fit_function(X_train, y_train)
cost_function(X_valid, y_valid)
def test_feedforward_theano_mix():
del_shared()
minibatch_size = 100
random_state = np.random.RandomState(1999)
X_sym = tensor.fmatrix()
y_sym = tensor.fmatrix()
l1_o = linear([X_sym], [X.shape[1]], proj_dim=20, name='l1',
random_state=random_state)
l1_o = .999 * l1_o
y_pred = softmax([l1_o], [20], proj_dim=n_classes, name='out',
random_state=random_state)
cost = categorical_crossentropy(y_pred, y_sym).mean()
params = list(get_params().values())
grads = theano.grad(cost, params)
learning_rate = 0.001
opt = sgd(params, learning_rate)
updates = opt.updates(params, grads)
fit_function = theano.function([X_sym, y_sym], [cost], updates=updates,
mode="FAST_COMPILE")
cost_function = theano.function([X_sym, y_sym], [cost],
mode="FAST_COMPILE")
train_itr = minibatch_iterator([X, y], minibatch_size, axis=0)
valid_itr = minibatch_iterator([X, y], minibatch_size, axis=0)
X_train, y_train = next(train_itr)
X_valid, y_valid = next(valid_itr)
fit_function(X_train, y_train)
cost_function(X_valid, y_valid)
| 34.310345
| 75
| 0.665327
| 416
| 2,985
| 4.485577
| 0.211538
| 0.05895
| 0.018757
| 0.032154
| 0.734191
| 0.734191
| 0.734191
| 0.734191
| 0.734191
| 0.734191
| 0
| 0.021907
| 0.220101
| 2,985
| 86
| 76
| 34.709302
| 0.779639
| 0.0067
| 0
| 0.73913
| 0
| 0
| 0.027675
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0.144928
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
32389c72ff4ef1cbb615f96078750fbabffaf857
| 45
|
py
|
Python
|
workspace.py
|
brainglobe/brainrender_paper
|
307b8162e36372c690044780c8c7964b2bbbaa3c
|
[
"MIT"
] | 2
|
2020-12-16T21:49:50.000Z
|
2022-01-28T15:53:49.000Z
|
workspace.py
|
brainglobe/brainrender_paper
|
307b8162e36372c690044780c8c7964b2bbbaa3c
|
[
"MIT"
] | null | null | null |
workspace.py
|
brainglobe/brainrender_paper
|
307b8162e36372c690044780c8c7964b2bbbaa3c
|
[
"MIT"
] | null | null | null |
from scripts import make_video
make_video(4)
| 15
| 30
| 0.844444
| 8
| 45
| 4.5
| 0.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.111111
| 45
| 3
| 31
| 15
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
324d134bc4572299ccb90a232ff31c189e4ffce7
| 33
|
py
|
Python
|
mpd_pydb/__init__.py
|
mineo/mpd_pydb
|
5746e15ec1a8243d1f909f605bc9d0d282846b1e
|
[
"MIT"
] | 1
|
2017-01-20T02:19:02.000Z
|
2017-01-20T02:19:02.000Z
|
mpd_pydb/__init__.py
|
mineo/mpd_pydb
|
5746e15ec1a8243d1f909f605bc9d0d282846b1e
|
[
"MIT"
] | null | null | null |
mpd_pydb/__init__.py
|
mineo/mpd_pydb
|
5746e15ec1a8243d1f909f605bc9d0d282846b1e
|
[
"MIT"
] | null | null | null |
from .db import Database # noqa
| 16.5
| 32
| 0.727273
| 5
| 33
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 33
| 1
| 33
| 33
| 0.923077
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
325abc76a5cc7b507a30c23dec27839f8508c20c
| 48
|
py
|
Python
|
app.py
|
wilarz89/TODOAppFlask
|
c86085b87738b32c57fac9fac2a1401e5b31de2f
|
[
"MIT"
] | 1
|
2019-02-04T15:25:17.000Z
|
2019-02-04T15:25:17.000Z
|
app.py
|
wilarz89/TODOAppFlask
|
c86085b87738b32c57fac9fac2a1401e5b31de2f
|
[
"MIT"
] | null | null | null |
app.py
|
wilarz89/TODOAppFlask
|
c86085b87738b32c57fac9fac2a1401e5b31de2f
|
[
"MIT"
] | null | null | null |
#Entry point root file
from app import app
| 6
| 22
| 0.708333
| 8
| 48
| 4.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.270833
| 48
| 7
| 23
| 6.857143
| 0.971429
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3274aadc0c9aa14973760cbaeed616212cde670e
| 97
|
py
|
Python
|
src/cheesyutils/structures/__init__.py
|
e-Lisae/cheesyutils
|
75e3d40d4456f2947e6b5130328d2cacd42b9e31
|
[
"MIT"
] | null | null | null |
src/cheesyutils/structures/__init__.py
|
e-Lisae/cheesyutils
|
75e3d40d4456f2947e6b5130328d2cacd42b9e31
|
[
"MIT"
] | 5
|
2021-05-07T02:15:39.000Z
|
2022-03-04T02:25:13.000Z
|
src/cheesyutils/structures/__init__.py
|
e-Lisae/cheesyutils
|
75e3d40d4456f2947e6b5130328d2cacd42b9e31
|
[
"MIT"
] | 2
|
2021-04-10T23:39:34.000Z
|
2021-07-18T19:35:19.000Z
|
from .default_lru import DefaultLRUCache
from .write_through_lru import AsyncWriteThroughLRUCache
| 48.5
| 56
| 0.907216
| 11
| 97
| 7.727273
| 0.727273
| 0.211765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072165
| 97
| 2
| 56
| 48.5
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
328f478cf77e03405c43e508cd51ddfa1ab8b568
| 269
|
py
|
Python
|
day39_axf/axf/views.py
|
BillionsRichard/pycharmWorkspace
|
709e2681fc6d85ff52fb25717215a365f51073aa
|
[
"Apache-2.0"
] | null | null | null |
day39_axf/axf/views.py
|
BillionsRichard/pycharmWorkspace
|
709e2681fc6d85ff52fb25717215a365f51073aa
|
[
"Apache-2.0"
] | null | null | null |
day39_axf/axf/views.py
|
BillionsRichard/pycharmWorkspace
|
709e2681fc6d85ff52fb25717215a365f51073aa
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def home(request):
return render(request, '')
def cart(request):
return render(request, '')
def market(request):
return render(request, '')
def mine(request):
return render(request, '')
| 16.8125
| 35
| 0.684015
| 33
| 269
| 5.575758
| 0.484848
| 0.282609
| 0.413043
| 0.565217
| 0.472826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185874
| 269
| 15
| 36
| 17.933333
| 0.840183
| 0.085502
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0.111111
| 0.444444
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
08867b5bd7ae688900f6f5f771c0ae1e967cb419
| 104
|
py
|
Python
|
terrascript/rabbitmq/__init__.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/rabbitmq/__init__.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/rabbitmq/__init__.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
# terrascript/rabbitmq/__init__.py
import terrascript
class rabbitmq(terrascript.Provider):
pass
| 13
| 37
| 0.788462
| 11
| 104
| 7.090909
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134615
| 104
| 7
| 38
| 14.857143
| 0.866667
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
3e9f7455a0661ecef8c6d374dc17752f7a1c2bfe
| 87
|
py
|
Python
|
pydrawing/modules/beautifiers/fastneuralstyletransfer/__init__.py
|
CharlesPikachu/pydrawing
|
be95378a5667ea345f2a3760f8814dff255ebe15
|
[
"MIT"
] | 93
|
2022-01-18T01:42:58.000Z
|
2022-03-18T18:42:55.000Z
|
pydrawing/modules/beautifiers/fastneuralstyletransfer/__init__.py
|
CharlesPikachu/pydrawing
|
be95378a5667ea345f2a3760f8814dff255ebe15
|
[
"MIT"
] | null | null | null |
pydrawing/modules/beautifiers/fastneuralstyletransfer/__init__.py
|
CharlesPikachu/pydrawing
|
be95378a5667ea345f2a3760f8814dff255ebe15
|
[
"MIT"
] | 1
|
2022-02-17T04:36:17.000Z
|
2022-02-17T04:36:17.000Z
|
'''initialize'''
from .fastneuralstyletransfer import FastNeuralStyleTransferBeautifier
| 43.5
| 70
| 0.873563
| 5
| 87
| 15.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045977
| 87
| 2
| 70
| 43.5
| 0.915663
| 0.114943
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ed3706c6ba159d040e1d5b5652071e24ebc0e0b
| 156
|
py
|
Python
|
server/repositories/stream_repository.py
|
thulio/watchlogs
|
17469f77851ce0cab916c472f9f508790b6157bf
|
[
"MIT"
] | 1
|
2019-12-30T16:32:47.000Z
|
2019-12-30T16:32:47.000Z
|
server/repositories/stream_repository.py
|
thulio/watchlogs
|
17469f77851ce0cab916c472f9f508790b6157bf
|
[
"MIT"
] | null | null | null |
server/repositories/stream_repository.py
|
thulio/watchlogs
|
17469f77851ce0cab916c472f9f508790b6157bf
|
[
"MIT"
] | null | null | null |
from server.shared.services import CloudWatchService
cloudwatch = CloudWatchService()
def list_streams(group):
return cloudwatch.list_streams(group)
| 19.5
| 52
| 0.814103
| 17
| 156
| 7.352941
| 0.705882
| 0.176
| 0.256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 156
| 7
| 53
| 22.285714
| 0.905797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3ef68852a85dfff4585cb9e3d4ae09150d9a2fa3
| 29
|
py
|
Python
|
libsaas/services/pingdom/__init__.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 155
|
2015-01-27T15:17:59.000Z
|
2022-02-20T00:14:08.000Z
|
libsaas/services/pingdom/__init__.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 14
|
2015-01-12T08:22:37.000Z
|
2021-06-16T19:49:31.000Z
|
libsaas/services/pingdom/__init__.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 43
|
2015-01-28T22:41:45.000Z
|
2021-09-21T04:44:26.000Z
|
from .service import Pingdom
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f5c5443ea11b060db67a90b416d6f9deda13b6e4
| 166
|
py
|
Python
|
hilbert_config/__init__.py
|
hilbert/hilbert-cli
|
d33775d38b8a59f058024b61f48c4384a7af2b79
|
[
"Apache-2.0"
] | 8
|
2016-11-30T13:03:20.000Z
|
2018-08-09T07:07:25.000Z
|
hilbert_config/__init__.py
|
hilbert/hilbert-cli
|
d33775d38b8a59f058024b61f48c4384a7af2b79
|
[
"Apache-2.0"
] | 79
|
2016-09-26T13:02:34.000Z
|
2018-06-05T16:50:08.000Z
|
hilbert_config/__init__.py
|
hilbert/hilbert-cli
|
d33775d38b8a59f058024b61f48c4384a7af2b79
|
[
"Apache-2.0"
] | 1
|
2016-10-27T00:50:44.000Z
|
2016-10-27T00:50:44.000Z
|
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '0.3.0' # TODO: add git commit id?
# from hilbert_cli_config import *
| 23.714286
| 49
| 0.789157
| 24
| 166
| 4.791667
| 0.708333
| 0.173913
| 0.278261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.150602
| 166
| 6
| 50
| 27.666667
| 0.794326
| 0.343373
| 0
| 0
| 0
| 0
| 0.04717
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f5f3113069b328a6d0934a3225b9d0940c4fd2db
| 39
|
py
|
Python
|
setup_db.py
|
inferlink/landmark-rest
|
5bda40424bd1d62c64c9f4931855b4e341742b95
|
[
"BSD-4-Clause"
] | null | null | null |
setup_db.py
|
inferlink/landmark-rest
|
5bda40424bd1d62c64c9f4931855b4e341742b95
|
[
"BSD-4-Clause"
] | null | null | null |
setup_db.py
|
inferlink/landmark-rest
|
5bda40424bd1d62c64c9f4931855b4e341742b95
|
[
"BSD-4-Clause"
] | null | null | null |
from database import init_db
init_db()
| 13
| 28
| 0.820513
| 7
| 39
| 4.285714
| 0.714286
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 2
| 29
| 19.5
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
eb0c1b7f3bee1a1df24af3463b9fef17b5cbc348
| 22
|
py
|
Python
|
testsuit/statutil/testsuit/main.py
|
pahaz/bashtest
|
27be6db6dbe19330f4a0b6b3e126cf01b52be3fc
|
[
"MIT"
] | 41
|
2016-07-27T09:23:34.000Z
|
2021-06-30T10:57:50.000Z
|
testsuit/statutil/testsuit/main.py
|
pahaz/bashtest
|
27be6db6dbe19330f4a0b6b3e126cf01b52be3fc
|
[
"MIT"
] | 8
|
2016-08-10T19:22:47.000Z
|
2016-09-10T17:34:49.000Z
|
testsuit/statutil/testsuit/main.py
|
pahaz/bashtest
|
27be6db6dbe19330f4a0b6b3e126cf01b52be3fc
|
[
"MIT"
] | 4
|
2017-12-26T16:20:44.000Z
|
2019-11-24T23:02:50.000Z
|
import qwe
print(qwe)
| 7.333333
| 10
| 0.772727
| 4
| 22
| 4.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 2
| 11
| 11
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
de4abcd4bb876b8aa6031b8c4e524bc2c732d4e0
| 80
|
py
|
Python
|
app/views/admin/__init__.py
|
FSU-ACM-OSSG/Contest-Server
|
f9aabd9742a6aa78cbefc685fd2760a1f83d7721
|
[
"MIT"
] | null | null | null |
app/views/admin/__init__.py
|
FSU-ACM-OSSG/Contest-Server
|
f9aabd9742a6aa78cbefc685fd2760a1f83d7721
|
[
"MIT"
] | null | null | null |
app/views/admin/__init__.py
|
FSU-ACM-OSSG/Contest-Server
|
f9aabd9742a6aa78cbefc685fd2760a1f83d7721
|
[
"MIT"
] | null | null | null |
# from . import sign_in
from . import user_csv
from .sign_in import SignInView
| 16
| 31
| 0.775
| 13
| 80
| 4.538462
| 0.538462
| 0.338983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 80
| 4
| 32
| 20
| 0.893939
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de5be1dd6af3f66c3e97d09f0cb1fece277134ca
| 96
|
py
|
Python
|
imperio/robot/hr/lip_control/__init__.py
|
imbesat-rizvi/imperio
|
5129109f86d660c5152baaaee50b0cc8f5341630
|
[
"MIT"
] | null | null | null |
imperio/robot/hr/lip_control/__init__.py
|
imbesat-rizvi/imperio
|
5129109f86d660c5152baaaee50b0cc8f5341630
|
[
"MIT"
] | null | null | null |
imperio/robot/hr/lip_control/__init__.py
|
imbesat-rizvi/imperio
|
5129109f86d660c5152baaaee50b0cc8f5341630
|
[
"MIT"
] | 2
|
2021-01-17T22:53:02.000Z
|
2021-03-03T01:11:43.000Z
|
from .VisemesPublisher import VisemesPublisher
from .PhonemesPublisher import PhonemesPublisher
| 32
| 48
| 0.895833
| 8
| 96
| 10.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 2
| 49
| 48
| 0.977273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
724d25d9fddf2cd0b2b8120c0def3ff19069a4ab
| 181
|
py
|
Python
|
people/tests/conftest.py
|
giantmade/giant-people
|
67acd7cc8fc3649d5935373be15a59aeceb57fdb
|
[
"MIT"
] | null | null | null |
people/tests/conftest.py
|
giantmade/giant-people
|
67acd7cc8fc3649d5935373be15a59aeceb57fdb
|
[
"MIT"
] | 5
|
2021-01-15T14:16:26.000Z
|
2021-03-24T15:47:04.000Z
|
people/tests/conftest.py
|
giantmade/giant-people
|
67acd7cc8fc3649d5935373be15a59aeceb57fdb
|
[
"MIT"
] | null | null | null |
import pytest
from people.models import Person
@pytest.fixture
def person_instance():
return Person(name="test person", linkedin_url="https://www.linkedin.com/in/testperson")
| 22.625
| 92
| 0.773481
| 25
| 181
| 5.52
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104972
| 181
| 7
| 93
| 25.857143
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0.270718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
a0d7e8dcd9a66fbe077fa07984f37679ff69691d
| 62
|
py
|
Python
|
exercises/unit-testing/tests/test_extract_position.py
|
Make-School-Courses/SPD-2.3-Testing-and-Architecture
|
b9e1a32515785e66ebe1323ea11a94292f8580a1
|
[
"MIT"
] | 1
|
2021-02-08T04:44:36.000Z
|
2021-02-08T04:44:36.000Z
|
exercises/unit-testing/tests/test_extract_position.py
|
Make-School-Courses/SPD-2.3-Testing-and-Architecture
|
b9e1a32515785e66ebe1323ea11a94292f8580a1
|
[
"MIT"
] | 10
|
2021-02-18T18:47:15.000Z
|
2021-06-02T04:40:46.000Z
|
exercises/unit-testing/tests/test_extract_position.py
|
Make-School-Courses/SPD-2.3-Testing-and-Architecture
|
b9e1a32515785e66ebe1323ea11a94292f8580a1
|
[
"MIT"
] | 34
|
2021-02-02T18:24:32.000Z
|
2021-08-15T08:57:55.000Z
|
import pytest
from ..extract_position import extract_position
| 20.666667
| 47
| 0.870968
| 8
| 62
| 6.5
| 0.625
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 62
| 2
| 48
| 31
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0f50bd49193f5cd45741e93d050a30ee9add998
| 23,492
|
py
|
Python
|
api/tests/test_tenant_intake.py
|
smegurus/smegurus-django
|
053973b5ff0b997c52bfaca8daf8e07db64a877c
|
[
"BSD-4-Clause"
] | 1
|
2020-07-16T10:58:23.000Z
|
2020-07-16T10:58:23.000Z
|
api/tests/test_tenant_intake.py
|
smegurus/smegurus-django
|
053973b5ff0b997c52bfaca8daf8e07db64a877c
|
[
"BSD-4-Clause"
] | 13
|
2018-11-30T02:29:39.000Z
|
2022-03-11T23:35:49.000Z
|
api/tests/test_tenant_intake.py
|
smegurus/smegurus-django
|
053973b5ff0b997c52bfaca8daf8e07db64a877c
|
[
"BSD-4-Clause"
] | null | null | null |
import json
from django.core import mail
from django.db import transaction
from django.core.urlresolvers import resolve, reverse
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.utils import translation
from django.contrib.auth.models import User, Group
from rest_framework.authtoken.models import Token
from rest_framework import status
from rest_framework.test import APIClient
from rest_framework.test import APITestCase
from django_tenants.test.cases import TenantTestCase
from django_tenants.test.client import TenantClient
from foundation_tenant.models.base.me import Me
from foundation_tenant.models.base.postaladdress import PostalAddress
from foundation_tenant.models.base.contactpoint import ContactPoint
from foundation_tenant.models.base.intake import Intake
from foundation_tenant.models.base.note import Note
from smegurus import constants
TEST_USER_EMAIL = "ledo@gah.com"
TEST_USER_USERNAME = "ledo"
TEST_USER_PASSWORD = "GalacticAllianceOfHumankind"
class APIIntakeWithTenantSchemaTestCase(APITestCase, TenantTestCase):
fixtures = []
def setup_tenant(self, tenant):
"""Tenant Schema"""
tenant.schema_name = 'galacticalliance'
tenant.name = "Galactic Alliance of Humankind"
tenant.has_perks=True
tenant.has_mentors=True
tenant.how_discovered = "Command HQ"
tenant.how_many_served = 1
@classmethod
def setUpTestData(cls):
Group.objects.bulk_create([
Group(id=constants.ENTREPRENEUR_GROUP_ID, name="Entreprenuer",),
Group(id=constants.MENTOR_GROUP_ID, name="Mentor",),
Group(id=constants.ADVISOR_GROUP_ID, name="Advisor",),
Group(id=constants.ORGANIZATION_MANAGER_GROUP_ID, name="Org Manager",),
Group(id=constants.ORGANIZATION_ADMIN_GROUP_ID, name="Org Admin",),
Group(id=constants.CLIENT_MANAGER_GROUP_ID, name="Client Manager",),
Group(id=constants.SYSTEM_ADMIN_GROUP_ID, name="System Admin",),
])
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
user = User.objects.create_user( # Create our user.
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
user.is_superuser = True
user.is_active = True
user.groups.add(org_admin_group)
user.save()
@transaction.atomic
def setUp(self):
translation.activate('en') # Set English.
super(APIIntakeWithTenantSchemaTestCase, self).setUp()
# Initialize our test data.
self.user = User.objects.get()
token = Token.objects.get(user__username=TEST_USER_USERNAME)
# Setup.
self.unauthorized_client = TenantClient(self.tenant)
self.authorized_client = TenantClient(self.tenant, HTTP_AUTHORIZATION='Token ' + token.key)
self.authorized_client.login(
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
self.tenant.owner = self.user
self.tenant.save()
self.me = Me.objects.create(
owner=self.user,
)
# Above taken from:
# http://www.django-rest-framework.org/api-guide/testing/#authenticating
@transaction.atomic
def tearDown(self):
Intake.objects.delete_all()
Note.objects.delete_all()
PostalAddress.objects.delete_all()
ContactPoint.objects.delete_all()
Me.objects.delete_all()
items = User.objects.all()
for item in items.all():
item.delete()
items = Group.objects.all()
for item in items.all():
item.delete()
# super(APIIntakeWithTenantSchemaTestCase, self).tearDown()
@transaction.atomic
def test_list_with_anonymous_user(self):
response = self.unauthorized_client.get('/api/tenantintake/?format=json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@transaction.atomic
def test_list_with_authenticated__user(self):
# Change Group that the User belongs in.
entrepreneur_group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(entrepreneur_group)
self.user.save()
# Test and verify.
response = self.authorized_client.get('/api/tenantintake/?format=json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_list_with_authenticated_management_group_user(self):
# Change Group that the User belongs in.
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
self.user.groups.remove(org_admin_group)
self.user.save()
# Test and verify.
response = self.authorized_client.get('/api/tenantintake/?format=json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_list_with_authenticated_advisor_group_user(self):
# Change Group that the User belongs in.
advisor_group = Group.objects.get(id=constants.ADVISOR_GROUP_ID)
self.user.groups.add(advisor_group)
self.user.save()
# Test and verify.
response = self.authorized_client.get('/api/tenantintake/?format=json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_post_with_anonymous_user(self):
data = {
'me': self.me.id,
}
response = self.unauthorized_client.post('/api/tenantintake/?format=json', json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@transaction.atomic
def test_post_with_authenticated_management_group_user(self):
# Run the test and verify.
data = {
'me': self.me.id,
}
response = self.authorized_client.post('/api/tenantintake/?format=json', json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
@transaction.atomic
def test_post_with_authenticated_advisor_group_user(self):
# Change Group that the User belongs in.
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
advisor_group = Group.objects.get(id=constants.ADVISOR_GROUP_ID)
self.user.groups.remove(org_admin_group)
self.user.groups.add(advisor_group)
self.user.save()
# Test and verify.
data = {
'me': self.me.id,
}
response = self.authorized_client.post('/api/tenantintake/?format=json', json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
@transaction.atomic
def test_put_with_anonymous_user(self):
# Create a new object with our specific test data.
Intake.objects.create(
id=1,
me=self.me,
)
# Run the test.
data = {
'id': 1,
'me': self.me.id,
}
response = self.unauthorized_client.put('/api/tenantintake/1/?format=json', json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@transaction.atomic
def test_put_with_authenticated_management_user(self):
# Create a new object with our specific test data.
Intake.objects.create(
id=1,
me=self.me,
)
# Run the test.
data = {
'id': 1,
'me': self.me.id,
}
response = self.authorized_client.put('/api/tenantintake/1/?format=json', json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_put_with_authenticated_advisor_user(self):
# Change Group that the User belongs in.
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
advisor_group = Group.objects.get(id=constants.ADVISOR_GROUP_ID)
self.user.groups.remove(org_admin_group)
self.user.groups.add(advisor_group)
self.user.save()
# Create a new object with our specific test data.
Intake.objects.create(
id=1,
me=self.me,
)
# Run the test.
data = {
'id': 1,
'me': self.me.id,
}
response = self.authorized_client.put('/api/tenantintake/1/?format=json', json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_delete_with_anonymous_user(self):
Intake.objects.create(
id=1,
me=self.me,
)
response = self.unauthorized_client.delete('/api/tenantintake/1/?format=json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@transaction.atomic
def test_delete_with_authenticated_management_user(self):
Intake.objects.create(
id=1,
me=self.me,
judgement_note=Note.objects.create(
id=1,
me=self.me,
),
privacy_note=Note.objects.create(
id=2,
me=self.me,
),
terms_note=Note.objects.create(
id=3,
me=self.me,
),
confidentiality_note=Note.objects.create(
id=4,
me=self.me,
),
collection_note=Note.objects.create(
id=5,
me=self.me,
),
)
response = self.authorized_client.delete('/api/tenantintake/1/?format=json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
@transaction.atomic
def test_delete_with_authenticated_advisor_user(self):
# Create our object to be deleted.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
judgement_note=Note.objects.create(
id=1,
me=self.me,
),
)
# Change Group that the User belongs in.
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
advisor_group = Group.objects.get(id=constants.ADVISOR_GROUP_ID)
self.user.groups.remove(org_admin_group)
self.user.groups.add(advisor_group)
self.user.save()
# Run test and verify.
response = self.authorized_client.delete('/api/tenantintake/1/?format=json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
@transaction.atomic
def test_complete_intake_with_anonymous_user(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.PENDING_REVIEW_STATUS,
judgement_note=Note.objects.create(
id=1,
me=self.me,
),
)
# Run the test and verify.
response = self.unauthorized_client.put(
'/api/tenantintake/1/complete_intake/?format=json',
json.dumps({}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
me = Intake.objects.get(id=1)
self.assertEqual(me.status, constants.PENDING_REVIEW_STATUS)
self.assertEqual(len(mail.outbox), 0) # Test that one message has not been sent.
@transaction.atomic
def test_complete_intake_with_owner_user(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
)
# Run the test and verify.
response = self.authorized_client.put(
'/api/tenantintake/1/complete_intake/?format=json',
json.dumps({}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
me = Intake.objects.get(id=1)
self.assertEqual(me.status, constants.PENDING_REVIEW_STATUS)
# Test that one email has been sent.
self.assertEqual(len(mail.outbox), 1)
# Verify that the subject of the first message is correct.
self.assertEqual(mail.outbox[0].subject, 'New Entrepreneur Application!')
@transaction.atomic
def test_complete_intake_with_different_owner_user(self):
# Setup our objects.
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
new_user = User.objects.create_user( # Create our user.
email='chambers@gah.com',
username='Chambers',
password='I do not like Stryker',
)
new_user.is_active = True
new_user.groups.add(org_admin_group)
new_user.save()
new_me = Me.objects.create(
owner=new_user
)
Intake.objects.create(
id=1,
me=new_me,
status=constants.CREATED_STATUS,
)
# Run the test and verify.
response = self.authorized_client.put(
'/api/tenantintake/1/complete_intake/?format=json',
json.dumps({}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
me = Intake.objects.get(id=1)
self.assertEqual(me.status, constants.CREATED_STATUS)
self.assertEqual(len(mail.outbox), 0) # Test that one message has not been sent.
@transaction.atomic
def test_complete_intake_with_owner_user_with_404(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
)
# Run the test and verify.
response = self.authorized_client.put(
'/api/tenantintake/6666/complete_intake/?format=json',
json.dumps({}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
me = Intake.objects.get(id=1)
self.assertEqual(me.status, constants.CREATED_STATUS)
self.assertEqual(len(mail.outbox), 0) # Test that one message has not been sent.
@transaction.atomic
def test_judge_with_anonymous_user(self):
# Create a new object with our specific test data.
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
)
# Run the test.
data = {
'id': 1,
'owner': self.user.id,
'is_employee_created': False,
}
response = self.unauthorized_client.put(
'/api/tenantintake/1/judge/?format=json',
json.dumps(data),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
intake = Intake.objects.get(id=1)
self.assertEqual(intake.status, constants.CREATED_STATUS)
self.assertFalse(intake.me.is_in_intake)
self.assertEqual(len(mail.outbox), 0) # Test that one message has not been sent.
@transaction.atomic
def test_judge_with_employee_user_for_existing_intake_with_note(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
judgement_note=Note.objects.create(
me=self.me,
),
)
# Run the test and verify.
response = self.authorized_client.put(
'/api/tenantintake/1/judge/?format=json',
json.dumps({
'status': constants.APPROVED_STATUS,
'comment': 'This is a test comment.',
'is_employee_created': False,
}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
intake = Intake.objects.get(id=1)
self.assertEqual(intake.status, constants.APPROVED_STATUS)
self.assertTrue(intake.me.is_in_intake)
note = Note.objects.get(id=1)
self.assertIn('This is a test comment.', note.description)
self.assertEqual(len(mail.outbox), 1) # Test that one message has been sent.
self.assertIn('Accepted', mail.outbox[0].subject)
@transaction.atomic
def test_judge_with_employee_user_for_existing_intake_without_note(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
)
# Run the test and verify.
response = self.authorized_client.put(
'/api/tenantintake/1/judge/?format=json',
json.dumps({
'status': constants.REJECTED_STATUS,
'comment': 'This is a test comment.',
'is_employee_created': False,
}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
intake = Intake.objects.get(id=1)
self.assertEqual(intake.status, constants.REJECTED_STATUS)
self.assertFalse(intake.me.is_in_intake)
note = Note.objects.get(id=1)
self.assertIn('This is a test comment.', note.description)
self.assertEqual(len(mail.outbox), 1) # Test that one message has been sent.
self.assertIn('Rejected', mail.outbox[0].subject)
@transaction.atomic
def test_judge_with_employee_user_for_manually_created_intake(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
)
# Run the test and verify.
response = self.authorized_client.put(
'/api/tenantintake/1/judge/?format=json',
json.dumps({
'status': constants.APPROVED_STATUS,
'comment': 'This is a test comment.',
'is_employee_created': True,
}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
intake = Intake.objects.get(id=1)
self.assertEqual(intake.status, constants.APPROVED_STATUS)
self.assertTrue(intake.me.is_in_intake)
note = Note.objects.get(id=1)
self.assertIn('This is a test comment.', note.description)
self.assertEqual(len(mail.outbox), 1) # Test that one message has been sent.
@transaction.atomic
def test_judge_with_non_employee_user(self):
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
self.user.groups.remove(org_admin_group)
group = Group.objects.get(id=constants.ENTREPRENEUR_GROUP_ID)
self.user.groups.add(group)
Intake.objects.create(
id=1,
me=self.me,
status=constants.CREATED_STATUS,
)
# Run the test.
response = self.authorized_client.put(
'/api/tenantintake/1/judge/?format=json',
json.dumps({
'status': constants.APPROVED_STATUS,
'comment': 'This is a test comment.',
'is_employee_created': False,
}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
intake = Intake.objects.get(id=1)
self.assertEqual(intake.status, constants.CREATED_STATUS)
self.assertFalse(intake.me.is_in_intake)
self.assertEqual(len(mail.outbox), 0) # Test that one message has not been sent.
@transaction.atomic
def test_judge_with_owner_user_with_404(self):
org_admin_group = Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID)
self.user.groups.remove(org_admin_group)
group = Group.objects.get(id=constants.ADVISOR_GROUP_ID)
self.user.groups.add(group)
response = self.authorized_client.put(
'/api/tenantintake/666/judge/?format=json',
json.dumps({
'status': constants.APPROVED_STATUS,
'comment': 'This is a test comment.',
'is_employee_created': False,
}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b'No Intake matches the given query.', response.content)
self.assertEqual(len(mail.outbox), 0) # Test that one message has not been sent.
@transaction.atomic
def test_crm_update_with_anonymous_user(self):
# Setup our object.
Intake.objects.create(
id=1,
me=self.me,
status=constants.PENDING_REVIEW_STATUS,
has_signed_with_name="Ledo"
)
# Run the test and verify.
response = self.unauthorized_client.put(
'/api/tenantintake/1/crm_update/?format=json',
json.dumps({}),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@transaction.atomic
def test_crm_update_with_owner_user(self):
    """The intake's authenticated owner may trigger crm_update (200)."""
    Intake.objects.create(
        id=1,
        me=self.me,
        status=constants.CREATED_STATUS,
        has_signed_with_name="Ledo",
    )
    empty_payload = json.dumps({})
    response = self.authorized_client.put(
        '/api/tenantintake/1/crm_update/?format=json',
        empty_payload,
        content_type='application/json',
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
@transaction.atomic
def test_crm_update_with_different_owner_user(self):
    """crm_update on somebody else's intake is rejected with 401."""
    # Build a second org-admin user who owns the intake under test.
    other_user = User.objects.create_user(
        email='chambers@gah.com',
        username='Chambers',
        password='I do not like Stryker',
    )
    other_user.is_active = True
    other_user.groups.add(
        Group.objects.get(id=constants.ORGANIZATION_ADMIN_GROUP_ID))
    other_user.save()
    other_me = Me.objects.create(owner=other_user)
    Intake.objects.create(
        id=1,
        me=other_me,
        status=constants.CREATED_STATUS,
        has_signed_with_name="Ledo",
    )
    # self.authorized_client authenticates as self.user, not other_user.
    response = self.authorized_client.put(
        '/api/tenantintake/1/crm_update/?format=json',
        json.dumps({}),
        content_type='application/json',
    )
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
@transaction.atomic
def test_crm_update_with_owner_user_with_404(self):
    """crm_update on a missing intake id returns 404."""
    response = self.authorized_client.put(
        '/api/tenantintake/6666/crm_update/?format=json',
        json.dumps({}),
        content_type='application/json',
    )
    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| 37.288889
| 134
| 0.632598
| 2,743
| 23,492
| 5.22202
| 0.082756
| 0.049218
| 0.016755
| 0.045239
| 0.823094
| 0.792726
| 0.777157
| 0.757959
| 0.7426
| 0.731709
| 0
| 0.010072
| 0.268815
| 23,492
| 629
| 135
| 37.348172
| 0.823834
| 0.077814
| 0
| 0.660819
| 0
| 0
| 0.095814
| 0.047652
| 0
| 0
| 0
| 0
| 0.11306
| 1
| 0.060429
| false
| 0.009747
| 0.042885
| 0
| 0.107212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9d2dc0ee62f7c4303085396de5715fb58d5c86a0
| 33
|
py
|
Python
|
atcoder/other/idn2015qa_a.py
|
knuu/competitive-programming
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
[
"MIT"
] | 1
|
2018-11-12T15:18:55.000Z
|
2018-11-12T15:18:55.000Z
|
atcoder/other/idn2015qa_a.py
|
knuu/competitive-programming
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
[
"MIT"
] | null | null | null |
atcoder/other/idn2015qa_a.py
|
knuu/competitive-programming
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
[
"MIT"
] | null | null | null |
# Read two input lines and print the product of their lengths.
first_line = input()
second_line = input()
print(len(first_line) * len(second_line))
| 16.5
| 32
| 0.636364
| 5
| 33
| 4.2
| 0.6
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 33
| 1
| 33
| 33
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
19d56d347769a9338b7505f6c7c34fb930c257ab
| 3,010
|
py
|
Python
|
figure_generation/FigureS4/nominal_validation.py
|
ramachandran-lab/multiancestry_enrichment
|
59f9eea4dbfbff6754224a9188730ebe393a3c18
|
[
"CC0-1.0"
] | 1
|
2022-03-31T18:22:52.000Z
|
2022-03-31T18:22:52.000Z
|
figure_generation/FigureS4/nominal_validation.py
|
ramachandran-lab/multiancestry_enrichment
|
59f9eea4dbfbff6754224a9188730ebe393a3c18
|
[
"CC0-1.0"
] | null | null | null |
figure_generation/FigureS4/nominal_validation.py
|
ramachandran-lab/multiancestry_enrichment
|
59f9eea4dbfbff6754224a9188730ebe393a3c18
|
[
"CC0-1.0"
] | null | null | null |
import pandas as pd
import numpy as np
traits = ['MCV', 'PLC', 'Height', 'BMI', 'DBP', 'SBP', 'WBC', 'RBC',
          'Hemoglobin', 'Hematocrit', 'MCH', 'MCHC', 'Lymphocyte', 'Monocyte',
          'Neutrophil', 'Eosinophil', 'Basophil', 'Urate', 'Triglyceride',
          'Cholesterol', 'LDL', 'HDL', 'HBA1C', 'EGFR', 'CRP']


def _replication_summary(indir, merged_suffix, index_col, drop_cols, out_tag):
    """Build a per-trait replication summary table.

    For each trait, take the European-significant hits from
    ``indir + trait + '.upset.input.csv'``, look up their p-values in the
    merged results file, apply a Bonferroni threshold (0.05 / #testable
    hits), and write a 0/1 per-ancestry replication matrix to
    ``'nominal.' + trait + out_tag + '.upset.input.csv'``.

    indir         -- directory prefix holding the per-trait input files
    merged_suffix -- filename suffix of the tab-separated merged file
    index_col     -- id column of the merged file ('SNP' or 'gene')
    drop_cols     -- non-p-value columns to drop from the merged file
    out_tag       -- tag inserted into the per-trait output filename

    Returns a DataFrame indexed by trait with columns
    thresh / tested / replicated / missed / proportion.
    """
    table = pd.DataFrame(np.zeros((len(traits), 4)),
                         columns=['thresh', 'tested', 'replicated', 'missed'],
                         index=traits)
    for trait in traits:
        significance = pd.read_csv(indir + trait + '.upset.input.csv')
        otr_sig = significance[significance['european'] == 0]
        significance = significance[significance['european'] == 1]
        pfile = pd.read_csv(indir + trait + merged_suffix,
                            sep='\t').set_index(index_col)
        pfile = pfile.loc[significance[index_col].tolist()]
        pfile = pfile.drop(drop_cols, axis=1)
        # Drop rows where every remaining p-value column is NaN
        # (hits that were not testable in any other ancestry).
        pfile['nans'] = pfile.isnull().sum(axis=1)
        pfile = pfile[pfile['nans'] != len(pfile.columns) - 1]
        table.loc[trait, 'tested'] = pfile.shape[0]
        # Bonferroni threshold over the testable hits for this trait.
        thresh = 0.05 / pfile.shape[0]
        pfile = pfile.drop(['nans'], axis=1)
        siggy = (pfile < thresh) * 1
        siggy.to_csv('nominal.' + trait + out_tag + '.upset.input.csv',
                     index=True)
        siggy['replicates'] = siggy.sum(axis=1)
        table.loc[trait, 'replicated'] = siggy[siggy['replicates'] >= 1].shape[0]
        table.loc[trait, 'missed'] = otr_sig.shape[0]
    table['thresh'] = 0.05 / table['tested']
    table['proportion'] = table['replicated'] / table['tested']
    return table


# SNP-level replication summary.
thresholds = _replication_summary('../variant_manhattan_plot/',
                                  '.merged.snps.txt', 'SNP',
                                  ['european', '#CHROM', 'POS'], '')
_cols = ['thresh', 'tested', 'replicated', 'proportion', 'missed']
thresholds[_cols].to_csv('nominal.metadata.csv')
thresholds[_cols].to_csv('nominal.metadata.latex.csv', sep='&')

# Gene-level replication summary (full table written, as in the original).
thresholds = _replication_summary('../gene_manhattan_plot/',
                                  '.merged.genes.txt', 'gene',
                                  ['european', 'chr'], '.gene')
thresholds.to_csv('nominal.metadata.gene.csv')
thresholds.to_csv('nominal.metadata.gene.latex.csv', sep='&')
| 48.548387
| 245
| 0.685382
| 390
| 3,010
| 5.228205
| 0.230769
| 0.049044
| 0.052967
| 0.037273
| 0.847474
| 0.805297
| 0.776851
| 0.748406
| 0.748406
| 0.748406
| 0
| 0.016562
| 0.097342
| 3,010
| 61
| 246
| 49.344262
| 0.733898
| 0
| 0
| 0.666667
| 0
| 0
| 0.285809
| 0.0668
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.039216
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dfc012af331025226531bd81ef16ee8072cf3051
| 31
|
py
|
Python
|
nepse/market/__init__.py
|
thenishantsapkota/nepse-api
|
d7b325d2eaecaae16e3859dd50012507dc3b3afa
|
[
"MIT"
] | 28
|
2021-05-30T15:45:21.000Z
|
2021-08-03T13:21:14.000Z
|
nepse/market/__init__.py
|
razesh66/nepse-api
|
e0aaef402b00b9c07b4e0a3e18ef5bc20beba5c3
|
[
"MIT"
] | 27
|
2021-06-03T09:35:28.000Z
|
2021-07-17T21:03:01.000Z
|
nepse/market/__init__.py
|
razesh66/nepse-api
|
e0aaef402b00b9c07b4e0a3e18ef5bc20beba5c3
|
[
"MIT"
] | 9
|
2021-06-02T09:18:24.000Z
|
2021-07-17T04:44:40.000Z
|
from .core import MarketClient
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f06be5fd8f7300dbe0d103d552e47d7d0e1c73e
| 42
|
py
|
Python
|
my_common/__init__.py
|
ctwillson/tutusnowball
|
5413cbc8403be4c1374ae4924ccc47abb92da980
|
[
"Apache-2.0"
] | null | null | null |
my_common/__init__.py
|
ctwillson/tutusnowball
|
5413cbc8403be4c1374ae4924ccc47abb92da980
|
[
"Apache-2.0"
] | null | null | null |
my_common/__init__.py
|
ctwillson/tutusnowball
|
5413cbc8403be4c1374ae4924ccc47abb92da980
|
[
"Apache-2.0"
] | null | null | null |
from .mypush import *
from .mylog import *
| 21
| 21
| 0.738095
| 6
| 42
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 2
| 22
| 21
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f30638c5229aad042ac6a6e0f79c427a2bd3761
| 423
|
py
|
Python
|
xfb/ReIDTest/FPO/fpo/config.py
|
X-funbean/fast-reid
|
97ef9608ba696d116fafb3a8746842b8d0cf546e
|
[
"Apache-2.0"
] | null | null | null |
xfb/ReIDTest/FPO/fpo/config.py
|
X-funbean/fast-reid
|
97ef9608ba696d116fafb3a8746842b8d0cf546e
|
[
"Apache-2.0"
] | null | null | null |
xfb/ReIDTest/FPO/fpo/config.py
|
X-funbean/fast-reid
|
97ef9608ba696d116fafb3a8746842b8d0cf546e
|
[
"Apache-2.0"
] | null | null | null |
from fastreid.config import CfgNode as CN
def add_fpo_config(cfg):
    """Attach FPO-specific options to *cfg*.

    All candidate options are currently commented out, so this is a
    no-op placeholder kept for forward compatibility.
    """
    _C = cfg
    # ------- res4f b1 Optimizer (disabled) -------
    # _C.MODEL.LOSSES.BCE = CN()
    # _C.MODEL.LOSSES.BCE.WEIGHT_ENABLED = True
    # _C.MODEL.LOSSES.BCE.SCALE = 1.0
    # _C.TEST.THRES = 0.5
| 30.214286
| 84
| 0.387707
| 41
| 423
| 3.804878
| 0.658537
| 0.115385
| 0.230769
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017094
| 0.170213
| 423
| 14
| 85
| 30.214286
| 0.42735
| 0.6974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a06efd957b45d6e6befc361e8e3bd459af884d60
| 165
|
py
|
Python
|
application/views.py
|
kjarkko/Heron
|
6b78780381d9a9c24f57cf1b354493b05b659ab8
|
[
"MIT"
] | null | null | null |
application/views.py
|
kjarkko/Heron
|
6b78780381d9a9c24f57cf1b354493b05b659ab8
|
[
"MIT"
] | 1
|
2018-06-19T16:18:49.000Z
|
2018-06-19T16:18:49.000Z
|
application/views.py
|
kjarkko/Heron
|
6b78780381d9a9c24f57cf1b354493b05b659ab8
|
[
"MIT"
] | null | null | null |
from flask import render_template
from application import app
@app.route("/")
def index(text='Welcome to Heron'):
    """Render the landing page with a greeting (default: 'Welcome to Heron')."""
    greeting = text
    return render_template("index.html", text=greeting)
| 20.625
| 48
| 0.763636
| 24
| 165
| 5.166667
| 0.666667
| 0.225806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115152
| 165
| 7
| 49
| 23.571429
| 0.849315
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
a0b5e4dfcefb6a42381073e07be8e89b24c288f5
| 182
|
py
|
Python
|
src/components/completed_popup.py
|
priscilafaliani/Analisis-De-Datos
|
247695e3e976f042fc453d02bd9d4d722fb331ff
|
[
"CC0-1.0"
] | 6
|
2021-05-02T08:28:42.000Z
|
2021-05-07T13:18:32.000Z
|
src/components/completed_popup.py
|
priscilafaliani/Analisis-De-Datos
|
247695e3e976f042fc453d02bd9d4d722fb331ff
|
[
"CC0-1.0"
] | null | null | null |
src/components/completed_popup.py
|
priscilafaliani/Analisis-De-Datos
|
247695e3e976f042fc453d02bd9d4d722fb331ff
|
[
"CC0-1.0"
] | null | null | null |
from src.windows.popups import completed_popup
def start(filepath):
    """Show the completed popup for *filepath* and return the values read."""
    popup = completed_popup.build(filepath)
    return popup.read(close=True)
| 30.333333
| 59
| 0.758242
| 26
| 182
| 5.230769
| 0.730769
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137363
| 182
| 6
| 59
| 30.333333
| 0.866242
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
39fd25eb6db424b4ba4edfa8aa47ea52c2f6d78b
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/setuptools/namespaces.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/setuptools/namespaces.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/setuptools/namespaces.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/3c/ca/86/54f5cf610823513bc483d6c671c440908383ad0e8d9ac0e0fdfc04af02
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 0
| 96
| 1
| 96
| 96
| 0.479167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
261909d20a850d3a893a2cce7f4167f566a15591
| 392
|
py
|
Python
|
app/lib/__init__.py
|
huguge/flask-bms
|
ebb93405c3142335f7162c3db9ba15e55f1633e4
|
[
"MIT"
] | 6
|
2019-04-24T06:44:24.000Z
|
2021-12-26T13:47:45.000Z
|
app/lib/__init__.py
|
huguge/flask-bms
|
ebb93405c3142335f7162c3db9ba15e55f1633e4
|
[
"MIT"
] | null | null | null |
app/lib/__init__.py
|
huguge/flask-bms
|
ebb93405c3142335f7162c3db9ba15e55f1633e4
|
[
"MIT"
] | 2
|
2020-02-26T12:07:44.000Z
|
2021-10-16T05:59:47.000Z
|
from .auth_decorators import super_admin_require, content_admin_require
from random import choice
from flask import render_template,current_app
def custom_render_template(template_name, **kw):
    """Render *template_name*, injecting the app's MENU_CATEGORY config."""
    menu_category = current_app.config['MENU_CATEGORY']
    return render_template(template_name, MENU_CATEGORY=menu_category, **kw)
def color_picker():
    """Return one of the four theme color names, chosen at random."""
    palette = ['info', 'primary', 'warning', 'success']
    return choice(palette)
| 43.555556
| 71
| 0.803571
| 52
| 392
| 5.75
| 0.557692
| 0.140468
| 0.147157
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094388
| 392
| 9
| 72
| 43.555556
| 0.842254
| 0
| 0
| 0
| 0
| 0
| 0.096692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.375
| 0.125
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
2683678fb96bfe8db403432f42980951e89813f9
| 205
|
py
|
Python
|
blender/nodes/converter/__init__.py
|
LewisOrton/taichi_elements_houdini
|
50ef3232f080030213bcb7578a48d03647a9445b
|
[
"MIT"
] | 1
|
2021-05-13T11:34:03.000Z
|
2021-05-13T11:34:03.000Z
|
blender/nodes/converter/__init__.py
|
LewisOrton/taichi_elements_houdini
|
50ef3232f080030213bcb7578a48d03647a9445b
|
[
"MIT"
] | null | null | null |
blender/nodes/converter/__init__.py
|
LewisOrton/taichi_elements_houdini
|
50ef3232f080030213bcb7578a48d03647a9445b
|
[
"MIT"
] | null | null | null |
from .int_to_float import *
from .color_to_vector import *
from .hex_color_to_rgb import *
from .float_math import *
from .vector_math import *
from .combine_vector import *
from .seratate_vector import *
| 25.625
| 31
| 0.795122
| 32
| 205
| 4.75
| 0.375
| 0.394737
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136585
| 205
| 7
| 32
| 29.285714
| 0.858757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cd856c9e8c12b9dca7849de589beddc7d8230698
| 165
|
py
|
Python
|
tests/test_state.py
|
emanuellima1/cadcad-ri
|
b9c6d60cd60e74cecf61641167c7a531ebf345b8
|
[
"BSD-2-Clause"
] | 1
|
2022-01-11T16:43:47.000Z
|
2022-01-11T16:43:47.000Z
|
tests/test_state.py
|
emanuellima1/cadcad-ri
|
b9c6d60cd60e74cecf61641167c7a531ebf345b8
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_state.py
|
emanuellima1/cadcad-ri
|
b9c6d60cd60e74cecf61641167c7a531ebf345b8
|
[
"BSD-2-Clause"
] | 1
|
2022-01-11T16:43:54.000Z
|
2022-01-11T16:43:54.000Z
|
"""State testing.
TODO: Create more comprehensive testing
"""
# from cadcad.space import Space
# NOTE(review): placeholder test -- the body is only a docstring, so it
# currently asserts nothing and always passes (see the module-level TODO).
def test_class_creation() -> None:
"""Test class creation."""
| 15
| 39
| 0.69697
| 20
| 165
| 5.65
| 0.75
| 0.159292
| 0.300885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169697
| 165
| 10
| 40
| 16.5
| 0.824818
| 0.654545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 1
| 1
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
f8359bb46477bbc1e0d7ce536b202ca3cb4f9f0a
| 9,682
|
py
|
Python
|
engine/blender/scripts/Finger_Front.py
|
divinedragon/dumb-charades-ai
|
f187580d7f68c4d5a8b2378f211d286fc5bb1fe6
|
[
"Apache-2.0"
] | null | null | null |
engine/blender/scripts/Finger_Front.py
|
divinedragon/dumb-charades-ai
|
f187580d7f68c4d5a8b2378f211d286fc5bb1fe6
|
[
"Apache-2.0"
] | null | null | null |
engine/blender/scripts/Finger_Front.py
|
divinedragon/dumb-charades-ai
|
f187580d7f68c4d5a8b2378f211d286fc5bb1fe6
|
[
"Apache-2.0"
] | null | null | null |
##################################################################################################
# SCRIPT TO ANIMATE FRONT PART OF FINGERS
# Created, Invented and Made Alive By : Deepak Shakya
# Date : 08-April-2008
##################################################################################################
##################################################################################################
# This function Animates the Front Part of Finger Bones - All of them
#
# Armature_Object - The Bone Armature from which it selects the Finger Bone to animate
# direction - The final direction where the Bone has to be moved()
# endFrame - The final frame at which the Bone has to be moved in the specified direction
##################################################################################################
##########################################
# ALL LEFT BONES ARE SCRIPTED BELOW #
##########################################
from Blender import *
import Blender
# Left Thumb - Front Part
def LEFTTHUMBFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the left thumb and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.8, 0.0, 0.0],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_R.005"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
# Left Index Finger - Front Part
def LEFTINDEXFINGERFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the left index finger and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.0, 0.0, -1.5],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_R.011"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
# Left Middle Finger - Front Part
def LEFTMIDDLEFINGERFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the left middle finger and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.0, 0.0, -1.5],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_R.007"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
# Left Small Finger - Front Part
def LEFTSMALLFINGERFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the left small finger and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.0, 0.0, -0.8],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_R.009"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
###########################################
# ALL RIGHT BONES ARE SCRIPTED BELOW #
###########################################
# Right Thumb - Front Part
def RIGHTTHUMBFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the right thumb and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.8, 0.0, 0.0],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_L.005"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
# Right Index Finger - Front Part
def RIGHTINDEXFINGERFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the right index finger and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.0, 0.0, 1.5],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_L.011"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
# Right Middle Finger - Front Part
def RIGHTMIDDLEFINGERFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the right middle finger and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.0, 0.0, 1.5],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_L.007"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
# Right Small Finger - Front Part
def RIGHTSMALLFINGERFRONT(Armature_Object, direction, endFrame):
    """Pose the front part of the right small finger and key its rotation.

    Armature_Object -- Blender armature holding the hand bones
    direction       -- pose name, "FOLD" or "DEFAULT"
    endFrame        -- frame at which the rotation key is inserted
    Returns endFrame so animation steps can be chained.
    """
    # Quaternion poses; per the original author's notes the 2nd component
    # nods the bone up/down, the 3rd spins it on its axis (0.5 ~ 90 deg),
    # and the 4th swings it left/right.
    poses = {
        "FOLD": [1.0, 0.0, 0.0, 0.8],
        "DEFAULT": [1.0, 0.0, 0.0, 0.0],
    }
    candidates = Armature_Object.getPose().bones.values()
    target = [bn for bn in candidates if bn.name == "Bone.002_L.009"][0]
    q0, q1, q2, q3 = poses[direction]
    target.quat[:] = q0, q1, q2, q3
    target.insertKey(Armature_Object, endFrame, Object.Pose.ROT)
    return endFrame
| 40.51046
| 100
| 0.694485
| 1,445
| 9,682
| 4.608305
| 0.093426
| 0.024628
| 0.028833
| 0.028833
| 0.856435
| 0.8258
| 0.820093
| 0.820093
| 0.820093
| 0.802673
| 0
| 0.032891
| 0.152138
| 9,682
| 239
| 101
| 40.51046
| 0.778292
| 0.443607
| 0
| 0.684211
| 0
| 0
| 0.042114
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070175
| false
| 0
| 0.017544
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f83e7a733c9ee1e9e54fdd275f2cdbfb210bc404
| 749
|
py
|
Python
|
tests/test_statistics.py
|
soccermetrics/marcotti-mls
|
84c6d0d619c1a0c70dc6602074a3c5227959803c
|
[
"MIT"
] | 3
|
2016-08-04T10:34:01.000Z
|
2019-03-05T23:22:06.000Z
|
tests/test_statistics.py
|
soccermetrics/marcotti-mls
|
84c6d0d619c1a0c70dc6602074a3c5227959803c
|
[
"MIT"
] | null | null | null |
tests/test_statistics.py
|
soccermetrics/marcotti-mls
|
84c6d0d619c1a0c70dc6602074a3c5227959803c
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# NOTE(review): every test below is a deliberate stub -- each raises
# NotImplementedError so the suite reports these statistics-model cases
# as unwritten rather than silently passing.  `session` is presumably a
# pytest database-session fixture; confirm against the project conftest.
def test_common_stats_insert(session):
raise NotImplementedError
def test_duplicate_common_stats_error(session):
raise NotImplementedError
def test_common_stats_default_values(session):
raise NotImplementedError
def test_common_stats_negative_value_error(session):
raise NotImplementedError
def test_field_stats_default_values(session):
raise NotImplementedError
def test_field_stats_negative_value_error(session):
raise NotImplementedError
def test_goalkeeper_stats_default_values(session):
raise NotImplementedError
def test_goalkeeper_stats_negative_value_error(session):
raise NotImplementedError
def test_field_and_goalkeeper_stats_insert(session):
raise NotImplementedError
| 19.710526
| 56
| 0.837116
| 87
| 749
| 6.758621
| 0.229885
| 0.107143
| 0.47449
| 0.462585
| 0.904762
| 0.75
| 0.741497
| 0.613946
| 0.328231
| 0.22449
| 0
| 0.001522
| 0.12283
| 749
| 37
| 57
| 20.243243
| 0.893455
| 0.016021
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f8b5dccc87d30f9b5f8ae4d74ae75f80efa61a0c
| 2,952
|
py
|
Python
|
Web/carleaseapp/car-lease-calculator-v2/fin_picking/views.py
|
honchardev/Fun
|
ca7c0076e9bb3017c5d7e89aa7d5bd54a83c8ecc
|
[
"MIT"
] | null | null | null |
Web/carleaseapp/car-lease-calculator-v2/fin_picking/views.py
|
honchardev/Fun
|
ca7c0076e9bb3017c5d7e89aa7d5bd54a83c8ecc
|
[
"MIT"
] | 3
|
2020-03-24T16:26:35.000Z
|
2020-04-15T19:40:41.000Z
|
Web/carleaseapp/car-lease-calculator-v2/fin_picking/views.py
|
honchardev/Fun
|
ca7c0076e9bb3017c5d7e89aa7d5bd54a83c8ecc
|
[
"MIT"
] | null | null | null |
import json
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from .ratepicker import RatePicker
@require_http_methods(['POST', ])
@csrf_exempt
def view_pick_v2(request):
    """POST endpoint: pick the best lease rate via RatePicker.pick_v2.

    Expects a JSON body with flat customer/business/misc fields and
    returns ``{"status": "ok", "result": <picker result>}``.
    """
    payload = json.loads(request.body.decode('utf-8'))

    # Regroup the flat POST payload into the three metric buckets the
    # picker expects.  Missing keys default to None, except discount_pct
    # which defaults to 0.
    customer_metrics = {
        'car_price_usd': payload.get('car_price_usd', None),
        'down_payment_usd': payload.get('down_payment_usd', None),
        'lease_term_months': payload.get('lease_term_months', None),
        'discount_pct': payload.get('discount_pct', 0),
    }
    business_metrics = {
        'commission_pct': payload.get('commission_pct', None),
        'tracker_price_uah': payload.get('tracker_price_uah', None),
        'tracker_subscription_fee_uah': payload.get('tracker_subscription_fee_uah', None),
        'insurance_pct': payload.get('insurance_pct', None),
        'desired_irr_pct': payload.get('desired_irr_pct', None),
    }
    misc_metrics = {
        'exchange_rate': payload.get('exchange_rate', None),
        'precision': payload.get('precision', None),
    }

    result = RatePicker().pick_v2(
        customer_metrics=customer_metrics,
        business_metrics=business_metrics,
        misc_metrics=misc_metrics,
    )
    return JsonResponse({'status': 'ok', 'result': result})
@require_http_methods(['POST', ])
@csrf_exempt
def view_pick_cash_credit(request):
    """POST endpoint: pick a lease rate via RatePicker.pick_cash_credit.

    Expects a JSON body with flat customer/business/misc fields and
    returns ``{"status": "ok", "result": <picker result>}``.
    """
    payload = json.loads(request.body.decode('utf-8'))

    # Regroup the flat POST payload into the buckets the picker expects;
    # every missing key defaults to None.
    customer_metrics = {
        'car_price_uah': payload.get('car_price_uah', None),
        'lease_term_months': payload.get('lease_term_months', None),
    }
    business_metrics = {
        'commission_pct': payload.get('commission_pct', None),
        'desired_irr_pct': payload.get('desired_irr_pct', None),
    }
    misc_metrics = {
        'precision': payload.get('precision', None),
    }

    result = RatePicker().pick_cash_credit(
        customer_metrics=customer_metrics,
        business_metrics=business_metrics,
        misc_metrics=misc_metrics,
    )
    return JsonResponse({'status': 'ok', 'result': result})
| 31.404255
| 100
| 0.695122
| 362
| 2,952
| 5.28453
| 0.190608
| 0.103502
| 0.14114
| 0.150549
| 0.80345
| 0.768949
| 0.708312
| 0.708312
| 0.708312
| 0.663356
| 0
| 0.002131
| 0.205285
| 2,952
| 93
| 101
| 31.741935
| 0.813299
| 0.088415
| 0
| 0.57971
| 0
| 0
| 0.192537
| 0.020896
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0.072464
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f8bbae11ecd9a1e4aa5afefc3820875c809878ed
| 3,408
|
py
|
Python
|
examples/convolution_light_shutter_photonics.py
|
davidxcohen/imaging
|
c4d7107c169c2be3338e32a5c8a88db62f504d6d
|
[
"MIT"
] | null | null | null |
examples/convolution_light_shutter_photonics.py
|
davidxcohen/imaging
|
c4d7107c169c2be3338e32a5c8a88db62f504d6d
|
[
"MIT"
] | null | null | null |
examples/convolution_light_shutter_photonics.py
|
davidxcohen/imaging
|
c4d7107c169c2be3338e32a5c8a88db62f504d6d
|
[
"MIT"
] | null | null | null |
import numpy as np
import os, sys

CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.dirname(CURRENT_DIR + '/..' + '/func'))
from func.photonic_func import Photonic
from plotly.offline import init_notebook_mode, iplot
import plotly.graph_objs as go
# init_notebook_mode(connected=True)  # for Jupyter Lab notebook

photonic = Photonic(config='Cfg3')


def _plot_light_shutter_conv(title, light_kwargs, shutter_kwargs, note_x):
    """Generate a light and a shutter pulse, convolve them, and plot.

    light_kwargs / shutter_kwargs are keyword arguments forwarded to
    Photonic.generate_pulse; note_x is the x-position of the explanatory
    annotation. The convolution trace is shifted left by the shutter
    width plus 2 ns, matching the original example.
    """
    y_light, t_light = photonic.generate_pulse(**light_kwargs)
    y_shutter, t_shutter = photonic.generate_pulse(**shutter_kwargs)
    y_conv, t_conv = photonic.conv_light_shutter(
        t_light=t_light, y_light=y_light,
        t_shutter=t_shutter, y_shutter=y_shutter)
    traces = [
        go.Scatter(x=t_light, y=y_light,
                   mode='lines+markers',  # Select 'lines', 'markers' or 'lines+markers'
                   name='Light'),
        go.Scatter(x=t_shutter, y=y_shutter, mode='lines+markers',
                   name='Shutter'),
        go.Scatter(x=t_conv - shutter_kwargs['width'] - 2 * 1e-9, y=y_conv,
                   mode='lines+markers', name='Conv light-shutter'),
        go.Scatter(x=[note_x], y=[0.8], mode='text', textposition='top right',
                   name='text',
                   text=['Convolution is normalized to the light integral'
                         + '<br>Equal 1.0 when light fully integrated by the shutter']),
    ]
    layout = dict(title=title,
                  xaxis=dict(title='time, time delay [sec]', type='linear'),  # Select 'log' or 'linear'
                  yaxis=dict(title='Signal', type='linear'),  # Select 'log' or 'linear'
                  template='plotly_dark')
    iplot(dict(data=traces, layout=layout))


# Light & Shutter pulses + its Convolution: both square and equal
_plot_light_shutter_conv(
    'Light & Shutter pulses + its Convolution: both square and equal',
    light_kwargs=dict(rise=1e-14, fall=1e-14, width=1e-8, smooth=False),
    shutter_kwargs=dict(delay=3e-9, rise=1e-14, fall=1e-14, width=1e-8, smooth=False),
    note_x=-13e-9)

# Light & Shutter pulses + its Convolution: square shutter triangle light.
# Bug fix: the title previously was copy-pasted from the first plot
# ("both square and equal") and did not match this section.
_plot_light_shutter_conv(
    'Light & Shutter pulses + its Convolution: square shutter triangle light',
    light_kwargs=dict(rise=0.3e-8, fall=0.3e-8, width=1e-8, smooth=True),
    shutter_kwargs=dict(delay=3e-9, rise=1e-9, fall=1e-9, width=0.8e-8, smooth=True),
    note_x=-10e-9)
| 39.627907
| 101
| 0.614437
| 476
| 3,408
| 4.336134
| 0.228992
| 0.05814
| 0.03876
| 0.040698
| 0.806686
| 0.791182
| 0.791182
| 0.791182
| 0.782461
| 0.739341
| 0
| 0.040341
| 0.243545
| 3,408
| 86
| 102
| 39.627907
| 0.760279
| 0.113556
| 0
| 0.681818
| 0
| 0
| 0.208569
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.075758
| 0
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3e7aade394b23fa0e220a94db3d1ffb28ebb088f
| 9,442
|
py
|
Python
|
idfy_rest_client/controllers/events_controller.py
|
dealflowteam/Idfy
|
fa3918a6c54ea0eedb9146578645b7eb1755b642
|
[
"MIT"
] | null | null | null |
idfy_rest_client/controllers/events_controller.py
|
dealflowteam/Idfy
|
fa3918a6c54ea0eedb9146578645b7eb1755b642
|
[
"MIT"
] | null | null | null |
idfy_rest_client/controllers/events_controller.py
|
dealflowteam/Idfy
|
fa3918a6c54ea0eedb9146578645b7eb1755b642
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
idfy_rest_client.controllers.events_controller
This file was automatically generated for Idfy by APIMATIC v2.0 ( https://apimatic.io ).
"""
from .base_controller import BaseController
from ..api_helper import APIHelper
from ..configuration import Configuration
from ..http.auth.o_auth_2 import OAuth2
from ..models.event_type_info import EventTypeInfo
from ..models.event_dto import EventDto
class EventsController(BaseController):
    """A Controller to access Endpoints in the idfy_rest_client API."""
    # NOTE: this file is auto-generated by APIMATIC (see module docstring);
    # manual edits here may be overwritten on regeneration.

    def events_get_event_types(self):
        """Does a GET request to /notification/events/types.

        Returns a list of all available event types.

        Returns:
            EventTypeInfo: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received
                in the request.
        """
        # Prepare query URL
        _query_builder = Configuration.get_base_uri()
        _query_builder += '/notification/events/types'
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json'
        }

        # Prepare and execute request
        _request = self.http_client.get(_query_url, headers=_headers)
        OAuth2.apply(_request)  # attach the OAuth2 bearer token
        _context = self.execute_request(_request)
        self.validate_response(_context)  # raises APIException on HTTP errors

        # Return appropriate type
        return APIHelper.json_deserialize(_context.response.raw_body, EventTypeInfo.from_dictionary)

    def events_clear(self):
        """Does a POST request to /notification/events/clear.

        Clear all events for your account.

        Returns:
            void: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received
                in the request.
        """
        # Prepare query URL
        _query_builder = Configuration.get_base_uri()
        _query_builder += '/notification/events/clear'
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare and execute request (no body, no custom headers needed)
        _request = self.http_client.post(_query_url)
        OAuth2.apply(_request)  # attach the OAuth2 bearer token
        _context = self.execute_request(_request)
        self.validate_response(_context)  # raises APIException on HTTP errors

    def events_peek(self,
                    event_type=None,
                    tags=None):
        """Does a GET request to /notification/events/peek.

        Peek top 100 unhandled events regardless if they are locked or not.
        Dont use this endpoint to handle events.

        Args:
            event_type (EventType, optional): Filter by event type
            tags (string, optional): Filter the events with your own tags that
                you added to the document when you created it (Separate tags
                with ,)

        Returns:
            list of EventDto: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received
                in the request.
        """
        # Prepare query URL (filters are passed as query parameters;
        # None values are handled by the query-parameter serializer)
        _query_builder = Configuration.get_base_uri()
        _query_builder += '/notification/events/peek'
        _query_parameters = {
            'eventType': event_type,
            'tags': tags
        }
        _query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
            _query_parameters, Configuration.array_serialization)
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json'
        }

        # Prepare and execute request
        _request = self.http_client.get(_query_url, headers=_headers)
        OAuth2.apply(_request)  # attach the OAuth2 bearer token
        _context = self.execute_request(_request)
        self.validate_response(_context)  # raises APIException on HTTP errors

        # Return appropriate type
        return APIHelper.json_deserialize(_context.response.raw_body, EventDto.from_dictionary)

    def events_handle(self,
                      event_id):
        """Does a POST request to /notification/events/{eventId}.

        Mark the status of an event as handled to make sure you dont retrieve
        this event again.

        Args:
            event_id (uuid|string): TODO: type description here. Example:

        Returns:
            void: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received
                in the request.
        """
        # Validate required parameters (raises if event_id is None)
        self.validate_parameters(event_id=event_id)

        # Prepare query URL ({eventId} placeholder is filled in below)
        _query_builder = Configuration.get_base_uri()
        _query_builder += '/notification/events/{eventId}'
        _query_builder = APIHelper.append_url_with_template_parameters(_query_builder, {
            'eventId': event_id
        })
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare and execute request
        _request = self.http_client.post(_query_url)
        OAuth2.apply(_request)  # attach the OAuth2 bearer token
        _context = self.execute_request(_request)
        self.validate_response(_context)  # raises APIException on HTTP errors

    def events_handle_many(self,
                           event_ids):
        """Does a POST request to /notification/events.

        Mark the status of a batch of events as handled to make sure you dont
        retrieve these events again.

        Args:
            event_ids (list of uuid|string): TODO: type description here.
                Example:

        Returns:
            void: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received
                in the request.
        """
        # Validate required parameters (raises if event_ids is None)
        self.validate_parameters(event_ids=event_ids)

        # Prepare query URL
        _query_builder = Configuration.get_base_uri()
        _query_builder += '/notification/events'
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'content-type': 'application/json; charset=utf-8'
        }

        # Prepare and execute request (the id list is sent as the JSON body)
        _request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(event_ids))
        OAuth2.apply(_request)  # attach the OAuth2 bearer token
        _context = self.execute_request(_request)
        self.validate_response(_context)  # raises APIException on HTTP errors

    def events_get(self,
                   event_type=None,
                   tags=None):
        """Does a GET request to /notification/events.

        Retrieve up to 100 unhandled events for your account. After you
        retrieve this list the events will be "locked" for 10 minutes to give
        you time to handle them. Please handle the events using one of the
        endpoints in this API to avoid retrieving the same events multiple
        times.

        Args:
            event_type (EventType, optional): Filter by event type
            tags (string, optional): Filter the events with your own tags that
                you added to the document when you created it (Separate tags
                with ,)

        Returns:
            list of EventDto: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received
                in the request.
        """
        # Prepare query URL (same filter parameters as events_peek)
        _query_builder = Configuration.get_base_uri()
        _query_builder += '/notification/events'
        _query_parameters = {
            'eventType': event_type,
            'tags': tags
        }
        _query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
            _query_parameters, Configuration.array_serialization)
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json'
        }

        # Prepare and execute request
        _request = self.http_client.get(_query_url, headers=_headers)
        OAuth2.apply(_request)  # attach the OAuth2 bearer token
        _context = self.execute_request(_request)
        self.validate_response(_context)  # raises APIException on HTTP errors

        # Return appropriate type
        return APIHelper.json_deserialize(_context.response.raw_body, EventDto.from_dictionary)
| 35.630189
| 119
| 0.612158
| 1,036
| 9,442
| 5.366795
| 0.173745
| 0.051799
| 0.032374
| 0.053957
| 0.777698
| 0.777698
| 0.771583
| 0.745863
| 0.733633
| 0.733633
| 0
| 0.003151
| 0.327791
| 9,442
| 264
| 120
| 35.765152
| 0.872853
| 0.425651
| 0
| 0.645161
| 1
| 0
| 0.064813
| 0.023996
| 0
| 0
| 0
| 0.007576
| 0
| 1
| 0.064516
| false
| 0
| 0.064516
| 0
| 0.172043
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e416a2c332967e0c36aa82cb886286b508d0c801
| 146
|
py
|
Python
|
src/deoxys/keras/models.py
|
huynhngoc/deoxys
|
b2e9936b723807e129fda36d8d6131ca00db558f
|
[
"MIT"
] | 1
|
2021-12-28T15:48:45.000Z
|
2021-12-28T15:48:45.000Z
|
src/deoxys/keras/models.py
|
huynhngoc/deoxys
|
b2e9936b723807e129fda36d8d6131ca00db558f
|
[
"MIT"
] | 2
|
2020-06-26T11:03:53.000Z
|
2020-06-26T11:05:09.000Z
|
src/deoxys/keras/models.py
|
huynhngoc/deoxys
|
b2e9936b723807e129fda36d8d6131ca00db558f
|
[
"MIT"
] | null | null | null |
from ..utils import is_keras_standalone

# Re-export the Keras model API from whichever Keras distribution is
# installed, so the rest of deoxys can always import from this one module.
if is_keras_standalone():
    # Standalone `keras` package is available — use it directly.
    from keras.models import *
else:
    # Fall back to the Keras bundled with TensorFlow.
    from tensorflow.keras.models import *
| 20.857143
| 41
| 0.760274
| 20
| 146
| 5.35
| 0.5
| 0.130841
| 0.317757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164384
| 146
| 6
| 42
| 24.333333
| 0.877049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
902c8cf6817ca04b0e786759d1d6a1229db0776e
| 491
|
py
|
Python
|
tests/test_rankings.py
|
noahgill409/osrs_highscores
|
5d499490c73681ea4a0cbe329bd6ec2c4720ba03
|
[
"MIT"
] | 2
|
2022-01-31T00:07:16.000Z
|
2022-03-04T19:37:55.000Z
|
tests/test_rankings.py
|
noahgill409/osrs_highscores
|
5d499490c73681ea4a0cbe329bd6ec2c4720ba03
|
[
"MIT"
] | 3
|
2020-03-04T04:32:30.000Z
|
2020-03-23T14:16:19.000Z
|
tests/test_rankings.py
|
noahgill409/osrs_highscores
|
5d499490c73681ea4a0cbe329bd6ec2c4720ba03
|
[
"MIT"
] | 4
|
2020-03-04T04:44:00.000Z
|
2022-01-31T01:00:42.000Z
|
import pytest
from osrs_highscores.rankings import Rankings
# TODO: Breaking change on OSRS highscores for robot detection.
# def test_lookup_skill():
# rank = Rankings().get_rank_in_skill('attack', 1)
# assert rank.rank == 1
#
#
# def test_lookup_nonskill():
# rank = Rankings().get_rank_in_target('clue_scrolls_all', 1)
# assert rank.score != 0
#
#
# def test_lookup_high_rank():
# rank = Rankings().get_rank_in_skill('herblore', 5000)
# assert rank.rank == 5000
| 25.842105
| 65
| 0.700611
| 68
| 491
| 4.779412
| 0.485294
| 0.064615
| 0.12
| 0.175385
| 0.224615
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0.029557
| 0.173116
| 491
| 18
| 66
| 27.277778
| 0.770936
| 0.814664
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f7808fe7b1a3be9af1159219f8dcbb84aade250
| 210
|
py
|
Python
|
checkmate/contrib/plugins/git/commands/__init__.py
|
marcinguy/checkmate-ce
|
fc33c7c27bc640ab4db5dbda274a0edd3b3db218
|
[
"MIT"
] | 80
|
2015-01-06T17:42:39.000Z
|
2022-02-08T19:08:21.000Z
|
checkmate/contrib/plugins/git/commands/__init__.py
|
ravikumarpurbey/checkmate
|
1a4d010c8ef25c678d8d14dc8e37a9bed1883ca2
|
[
"MIT"
] | 6
|
2015-08-04T12:16:48.000Z
|
2021-02-27T12:09:16.000Z
|
checkmate/contrib/plugins/git/commands/__init__.py
|
ravikumarpurbey/checkmate
|
1a4d010c8ef25c678d8d14dc8e37a9bed1883ca2
|
[
"MIT"
] | 33
|
2015-01-02T14:18:11.000Z
|
2021-03-18T05:06:54.000Z
|
# -*- coding: utf-8 -*-
from .init import Command as InitCommand
from .analyze import Command as AnalyzeCommand
from .diff import Command as DiffCommand
from .update_stats import Command as UpdateStatsCommand
| 30
| 55
| 0.795238
| 28
| 210
| 5.928571
| 0.571429
| 0.313253
| 0.361446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005556
| 0.142857
| 210
| 6
| 56
| 35
| 0.916667
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5f8985c0aab46bec37784489e7c888f315e73679
| 26
|
py
|
Python
|
lab1/monitor/monitor/modules/__init__.py
|
vyacheslav-bezborodov/skt
|
58551eed497687adec5b56336037613a78cc5b2d
|
[
"MIT"
] | null | null | null |
lab1/monitor/monitor/modules/__init__.py
|
vyacheslav-bezborodov/skt
|
58551eed497687adec5b56336037613a78cc5b2d
|
[
"MIT"
] | null | null | null |
lab1/monitor/monitor/modules/__init__.py
|
vyacheslav-bezborodov/skt
|
58551eed497687adec5b56336037613a78cc5b2d
|
[
"MIT"
] | null | null | null |
import user
import system
| 8.666667
| 13
| 0.846154
| 4
| 26
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 2
| 14
| 13
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5fd0943b2ce78c592d78e2bf4ee02e6076cd9742
| 16,704
|
py
|
Python
|
tests/resources/test_keynote.py
|
pythonpanama/python-panama-backend
|
201a7d70c05c74a0a4127c8180240f8af303eb7d
|
[
"MIT"
] | 2
|
2021-03-22T14:48:31.000Z
|
2021-10-16T16:00:34.000Z
|
tests/resources/test_keynote.py
|
pythonpanama/python-panama-backend
|
201a7d70c05c74a0a4127c8180240f8af303eb7d
|
[
"MIT"
] | null | null | null |
tests/resources/test_keynote.py
|
pythonpanama/python-panama-backend
|
201a7d70c05c74a0a4127c8180240f8af303eb7d
|
[
"MIT"
] | null | null | null |
import json
import unittest
from tests.base_test import BaseTest
from tests.model_test_data import (
TEST_KEYNOTE_1,
TEST_KEYNOTE_2,
TEST_KEYNOTE_400,
TEST_MEMBER_1,
)
# noinspection PyArgumentList
class TestKeynoteResource(BaseTest):
    """Test all endpoints for the keynote resource"""

    TITLE_1 = TEST_KEYNOTE_1["title"]
    DESCRIPTION_1 = TEST_KEYNOTE_1["description"]
    SPEAKER_ID_1 = TEST_KEYNOTE_1["speaker_id"]
    # NOTE(review): reads from TEST_KEYNOTE_2 although this is the "_1"
    # constant — looks like a copy-paste slip; confirm both fixtures share
    # the same meeting_id before changing it.
    MEETING_ID_1 = TEST_KEYNOTE_2["meeting_id"]
    TITLE_2 = TEST_KEYNOTE_2["title"]
    DESCRIPTION_2 = TEST_KEYNOTE_2["description"]
    SPEAKER_ID_2 = TEST_KEYNOTE_2["speaker_id"]
    MEETING_ID_2 = TEST_KEYNOTE_2["meeting_id"]
    MSG_200 = "Keynote modified successfully."
    MSG_201 = "Keynote created successfully."
    MSG_400 = "400 BAD REQUEST"
    MSG_404 = "404 Not Found: Keynote with id '99' was not found."
    MSG_404_L = "404 Not Found: No keynotes found."
    MSG_404_M = "404 Not Found: Keynotes with meeting_id '99' was not found."
    MSG_404_S = "404 Not Found: Keynotes with speaker_id '99' was not found."
    MSG_DEL = "Keynote deleted successfully."

    @staticmethod
    def _json_headers(login):
        """Standard JSON content + bearer-auth headers for a logged-in user."""
        return {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {login['access_token']}",
        }

    def test_get_keynote_200(self):
        """GET /keynotes/<id> returns the stored keynote."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, _, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    f"/keynotes/{keynote.id}",
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["keynote"]["title"], self.TITLE_1)
                self.assertEqual(data["keynote"]["description"], self.DESCRIPTION_1)
                self.assertEqual(data["keynote"]["speaker_id"], self.SPEAKER_ID_1)
                self.assertEqual(data["keynote"]["meeting_id"], self.MEETING_ID_1)

    def test_get_keynote_404(self):
        """GET /keynotes/<id> for a missing id reports a 404 error message."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    "/keynotes/99",  # was f-string without placeholders (F541)
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["error"], self.MSG_404)

    def test_get_keynotes_by_meeting_id_200(self):
        """GET /keynotes/meeting/<id> returns the meeting's keynotes."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, _, meeting = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    f"/keynotes/meeting/{meeting.id}",
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(len(data["keynotes"]), 1)
                self.assertEqual(data["keynotes"][0]["title"], self.TITLE_1)
                self.assertEqual(data["keynotes"][0]["description"], self.DESCRIPTION_1)
                self.assertEqual(data["keynotes"][0]["speaker_id"], self.SPEAKER_ID_1)
                self.assertEqual(data["keynotes"][0]["meeting_id"], meeting.id)

    def test_get_keynotes_by_meeting_id_404(self):
        """GET /keynotes/meeting/<id> for a missing meeting reports 404."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    "/keynotes/meeting/99",  # was f-string without placeholders (F541)
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["error"], self.MSG_404_M)

    def test_get_keynotes_by_speaker_id_200(self):
        """GET /keynotes/speaker/<id> returns the speaker's keynotes."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, speaker, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    f"/keynotes/speaker/{speaker.id}",
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(len(data["keynotes"]), 1)
                self.assertEqual(data["keynotes"][0]["title"], self.TITLE_1)
                self.assertEqual(data["keynotes"][0]["description"], self.DESCRIPTION_1)
                self.assertEqual(data["keynotes"][0]["speaker_id"], speaker.id)
                self.assertEqual(data["keynotes"][0]["meeting_id"], self.MEETING_ID_1)

    def test_get_keynotes_by_speaker_id_404(self):
        """GET /keynotes/speaker/<id> for a missing speaker reports 404."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    "/keynotes/speaker/99",  # was f-string without placeholders (F541)
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["error"], self.MSG_404_S)

    def test_post_keynote_201(self):
        """POST /keynotes creates a keynote and echoes it back."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, _, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.post(
                    "/keynotes",
                    data=json.dumps(TEST_KEYNOTE_2),
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["message"], self.MSG_201)
                self.assertEqual(data["keynote"]["title"], self.TITLE_2)
                self.assertEqual(data["keynote"]["description"], self.DESCRIPTION_2)
                self.assertEqual(data["keynote"]["speaker_id"], self.SPEAKER_ID_2)
                self.assertEqual(data["keynote"]["meeting_id"], self.MEETING_ID_2)

    def test_post_keynote_400(self):
        """POST /keynotes with a bad or missing body is rejected with 400."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                # Invalid payload: field-level validation errors are returned.
                results = c.post(
                    "/keynotes",
                    data=json.dumps(TEST_KEYNOTE_400),
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertTrue("title" in data["error"])
                self.assertTrue("description" in data["error"])
                # Missing payload entirely: plain 400 status.
                results = c.post(
                    "/keynotes",
                    headers=self._json_headers(login),
                )
                self.assertEqual(results.status, self.MSG_400)

    def test_put_keynote_200(self):
        """PUT /keynotes/<id> updates the keynote and echoes the new values."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, _, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.put(
                    f"/keynotes/{keynote.id}",
                    data=json.dumps(TEST_KEYNOTE_2),
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["message"], self.MSG_200)
                self.assertEqual(data["keynote"]["title"], self.TITLE_2)
                self.assertEqual(data["keynote"]["description"], self.DESCRIPTION_2)
                self.assertEqual(data["keynote"]["speaker_id"], self.SPEAKER_ID_2)
                self.assertEqual(data["keynote"]["meeting_id"], self.MEETING_ID_2)

    def test_put_keynote_400(self):
        """PUT /keynotes/<id> without a body is rejected with 400."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, _, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.put(
                    f"/keynotes/{keynote.id}",
                    headers=self._json_headers(login),
                )
                self.assertEqual(results.status, self.MSG_400)

    def test_put_keynote_404(self):
        """PUT /keynotes/<id> for a missing id reports a 404 error message."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.put(
                    "/keynotes/99",  # was f-string without placeholders (F541)
                    data=json.dumps(TEST_KEYNOTE_2),
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(
                    data["error"],
                    self.MSG_404,
                )

    def test_delete_keynote_200(self):
        """DELETE /keynotes/<id> removes the keynote and confirms it."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote, member, _, _, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.delete(
                    f"/keynotes/{keynote.id}",
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["keynote"]["title"], self.TITLE_1)
                self.assertEqual(data["message"], self.MSG_DEL)

    def test_delete_keynote_404(self):
        """DELETE /keynotes/<id> for a missing id reports a 404 error message."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.delete(
                    "/keynotes/99",  # was f-string without placeholders (F541)
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["error"], self.MSG_404)

    def test_get_keynotes_200(self):
        """GET /keynotes lists all stored keynotes."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                keynote_1, member, _, _, _ = self.add_keynote_to_db(
                    self.keynote_1,
                    self.role_1,
                    self.member_1,
                    self.speaker_1,
                    self.meeting_1,
                )
                keynote_2 = self.keynote_2.save_to_db()
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    "/keynotes",  # was f-string without placeholders (F541)
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(len(data["keynotes"]), 2)
                self.assertEqual(data["keynotes"][0]["id"], keynote_1.id)
                self.assertEqual(data["keynotes"][1]["id"], keynote_2.id)

    def test_get_keynotes_404(self):
        """GET /keynotes with an empty table reports a 404 error message."""
        with self.client as c:
            with self.app_context:
                self.add_permissions_to_admin()
                member, _ = self.add_member_to_db(self.member_1, self.role_1)
                login = self.login(c, member.email, TEST_MEMBER_1["password"])
                results = c.get(
                    "/keynotes",  # was f-string without placeholders (F541)
                    headers=self._json_headers(login),
                )
                data = json.loads(results.data)
                self.assertEqual(data["error"], self.MSG_404_L)
# Allow running this test module directly: `python test_keynote.py`.
if __name__ == "__main__":  # pragma: no cover
    unittest.main()
| 38.4
| 87
| 0.469887
| 1,594
| 16,704
| 4.659975
| 0.060853
| 0.033656
| 0.081852
| 0.062466
| 0.882472
| 0.857297
| 0.852989
| 0.825794
| 0.825794
| 0.802369
| 0
| 0.027261
| 0.426844
| 16,704
| 434
| 88
| 38.488479
| 0.74859
| 0.005328
| 0
| 0.654795
| 0
| 0
| 0.146531
| 0.031903
| 0
| 0
| 0
| 0
| 0.106849
| 1
| 0.041096
| false
| 0.041096
| 0.010959
| 0
| 0.09863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
39d5ac98dd4a54a95124aa36d1b0d2072cb35488
| 411
|
py
|
Python
|
work.py
|
gt-big-data/redis-performance-example
|
2fba60e0750f15055e61a7e6381b126fe4027e0f
|
[
"MIT"
] | null | null | null |
work.py
|
gt-big-data/redis-performance-example
|
2fba60e0750f15055e61a7e6381b126fe4027e0f
|
[
"MIT"
] | null | null | null |
work.py
|
gt-big-data/redis-performance-example
|
2fba60e0750f15055e61a7e6381b126fe4027e0f
|
[
"MIT"
] | null | null | null |
import time
def do_work():
    """Simulate a unit of backend work that takes a fixed amount of time.

    Exists to demonstrate offloading work from webservers to backend
    workers. In the Real World(tm) much of this time would be spent
    waiting on DB calls, so there is no need to peg the CPU here.
    """
    work_duration_s = 0.1  # pretend the job takes 100 ms
    time.sleep(work_duration_s)
| 41.1
| 176
| 0.722628
| 74
| 411
| 4
| 0.756757
| 0.040541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015576
| 0.218978
| 411
| 9
| 177
| 45.666667
| 0.906542
| 0.822384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f2f05cdb0cc049cdaffffd8831615d85d7135869
| 7,450
|
py
|
Python
|
sibur-activity-web/hello/views.py
|
andreipit/ml_sib_chal_2019_deploy
|
19a486af35f70d47164f3a5fa4028b9b36856e0f
|
[
"MIT"
] | null | null | null |
sibur-activity-web/hello/views.py
|
andreipit/ml_sib_chal_2019_deploy
|
19a486af35f70d47164f3a5fa4028b9b36856e0f
|
[
"MIT"
] | null | null | null |
sibur-activity-web/hello/views.py
|
andreipit/ml_sib_chal_2019_deploy
|
19a486af35f70d47164f3a5fa4028b9b36856e0f
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
from .models import Greeting
import requests
from django.conf import settings
import xlrd
import hello.mylib as mylib
import hello.mylib_feateng as mylib_feateng
from django.http import HttpResponseRedirect
import pdb #pdb.set_trace()
import pandas as pd
def uploadcsv(request):
    """Render the index page with placeholder 'uploadcsv2' context values.

    NOTE(review): appears to be a stub -- no CSV upload handling is
    implemented here yet; confirm before extending.
    """
    context = {'predictions': 'uploadcsv2', 'df': 'uploadcsv2', 'pred': 'uploadcsv2'}
    return render(request, "index.html", context)
def update(request):
    """Return a JSON payload with the last 460 activity rows and the
    latest feature values.

    The response contains:
      - 'prediction': header row plus [time-of-day, activity, pred6h]
        rows from the tail of train_with_pred2.csv
      - 'values': the f0/f1/f7/f14/f41/f50 values of the file's first
        row, rounded to 2 decimals
      - 'product_values_count': hard-coded '123' (legacy field)
    """
    filepath = settings.MEDIA_ROOT + '/products/' + 'train_with_pred2' + '.csv'
    df = pd.read_csv(filepath, parse_dates=["date"], index_col="date").tail(460)
    result = [['date', 'true', 'prediction']]
    for i in range(df.shape[0]):
        row = df.iloc[i]
        # Normalize float 0.0 -> int 0 so json.dumps emits "0", not "0.0".
        activity = 0 if row['activity'] == 0 else row['activity']
        pred6h = 0 if row['pred6h'] == 0 else row['pred6h']
        # Keep only the time-of-day part of the timestamp for the chart.
        result.append([str(df.index[i]).split(' ')[1], activity, pred6h])
    first_row = df.head(1)
    # .iloc[-1] replaces the deprecated positional Series[-1] lookup.
    values = [round(first_row[col].iloc[-1], 2)
              for col in ('f0', 'f1', 'f7', 'f14', 'f41', 'f50')]
    import json
    response = HttpResponse(json.dumps({'product_values_count': '123',
                                        'prediction': result,
                                        'values': values}))
    return response
def atc_update(request):
    """Return placeholder atactic-prediction data as JSON.

    NOTE(review): the real atactic predictions are not wired up -- this
    returns a single hard-coded row (header + one sample). Confirm
    before relying on this endpoint for real data.
    """
    atc_result = [['date', 'true', 'prediction'],
                  ['2012-12-01', '10', '20']]
    import json
    response = HttpResponse(json.dumps({'product_values_count': '123',
                                        'atc_prediction': atc_result}))
    return response
def index(request):
    """Render index_boot.html with the full activity history, the
    pred6h predictions, and the latest f0..f5 feature values."""
    # NOTE(review): Windows-style path separators -- presumably deployed
    # on Windows; confirm before switching to os.path.join.
    imgs_adress = settings.STATIC_ROOT + '\\hello\\hello_imgs\\'
    filepath = settings.MEDIA_ROOT + '/products/' + 'train_with_pred2' + '.csv'
    df = pd.read_csv(filepath, parse_dates=["date"], index_col="date")
    true_values = [df.iloc[i]['activity'] for i in range(df.shape[0])]
    predictions = [df.iloc[i]['pred6h'] for i in range(df.shape[0])]
    dates = [str(ts) for ts in df.index]
    context = {
        'predictions': 'index',
        'pred': 'index',
        'imgs_adress': imgs_adress,
        'df': (true_values, predictions, dates),
        # .iloc[-1] replaces the deprecated positional Series[-1] lookup.
        'values': [df[c].iloc[-1] for c in ('f0', 'f1', 'f2', 'f3', 'f4', 'f5')],
    }
    return render(request, "index_boot.html", context)
def atc_index(request):
    """Render index_atactic.html with the same activity/prediction data
    as index() -- only the template differs."""
    # NOTE(review): Windows-style path separators -- presumably deployed
    # on Windows; confirm before switching to os.path.join.
    imgs_adress = settings.STATIC_ROOT + '\\hello\\hello_imgs\\'
    filepath = settings.MEDIA_ROOT + '/products/' + 'train_with_pred2' + '.csv'
    df = pd.read_csv(filepath, parse_dates=["date"], index_col="date")
    true_values = [df.iloc[i]['activity'] for i in range(df.shape[0])]
    predictions = [df.iloc[i]['pred6h'] for i in range(df.shape[0])]
    dates = [str(ts) for ts in df.index]
    context = {
        'predictions': 'index',
        'pred': 'index',
        'imgs_adress': imgs_adress,
        'df': (true_values, predictions, dates),
        # .iloc[-1] replaces the deprecated positional Series[-1] lookup.
        'values': [df[c].iloc[-1] for c in ('f0', 'f1', 'f2', 'f3', 'f4', 'f5')],
    }
    return render(request, "index_atactic.html", context)
def db(request):
    """Render the index page with placeholder 'db' context values.

    NOTE(review): despite the name, no database is accessed here.
    """
    context = {'predictions': 'db', 'df': 'db', 'pred': 'db'}
    return render(request, "index.html", context)
| 47.452229
| 202
| 0.621745
| 1,029
| 7,450
| 4.36346
| 0.127308
| 0.037416
| 0.043653
| 0.044543
| 0.864365
| 0.864365
| 0.835857
| 0.81804
| 0.787305
| 0.776169
| 0
| 0.049069
| 0.171141
| 7,450
| 156
| 203
| 47.75641
| 0.678057
| 0.470738
| 0
| 0.520548
| 0
| 0
| 0.159574
| 0.010898
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082192
| false
| 0
| 0.260274
| 0
| 0.424658
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
840d4647ff3a3d189ba6485ac1af6131cc4f0783
| 133
|
py
|
Python
|
BricksO/bricks/cadastro_usuario/__init__.py
|
Jhonan01/Brick
|
09d62d8cde3a5503ad8b84eaea54edbd91445479
|
[
"Apache-2.0"
] | null | null | null |
BricksO/bricks/cadastro_usuario/__init__.py
|
Jhonan01/Brick
|
09d62d8cde3a5503ad8b84eaea54edbd91445479
|
[
"Apache-2.0"
] | null | null | null |
BricksO/bricks/cadastro_usuario/__init__.py
|
Jhonan01/Brick
|
09d62d8cde3a5503ad8b84eaea54edbd91445479
|
[
"Apache-2.0"
] | null | null | null |
from flask import Blueprint

# Blueprint for the user-registration ("cadastro_usuario") section of the app.
cadastroUsuario_bp = Blueprint('cadastro_usuario',__name__)

# Imported last, after the blueprint exists, because routes registers its
# view functions on cadastroUsuario_bp (avoids a circular import).
from bricks.cadastro_usuario import routes
| 22.166667
| 59
| 0.849624
| 16
| 133
| 6.625
| 0.6875
| 0.283019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097744
| 133
| 5
| 60
| 26.6
| 0.883333
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
0816a6312b7a455adf1c44bb0ea0a4b9958c349f
| 41,730
|
py
|
Python
|
nova/tests/api/openstack/contrib/test_security_groups.py
|
armaan/nova
|
22859fccb95502efcb73ecf2bd827c45c0886bd3
|
[
"Apache-2.0"
] | 1
|
2021-11-08T10:11:44.000Z
|
2021-11-08T10:11:44.000Z
|
nova/tests/api/openstack/contrib/test_security_groups.py
|
armaan/nova
|
22859fccb95502efcb73ecf2bd827c45c0886bd3
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/api/openstack/contrib/test_security_groups.py
|
armaan/nova
|
22859fccb95502efcb73ecf2bd827c45c0886bd3
|
[
"Apache-2.0"
] | 1
|
2020-05-10T16:36:03.000Z
|
2020-05-10T16:36:03.000Z
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mox
import nova
import unittest
import webob
from xml.dom import minidom
from nova import exception
from nova import test
from nova.api.openstack.contrib import security_groups
from nova.tests.api.openstack import fakes
def _get_create_request_json(body_dict):
    """Build a JSON POST request for the security-group create endpoint."""
    request = webob.Request.blank('/v1.1/123/os-security-groups')
    request.method = 'POST'
    request.headers['Content-Type'] = 'application/json'
    request.body = json.dumps(body_dict)
    return request
def _create_security_group_json(security_group):
    """POST *security_group* as JSON through the fake WSGI app and
    return the response."""
    payload = _create_security_group_request_dict(security_group)
    return _get_create_request_json(payload).get_response(fakes.wsgi_app())
def _create_security_group_request_dict(security_group):
sg = {}
if security_group is not None:
name = security_group.get('name', None)
description = security_group.get('description', None)
if name:
sg['name'] = security_group['name']
if description:
sg['description'] = security_group['description']
return {'security_group': sg}
def return_server(context, server_id):
    """Stub for nova.db.api.instance_get: a running (state 1) server."""
    return dict(id=server_id, state=0x01, host="localhost")
def return_non_running_server(context, server_id):
    """Stub for nova.db.api.instance_get: a non-running (state 2) server."""
    return dict(id=server_id, state=0x02, host="localhost")
def return_security_group(context, project_id, group_name):
    """Stub for security_group_get_by_name: a group that already has
    one associated instance (id 1)."""
    group = {'id': 1, 'name': group_name}
    group['instances'] = [{'id': 1}]
    return group
def return_security_group_without_instances(context, project_id, group_name):
    """Stub for security_group_get_by_name: a group with no instances."""
    return dict(id=1, name=group_name)
def return_server_nonexistant(context, server_id):
    """Stub for nova.db.api.instance_get that raises InstanceNotFound."""
    raise exception.InstanceNotFound(instance_id=server_id)
class TestSecurityGroups(test.TestCase):
    """Tests for the os-security-groups extension: CRUD on security
    groups plus (de)associating groups with server instances, over both
    the JSON and XML content types.

    Requests are driven through the full WSGI stack via
    fakes.wsgi_app(); nova.db.api calls are replaced with the
    module-level stub functions, self.stubs, or mox mocks as needed.
    """

    def setUp(self):
        super(TestSecurityGroups, self).setUp()

    def tearDown(self):
        super(TestSecurityGroups, self).tearDown()

    def _create_security_group_request_dict(self, security_group):
        # NOTE(review): duplicates the module-level helper of the same
        # name; only truthy name/description values are copied through.
        sg = {}
        if security_group is not None:
            name = security_group.get('name', None)
            description = security_group.get('description', None)
            if name:
                sg['name'] = security_group['name']
            if description:
                sg['description'] = security_group['description']
        return {'security_group': sg}

    def _format_create_xml_request_body(self, body_dict):
        # Build the XML request body by hand; <description> is optional.
        sg = body_dict['security_group']
        body_parts = []
        body_parts.extend([
            '<?xml version="1.0" encoding="UTF-8"?>',
            '<security_group xmlns="http://docs.openstack.org/ext/'
            'securitygroups/api/v1.1"',
            ' name="%s">' % (sg['name'])])
        if 'description' in sg:
            body_parts.append('<description>%s</description>'
                              % sg['description'])
        body_parts.append('</security_group>')
        return ''.join(body_parts)

    def _get_create_request_xml(self, body_dict):
        req = webob.Request.blank('/v1.1/123/os-security-groups')
        req.headers['Content-Type'] = 'application/xml'
        req.content_type = 'application/xml'
        req.accept = 'application/xml'
        req.method = 'POST'
        req.body = self._format_create_xml_request_body(body_dict)
        return req

    def _create_security_group_xml(self, security_group):
        body_dict = self._create_security_group_request_dict(security_group)
        request = self._get_create_request_xml(body_dict)
        response = request.get_response(fakes.wsgi_app())
        return response

    def _delete_security_group(self, id):
        request = webob.Request.blank('/v1.1/123/os-security-groups/%s'
                                      % id)
        request.method = 'DELETE'
        response = request.get_response(fakes.wsgi_app())
        return response

    def test_create_security_group_json(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        res_dict = json.loads(response.body)
        self.assertEqual(res_dict['security_group']['name'], "test")
        self.assertEqual(res_dict['security_group']['description'],
                         "group-description")
        self.assertEquals(response.status_int, 200)

    def test_create_security_group_xml(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = "group-description"
        response = \
            self._create_security_group_xml(security_group)
        self.assertEquals(response.status_int, 200)
        dom = minidom.parseString(response.body)
        sg = dom.childNodes[0]
        self.assertEquals(sg.nodeName, 'security_group')
        self.assertEqual(security_group['name'], sg.getAttribute('name'))

    def test_create_security_group_with_no_name_json(self):
        security_group = {}
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_no_description_json(self):
        security_group = {}
        security_group['name'] = "test"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_blank_name_json(self):
        security_group = {}
        security_group['name'] = ""
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_whitespace_name_json(self):
        security_group = {}
        security_group['name'] = " "
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_blank_description_json(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = ""
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_whitespace_description_json(self):
        security_group = {}
        security_group['name'] = "name"
        security_group['description'] = " "
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_duplicate_name_json(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 200)
        # Creating the same group a second time must be rejected.
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_with_no_body_json(self):
        request = _get_create_request_json(body_dict=None)
        response = request.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 422)

    def test_create_security_group_with_no_security_group(self):
        body_dict = {}
        body_dict['no-securityGroup'] = None
        request = _get_create_request_json(body_dict)
        response = request.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 422)

    def test_create_security_group_above_255_characters_name_json(self):
        security_group = {}
        # 16 + 6*40 = 256 characters: one over the 255 limit.
        security_group['name'] = ("1234567890123456"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890")
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_above_255_characters_description_json(self):
        security_group = {}
        security_group['name'] = "test"
        # 16 + 6*40 = 256 characters: one over the 255 limit.
        security_group['description'] = ("1234567890123456"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890"
                                  "1234567890123456789012345678901234567890")
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_non_string_name_json(self):
        security_group = {}
        security_group['name'] = 12
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_create_security_group_non_string_description_json(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = 12
        response = _create_security_group_json(security_group)
        self.assertEquals(response.status_int, 400)

    def test_get_security_group_list(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        req = webob.Request.blank('/v1.1/123/os-security-groups')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'GET'
        response = req.get_response(fakes.wsgi_app())
        res_dict = json.loads(response.body)
        # The tenant's implicit 'default' group (id 1) is listed first,
        # followed by the group created above (id 2).
        expected = {'security_groups': [
                    {'id': 1,
                     'name': "default",
                     'tenant_id': "123",
                     "description": "default",
                     "rules": []
                     },
                    ]
                    }
        expected['security_groups'].append(
            {
                'id': 2,
                'name': "test",
                'tenant_id': "123",
                "description": "group-description",
                "rules": []
            }
        )
        self.assertEquals(response.status_int, 200)
        self.assertEquals(res_dict, expected)

    def test_get_security_group_by_id(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        res_dict = json.loads(response.body)
        req = webob.Request.blank('/v1.1/123/os-security-groups/%s' %
                                  res_dict['security_group']['id'])
        req.headers['Content-Type'] = 'application/json'
        req.method = 'GET'
        response = req.get_response(fakes.wsgi_app())
        res_dict = json.loads(response.body)
        expected = {
            'security_group': {
                'id': 2,
                'name': "test",
                'tenant_id': "123",
                'description': "group-description",
                'rules': []
            }
        }
        self.assertEquals(res_dict, expected)

    def test_get_security_group_by_invalid_id(self):
        req = webob.Request.blank('/v1.1/123/os-security-groups/invalid')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'GET'
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_get_security_group_by_non_existing_id(self):
        req = webob.Request.blank('/v1.1/123/os-security-groups/111111111')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'GET'
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 404)

    def test_delete_security_group_by_id(self):
        security_group = {}
        security_group['name'] = "test"
        security_group['description'] = "group-description"
        response = _create_security_group_json(security_group)
        security_group = json.loads(response.body)['security_group']
        response = self._delete_security_group(security_group['id'])
        self.assertEquals(response.status_int, 202)
        # A second delete of the same group must 404.
        response = self._delete_security_group(security_group['id'])
        self.assertEquals(response.status_int, 404)

    def test_delete_security_group_by_invalid_id(self):
        response = self._delete_security_group('invalid')
        self.assertEquals(response.status_int, 400)

    def test_delete_security_group_by_non_existing_id(self):
        response = self._delete_security_group(11111111)
        self.assertEquals(response.status_int, 404)

    def test_associate_by_non_existing_security_group_name(self):
        body = dict(addSecurityGroup=dict(name='non-existing'))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 404)

    def test_associate_by_invalid_server_id(self):
        body = dict(addSecurityGroup=dict(name='test'))
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        req = webob.Request.blank('/v1.1/123/servers/invalid/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_associate_without_body(self):
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        body = dict(addSecurityGroup=None)
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_associate_no_security_group_name(self):
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        body = dict(addSecurityGroup=dict())
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_associate_security_group_name_with_whitespaces(self):
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        body = dict(addSecurityGroup=dict(name=" "))
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_associate_non_existing_instance(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server_nonexistant)
        body = dict(addSecurityGroup=dict(name="test"))
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        req = webob.Request.blank('/v1.1/123/servers/10000/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 404)

    def test_associate_non_running_instance(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_non_running_server)
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group_without_instances)
        body = dict(addSecurityGroup=dict(name="test"))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_associate_already_associated_security_group_to_instance(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        body = dict(addSecurityGroup=dict(name="test"))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_associate(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        self.mox.StubOutWithMock(nova.db.api, 'instance_add_security_group')
        nova.db.api.instance_add_security_group(mox.IgnoreArg(),
                                                mox.IgnoreArg(),
                                                mox.IgnoreArg())
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group_without_instances)
        self.mox.ReplayAll()
        body = dict(addSecurityGroup=dict(name="test"))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 202)

    def test_associate_xml(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        self.mox.StubOutWithMock(nova.db.api, 'instance_add_security_group')
        nova.db.api.instance_add_security_group(mox.IgnoreArg(),
                                                mox.IgnoreArg(),
                                                mox.IgnoreArg())
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group_without_instances)
        self.mox.ReplayAll()
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/xml'
        req.method = 'POST'
        req.body = """<addSecurityGroup>
                      <name>test</name>
                      </addSecurityGroup>"""
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 202)

    def test_disassociate_by_non_existing_security_group_name(self):
        body = dict(removeSecurityGroup=dict(name='non-existing'))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 404)

    def test_disassociate_by_invalid_server_id(self):
        body = dict(removeSecurityGroup=dict(name='test'))
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        req = webob.Request.blank('/v1.1/123/servers/invalid/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_disassociate_without_body(self):
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        body = dict(removeSecurityGroup=None)
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_disassociate_no_security_group_name(self):
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        body = dict(removeSecurityGroup=dict())
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_disassociate_security_group_name_with_whitespaces(self):
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        body = dict(removeSecurityGroup=dict(name=" "))
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_disassociate_non_existing_instance(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server_nonexistant)
        body = dict(removeSecurityGroup=dict(name="test"))
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        req = webob.Request.blank('/v1.1/123/servers/10000/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 404)

    def test_disassociate_non_running_instance(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_non_running_server)
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        body = dict(removeSecurityGroup=dict(name="test"))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_disassociate_already_associated_security_group_to_instance(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group_without_instances)
        body = dict(removeSecurityGroup=dict(name="test"))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 400)

    def test_disassociate(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        self.mox.StubOutWithMock(nova.db.api, 'instance_remove_security_group')
        nova.db.api.instance_remove_security_group(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg())
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        self.mox.ReplayAll()
        body = dict(removeSecurityGroup=dict(name="test"))
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = json.dumps(body)
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 202)

    def test_disassociate_xml(self):
        self.stubs.Set(nova.db.api, 'instance_get', return_server)
        self.mox.StubOutWithMock(nova.db.api, 'instance_remove_security_group')
        nova.db.api.instance_remove_security_group(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg())
        self.stubs.Set(nova.db.api, 'security_group_get_by_name',
                       return_security_group)
        self.mox.ReplayAll()
        req = webob.Request.blank('/v1.1/123/servers/1/action')
        req.headers['Content-Type'] = 'application/xml'
        req.method = 'POST'
        req.body = """<removeSecurityGroup>
                      <name>test</name>
                      </removeSecurityGroup>"""
        response = req.get_response(fakes.wsgi_app())
        self.assertEquals(response.status_int, 202)
class TestSecurityGroupRules(test.TestCase):
def setUp(self):
super(TestSecurityGroupRules, self).setUp()
security_group = {}
security_group['name'] = "authorize-revoke"
security_group['description'] = ("Security group created for "
" authorize-revoke testing")
response = _create_security_group_json(security_group)
security_group = json.loads(response.body)
self.parent_security_group = security_group['security_group']
rules = {
"security_group_rule": {
"ip_protocol": "tcp",
"from_port": "22",
"to_port": "22",
"parent_group_id": self.parent_security_group['id'],
"cidr": "10.0.0.0/24"
}
}
res = self._create_security_group_rule_json(rules)
self.assertEquals(res.status_int, 200)
self.security_group_rule = json.loads(res.body)['security_group_rule']
def tearDown(self):
super(TestSecurityGroupRules, self).tearDown()
def _create_security_group_rule_json(self, rules):
request = webob.Request.blank('/v1.1/123/os-security-group-rules')
request.headers['Content-Type'] = 'application/json'
request.method = 'POST'
request.body = json.dumps(rules)
response = request.get_response(fakes.wsgi_app())
return response
def _delete_security_group_rule(self, id):
request = webob.Request.blank('/v1.1/123/os-security-group-rules/%s'
% id)
request.method = 'DELETE'
response = request.get_response(fakes.wsgi_app())
return response
def test_create_by_cidr_json(self):
rules = {
"security_group_rule": {
"ip_protocol": "tcp",
"from_port": "22",
"to_port": "22",
"parent_group_id": 2,
"cidr": "10.2.3.124/24"
}
}
response = self._create_security_group_rule_json(rules)
security_group_rule = json.loads(response.body)['security_group_rule']
self.assertEquals(response.status_int, 200)
self.assertNotEquals(security_group_rule['id'], 0)
self.assertEquals(security_group_rule['parent_group_id'], 2)
self.assertEquals(security_group_rule['ip_range']['cidr'],
"10.2.3.124/24")
def test_create_by_group_id_json(self):
rules = {
"security_group_rule": {
"ip_protocol": "tcp",
"from_port": "22",
"to_port": "22",
"group_id": "1",
"parent_group_id": "%s"
% self.parent_security_group['id'],
}
}
response = self._create_security_group_rule_json(rules)
self.assertEquals(response.status_int, 200)
security_group_rule = json.loads(response.body)['security_group_rule']
self.assertNotEquals(security_group_rule['id'], 0)
self.assertEquals(security_group_rule['parent_group_id'], 2)
def test_create_add_existing_rules_json(self):
    """Re-posting the rule already created in setUp is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "cidr": "10.0.0.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_no_body_json(self):
    """POSTing a JSON null body yields 422 (unprocessable entity)."""
    req = webob.Request.blank('/v1.1/123/os-security-group-rules')
    req.method = 'POST'
    req.headers['Content-Type'] = 'application/json'
    req.body = json.dumps(None)
    resp = req.get_response(fakes.wsgi_app())
    self.assertEqual(resp.status_int, 422)
def test_create_with_no_security_group_rule_in_body_json(self):
    """A body without a security_group_rule key yields 422."""
    req = webob.Request.blank('/v1.1/123/os-security-group-rules')
    req.method = 'POST'
    req.headers['Content-Type'] = 'application/json'
    req.body = json.dumps({'test': "test"})
    resp = req.get_response(fakes.wsgi_app())
    self.assertEqual(resp.status_int, 422)
def test_create_with_invalid_parent_group_id_json(self):
    """A non-numeric parent_group_id is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "parent_group_id": "invalid"}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_non_existing_parent_group_id_json(self):
    """A syntactically valid but unknown parent_group_id yields 404."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "group_id": "invalid", "parent_group_id": "1111111111111"}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 404)
def test_create_with_invalid_protocol_json(self):
    """An unknown ip_protocol is rejected with 400."""
    rule = {"ip_protocol": "invalid-protocol", "from_port": "22",
            "to_port": "22", "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_no_protocol_json(self):
    """Omitting ip_protocol entirely is rejected with 400."""
    rule = {"from_port": "22", "to_port": "22", "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_invalid_from_port_json(self):
    """A from_port outside the valid port range is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "666666", "to_port": "22",
            "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_invalid_to_port_json(self):
    """A to_port outside the valid port range is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "666666",
            "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_non_numerical_from_port_json(self):
    """A non-numeric from_port is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "invalid", "to_port": "22",
            "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_non_numerical_to_port_json(self):
    """A non-numeric to_port is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "invalid",
            "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_no_to_port_json(self):
    """Omitting to_port is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "cidr": "10.2.2.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_invalid_cidr_json(self):
    """A malformed CIDR string is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "cidr": "10.2.22222.0/24",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_no_cidr_group_json(self):
    """Omitting both cidr and group_id defaults the cidr to 0.0.0.0/0."""
    parent_id = self.parent_security_group['id']
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "parent_group_id": str(parent_id)}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    created = json.loads(resp.body)['security_group_rule']
    self.assertEqual(resp.status_int, 200)
    self.assertNotEqual(created['id'], 0)
    self.assertEqual(created['parent_group_id'], parent_id)
    self.assertEqual(created['ip_range']['cidr'], "0.0.0.0/0")
def test_create_with_invalid_group_id_json(self):
    """A non-numeric group_id is rejected with 400."""
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "group_id": "invalid",
            "parent_group_id": str(self.parent_security_group['id'])}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_create_with_empty_group_id_json(self):
    """An empty-string group_id is rejected with 400.

    Bug fix: this test previously sent ``"group_id": "invalid"``, making
    it an exact duplicate of test_create_with_invalid_group_id_json and
    leaving the empty-string case it is named for untested.
    """
    rules = {
        "security_group_rule": {
            "ip_protocol": "tcp",
            "from_port": "22",
            "to_port": "22",
            "group_id": "",
            "parent_group_id": "%s" % self.parent_security_group['id'],
        }
    }
    response = self._create_security_group_rule_json(rules)
    self.assertEquals(response.status_int, 400)
def test_create_with_nonexistent_group_id_json(self):
    """A numeric group_id that does not exist is rejected with 400.

    Bug fix: this method was previously also named
    test_create_with_invalid_group_id_json, silently shadowing the
    earlier test of that name so it never ran; renamed to describe the
    scenario it actually exercises (well-formed but unknown group id).
    """
    rules = {
        "security_group_rule": {
            "ip_protocol": "tcp",
            "from_port": "22",
            "to_port": "22",
            "group_id": "222222",
            "parent_group_id": "%s" % self.parent_security_group['id'],
        }
    }
    response = self._create_security_group_rule_json(rules)
    self.assertEquals(response.status_int, 400)
def test_create_rule_with_same_group_parent_id_json(self):
    """A rule whose source group is its own parent group is rejected (400)."""
    group_id = str(self.parent_security_group['id'])
    rule = {"ip_protocol": "tcp", "from_port": "22", "to_port": "22",
            "group_id": group_id, "parent_group_id": group_id}
    resp = self._create_security_group_rule_json({"security_group_rule": rule})
    self.assertEqual(resp.status_int, 400)
def test_delete(self):
    """Deleting the setUp rule succeeds once (202), then 404s."""
    rule_id = self.security_group_rule['id']
    first = self._delete_security_group_rule(rule_id)
    self.assertEqual(first.status_int, 202)
    second = self._delete_security_group_rule(rule_id)
    self.assertEqual(second.status_int, 404)
def test_delete_invalid_rule_id(self):
    """A non-numeric rule id on DELETE is rejected with 400."""
    resp = self._delete_security_group_rule('invalid')
    self.assertEqual(resp.status_int, 400)
def test_delete_non_existing_rule_id(self):
    """Deleting a well-formed but unknown rule id yields 404."""
    resp = self._delete_security_group_rule(22222222222222)
    self.assertEqual(resp.status_int, 404)
class TestSecurityGroupRulesXMLDeserializer(unittest.TestCase):
    """XML deserialization of security-group-rule create requests."""

    def setUp(self):
        self.deserializer = security_groups.SecurityGroupRulesXMLDeserializer()

    def _parse(self, xml):
        """Deserialize *xml* as a create request and return the body dict."""
        return self.deserializer.deserialize(xml, 'create')['body']

    def test_create_request(self):
        """Every rule element comes through as a string field."""
        xml = """
<security_group_rule>
<parent_group_id>12</parent_group_id>
<from_port>22</from_port>
<to_port>22</to_port>
<group_id></group_id>
<ip_protocol>tcp</ip_protocol>
<cidr>10.0.0.0/24</cidr>
</security_group_rule>"""
        expected = {"security_group_rule": {"parent_group_id": "12",
                                            "from_port": "22",
                                            "to_port": "22",
                                            "ip_protocol": "tcp",
                                            "group_id": "",
                                            "cidr": "10.0.0.0/24"}}
        self.assertEqual(self._parse(xml), expected)

    def test_create_no_protocol_request(self):
        """A missing ip_protocol element is simply absent from the body."""
        xml = """
<security_group_rule>
<parent_group_id>12</parent_group_id>
<from_port>22</from_port>
<to_port>22</to_port>
<group_id></group_id>
<cidr>10.0.0.0/24</cidr>
</security_group_rule>"""
        expected = {"security_group_rule": {"parent_group_id": "12",
                                            "from_port": "22",
                                            "to_port": "22",
                                            "group_id": "",
                                            "cidr": "10.0.0.0/24"}}
        self.assertEqual(self._parse(xml), expected)
class TestSecurityGroupXMLDeserializer(unittest.TestCase):
    """XML deserialization of security-group create requests."""

    def setUp(self):
        self.deserializer = security_groups.SecurityGroupXMLDeserializer()

    def _parse(self, xml):
        """Deserialize *xml* as a create request and return the body dict."""
        return self.deserializer.deserialize(xml, 'create')['body']

    def test_create_request(self):
        """Both the name attribute and description element deserialize."""
        xml = """
<security_group name="test">
<description>test</description>
</security_group>"""
        self.assertEqual(self._parse(xml),
                         {"security_group": {"name": "test",
                                             "description": "test"}})

    def test_create_no_description_request(self):
        """A missing description element is simply absent from the body."""
        xml = """
<security_group name="test">
</security_group>"""
        self.assertEqual(self._parse(xml),
                         {"security_group": {"name": "test"}})

    def test_create_no_name_request(self):
        """A missing name attribute is simply absent from the body."""
        xml = """
<security_group>
<description>test</description>
</security_group>"""
        self.assertEqual(self._parse(xml),
                         {"security_group": {"description": "test"}})
| 40.436047
| 79
| 0.605296
| 4,566
| 41,730
| 5.231932
| 0.052562
| 0.161623
| 0.049102
| 0.084139
| 0.895894
| 0.883168
| 0.864289
| 0.822052
| 0.794006
| 0.77048
| 0
| 0.040831
| 0.278696
| 41,730
| 1,031
| 80
| 40.475267
| 0.752824
| 0.014857
| 0
| 0.693273
| 0
| 0
| 0.177759
| 0.056722
| 0
| 0
| 0.000195
| 0
| 0.099202
| 1
| 0.103763
| false
| 0
| 0.011403
| 0.004561
| 0.13569
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
08304aa363b103f699723d58633d3b32be64d7a4
| 657
|
py
|
Python
|
math2.py
|
napend/MathProblem
|
521e4fa4c59d7e9935580518e9d0f065c3f6be4f
|
[
"Apache-2.0"
] | null | null | null |
math2.py
|
napend/MathProblem
|
521e4fa4c59d7e9935580518e9d0f065c3f6be4f
|
[
"Apache-2.0"
] | null | null | null |
math2.py
|
napend/MathProblem
|
521e4fa4c59d7e9935580518e9d0f065c3f6be4f
|
[
"Apache-2.0"
] | null | null | null |
#This app does your math
addition = input("Print your math sign, +, -, *, /: ")
if addition == "+":
a = int(input("First Number: "))
b = int(input("Seccond Number: "))
c = a + b
print(c)
elif addition == "-":
a = int(input("First Number: "))
b = int(input("Seccond Number: "))
c = a - b
print(c)
elif addition == "*":
a = int(input("First Number: "))
b = int(input("Seccond Number: "))
c = a * b
print(c)
elif addition == "/":
a = int(input("First Number: "))
b = int(input("Seccond Number: "))
c = a / b
print(c)
else:
print("That is not a valid operation. Please do +, -, *, /")
| 19.909091
| 64
| 0.517504
| 89
| 657
| 3.820225
| 0.292135
| 0.188235
| 0.141176
| 0.2
| 0.729412
| 0.729412
| 0.729412
| 0.729412
| 0.729412
| 0.729412
| 0
| 0
| 0.281583
| 657
| 32
| 65
| 20.53125
| 0.720339
| 0.035008
| 0
| 0.521739
| 0
| 0
| 0.330174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.217391
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0849927e378d4bf29b57c624cf8dfc5fcc42edd1
| 2,542
|
py
|
Python
|
tests/test_get_profiles.py
|
razzius/hms-weave
|
e21de7b673af78d84e474560cdf80376c1ee0c18
|
[
"MIT"
] | 4
|
2018-10-03T17:33:02.000Z
|
2020-01-30T20:12:09.000Z
|
tests/test_get_profiles.py
|
razzius/hms-weave
|
e21de7b673af78d84e474560cdf80376c1ee0c18
|
[
"MIT"
] | 201
|
2018-10-03T02:28:11.000Z
|
2020-05-19T01:56:14.000Z
|
tests/test_get_profiles.py
|
razzius/weave
|
e21de7b673af78d84e474560cdf80376c1ee0c18
|
[
"MIT"
] | 2
|
2018-11-17T02:54:41.000Z
|
2019-10-26T10:35:29.000Z
|
import http
from server.models import ProfileStar, VerificationEmail, VerificationToken, save
from .utils import create_test_profile, create_test_verification_token
def test_get_profiles_missing_token(client):
    """Without any session cookie the profiles endpoint is unauthorized."""
    resp = client.get("/api/profiles")
    assert resp.status_code == http.HTTPStatus.UNAUTHORIZED.value
def test_get_profiles_bogus_token(client):
    """A made-up session cookie is rejected as unauthorized."""
    headers = {"cookie": "session=fake"}
    resp = client.get("/api/profiles", headers=headers)
    assert resp.status_code == http.HTTPStatus.UNAUTHORIZED.value
def test_get_profiles_empty(client, auth):
    """An authenticated user with no profiles gets an empty listing."""
    email = save(VerificationEmail(email="test@test.com"))
    token = save(VerificationToken(token="1234", email_id=email.id))
    auth.login(token.token)

    resp = client.get("/api/profiles")

    assert resp.status_code == http.HTTPStatus.OK.value
    assert resp.json == {"profile_count": 0, "profiles": []}
def test_get_profiles_search_empty(client, auth):
    """Searching an empty profile set returns zero results, not an error."""
    email = save(VerificationEmail(email="test@test.com"))
    token = save(VerificationToken(token="1234", email_id=email.id))
    auth.login(token.token)

    resp = client.get("/api/profiles", query_string={"query": "abc"})

    assert resp.status_code == http.HTTPStatus.OK.value
    assert resp.json == {"profile_count": 0, "profiles": []}
def test_get_starred_profile(client, auth):
    """A profile the viewer has starred comes back with starred truthy."""
    token = create_test_verification_token()
    profile = create_test_profile(available_for_mentoring=True)
    star = ProfileStar(
        from_verification_email_id=token.email.id,
        to_verification_email_id=profile.verification_email_id,
    )
    save(star)
    auth.login(token.token)

    resp = client.get("/api/profiles")

    assert resp.json["profiles"][0]["starred"]
def test_get_profile_other_user_starred(client, auth):
    """Stars from other users do not mark the profile starred for the viewer."""
    viewer_token = create_test_verification_token()
    other_token = create_test_verification_token()
    profile = create_test_profile(available_for_mentoring=True)
    star = ProfileStar(
        from_verification_email_id=other_token.email.id,
        to_verification_email_id=profile.verification_email_id,
    )
    save(star)
    auth.login(viewer_token.token)

    resp = client.get("/api/profiles")

    assert not resp.json["profiles"][0]["starred"]
| 28.244444
| 81
| 0.737215
| 296
| 2,542
| 6.023649
| 0.192568
| 0.143017
| 0.08525
| 0.067302
| 0.839035
| 0.812114
| 0.812114
| 0.784072
| 0.748177
| 0.748177
| 0
| 0.005605
| 0.15775
| 2,542
| 89
| 82
| 28.561798
| 0.827184
| 0
| 0
| 0.576923
| 0
| 0
| 0.082612
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.115385
| false
| 0
| 0.057692
| 0
| 0.173077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f22ea3f8bdfa37bf34892a84bb05d18a66c30a12
| 235
|
py
|
Python
|
djangocms_equation/settings.py
|
s-weigand/djangocms-equation
|
6ccc02e93710fd7fef71ab03a8f2d3a9f6e70356
|
[
"Apache-2.0"
] | 7
|
2019-04-14T01:25:19.000Z
|
2020-08-26T08:30:02.000Z
|
djangocms_equation/settings.py
|
s-weigand/djangocms-equation
|
6ccc02e93710fd7fef71ab03a8f2d3a9f6e70356
|
[
"Apache-2.0"
] | 285
|
2019-04-06T23:21:02.000Z
|
2022-03-31T04:04:09.000Z
|
djangocms_equation/settings.py
|
s-weigand/djangocms-equation
|
6ccc02e93710fd7fef71ab03a8f2d3a9f6e70356
|
[
"Apache-2.0"
] | null | null | null |
"""Plugin specific settings with their default value."""
from django.conf import settings
KATEX_EQUATION_SETTINGS = getattr(settings, "KATEX_EQUATION_SETTINGS", {"allow_copy": False})
"""Default seetings of KATEX_EQUATION_SETTINGS"""
| 39.166667
| 93
| 0.795745
| 29
| 235
| 6.206897
| 0.655172
| 0.216667
| 0.35
| 0.322222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093617
| 235
| 5
| 94
| 47
| 0.84507
| 0.212766
| 0
| 0
| 0
| 0
| 0.253846
| 0.176923
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f23b6c98565135cbe58c281e24573896f1952b59
| 52
|
py
|
Python
|
sample_py/test_failure.py
|
paolorechia/fast-pytest-github-action
|
3d3b1cccb10a42cc45ccd9a4865e194178c3fa4d
|
[
"MIT"
] | 1
|
2021-08-29T13:38:04.000Z
|
2021-08-29T13:38:04.000Z
|
sample_py/test_failure.py
|
paolorechia/fast-pytest-github-action
|
3d3b1cccb10a42cc45ccd9a4865e194178c3fa4d
|
[
"MIT"
] | null | null | null |
sample_py/test_failure.py
|
paolorechia/fast-pytest-github-action
|
3d3b1cccb10a42cc45ccd9a4865e194178c3fa4d
|
[
"MIT"
] | null | null | null |
import pytest
def test_failure():
    """Deliberately failing sample test.

    ``Failure`` is an undefined name, so this raises NameError at run
    time — presumably so the surrounding CI-action repo can demonstrate
    how a failing test is reported.
    """
    assert Failure
| 10.4
| 19
| 0.769231
| 7
| 52
| 5.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173077
| 52
| 4
| 20
| 13
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f23c6ae3801de0486a41a602e80ab4a89a3f96a7
| 280
|
py
|
Python
|
tests/test_algorithms/test_shortest_paths.py
|
jsbeckwith/unweaver
|
a4ba9e4e288c75e93bf7f9d67bc11680f09c3da0
|
[
"Apache-2.0"
] | 4
|
2019-04-24T16:38:57.000Z
|
2021-12-28T20:38:08.000Z
|
tests/test_algorithms/test_shortest_paths.py
|
jsbeckwith/unweaver
|
a4ba9e4e288c75e93bf7f9d67bc11680f09c3da0
|
[
"Apache-2.0"
] | 3
|
2021-06-02T04:06:33.000Z
|
2021-11-02T01:47:20.000Z
|
tests/test_algorithms/test_shortest_paths.py
|
jsbeckwith/unweaver
|
a4ba9e4e288c75e93bf7f9d67bc11680f09c3da0
|
[
"Apache-2.0"
] | 1
|
2020-08-13T04:42:05.000Z
|
2020-08-13T04:42:05.000Z
|
from unweaver.algorithms.shortest_paths import shortest_paths
from ..constants import cost_fun, EXAMPLE_NODE
def test_shortest_paths(built_G):
    """Smoke-test shortest_paths on the prebuilt graph fixture.

    Only verifies the call completes without raising; the returned
    paths are not inspected.
    """
    # TODO: test output
    # TODO: test augmented graph context version as well
    # 400 is the cutoff passed alongside the shared cost function and
    # start node from ..constants.
    shortest_paths(built_G, EXAMPLE_NODE, cost_fun, 400)
| 31.111111
| 76
| 0.8
| 41
| 280
| 5.195122
| 0.585366
| 0.244131
| 0.169014
| 0.178404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012448
| 0.139286
| 280
| 8
| 77
| 35
| 0.871369
| 0.246429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
4b7cbc4412e20e76e2884085b328d5aed291432f
| 153
|
py
|
Python
|
src/python/dart/util/rand.py
|
RetailMeNotSandbox/dart
|
58a05f56c04fadd6741501262d92aeb143cd2f2e
|
[
"MIT"
] | 18
|
2016-03-03T19:10:21.000Z
|
2021-07-14T22:37:35.000Z
|
src/python/dart/util/rand.py
|
RetailMeNotSandbox/dart
|
58a05f56c04fadd6741501262d92aeb143cd2f2e
|
[
"MIT"
] | 62
|
2016-04-11T15:17:23.000Z
|
2017-09-08T17:18:53.000Z
|
src/python/dart/util/rand.py
|
RetailMeNotSandbox/dart
|
58a05f56c04fadd6741501262d92aeb143cd2f2e
|
[
"MIT"
] | 15
|
2016-03-03T15:38:34.000Z
|
2019-03-27T19:33:08.000Z
|
import random
import string
def random_id(length=10):
    """Return a random identifier of *length* uppercase letters and digits.

    :param length: number of characters to generate (default 10)
    :returns: a string such as ``'X1B4QZ09AK'``

    Note: this uses the ``random`` module, so the ids are NOT
    cryptographically secure — use :mod:`secrets` if an id guards
    anything sensitive.
    """
    # Hoist the alphabet out of the loop (the original rebuilt the
    # concatenation on every iteration) and use ``_`` for the unused
    # loop variable.
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
| 21.857143
| 96
| 0.75817
| 23
| 153
| 4.956522
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015038
| 0.130719
| 153
| 6
| 97
| 25.5
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
4b8e213d295032e9f068c4980c98347944f449fc
| 45
|
py
|
Python
|
src/processor/utils/__init__.py
|
svetlyak40wt/python-processor
|
9126a021d603030899897803ab9973250e5b16f6
|
[
"BSD-2-Clause"
] | 40
|
2015-03-18T09:27:13.000Z
|
2021-12-31T06:25:48.000Z
|
src/processor/utils/__init__.py
|
svetlyak40wt/python-processor
|
9126a021d603030899897803ab9973250e5b16f6
|
[
"BSD-2-Clause"
] | 2
|
2015-03-19T18:31:22.000Z
|
2016-08-19T13:49:31.000Z
|
src/processor/utils/__init__.py
|
svetlyak40wt/python-processor
|
9126a021d603030899897803ab9973250e5b16f6
|
[
"BSD-2-Clause"
] | 7
|
2015-03-19T17:59:24.000Z
|
2019-09-05T15:16:19.000Z
|
# flake8: noqa
from .datastructures import *
| 15
| 29
| 0.755556
| 5
| 45
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.155556
| 45
| 2
| 30
| 22.5
| 0.868421
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4bacadc1874347a7ad6882406669610afa2105d2
| 210
|
py
|
Python
|
api_site/src/api_x/application/account_user.py
|
webee/pay
|
b48c6892686bf3f9014bb67ed119506e41050d45
|
[
"W3C"
] | 1
|
2019-10-14T11:51:49.000Z
|
2019-10-14T11:51:49.000Z
|
api_site/src/api_x/application/account_user.py
|
webee/pay
|
b48c6892686bf3f9014bb67ed119506e41050d45
|
[
"W3C"
] | null | null | null |
api_site/src/api_x/application/account_user.py
|
webee/pay
|
b48c6892686bf3f9014bb67ed119506e41050d45
|
[
"W3C"
] | null | null | null |
# -*- coding: utf-8 -*-
from api_x.zyt.vas import user
def get_user_cash_balance(account_user_id):
    """Return the cash balance for *account_user_id* via the vas user module."""
    # FIXME:
    # When `application` becomes a truly independent subsystem, this
    # should go through an API call instead of invoking vas directly.
    return user.get_user_cash_balance(account_user_id)
| 23.333333
| 54
| 0.752381
| 30
| 210
| 4.9
| 0.666667
| 0.095238
| 0.14966
| 0.244898
| 0.421769
| 0.421769
| 0.421769
| 0
| 0
| 0
| 0
| 0.005587
| 0.147619
| 210
| 8
| 55
| 26.25
| 0.815642
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
298f6a906b2aaaa7e1d4c81137e355a1184e058c
| 5,645
|
py
|
Python
|
tests/task_router/test_task_queues.py
|
quippp/twilio-python
|
22b84cdfd19a6b1bde84350053870a7c507af410
|
[
"MIT"
] | 11
|
2016-01-23T04:38:23.000Z
|
2017-11-19T04:03:25.000Z
|
venv/lib/python2.7/site-packages/tests/task_router/test_task_queues.py
|
jideobs/twilioAngular
|
eb95308d287d7dbb72fe516a633199a0af8b76b9
|
[
"MIT"
] | 1
|
2016-05-26T21:39:12.000Z
|
2016-05-26T21:39:14.000Z
|
venv/lib/python2.7/site-packages/tests/task_router/test_task_queues.py
|
jideobs/twilioAngular
|
eb95308d287d7dbb72fe516a633199a0af8b76b9
|
[
"MIT"
] | 2
|
2019-05-19T06:02:26.000Z
|
2020-12-23T11:27:20.000Z
|
import unittest
from mock import patch, Mock
from tests.tools import create_mock_json
from twilio.rest.resources.task_router.task_queues import TaskQueues, TaskQueue
AUTH = ("AC123", "token")
BASE_URI = "https://taskrouter.twilio.com/v1/Accounts/AC123/Workspaces/WSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
TASK_QUEUE_SID = "WQaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
TIMEOUT = 30
class TaskQueueTest(unittest.TestCase):
    """Tests for TaskQueues/TaskQueue resources.

    Every test patches make_twilio_request, drives the resource under
    test, and asserts the exact HTTP verb, URI, and parameters that
    would have been sent.
    """

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_create(self, request):
        """Creating a queue POSTs friendly name and activity sids."""
        resp = create_mock_json('tests/resources/task_router/task_queues_instance.json')
        resp.status_code = 201
        request.return_value = resp

        task_queues = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        task_queues.create("Test TaskQueue", "WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
        exp_params = {
            'FriendlyName': "Test TaskQueue",
            'AssignmentActivitySid': "WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            'ReservationActivitySid': 'WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
        }

        request.assert_called_with("POST",
                                   "{0}/TaskQueues".format(BASE_URI),
                                   data=exp_params, auth=AUTH, timeout=TIMEOUT,
                                   use_json_extension=False)

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_delete_instance(self, request):
        """Deleting via the instance object issues DELETE on its URI."""
        resp = Mock()
        resp.content = ""
        resp.status_code = 204
        request.return_value = resp

        uri = "{0}/TaskQueues/{1}".format(BASE_URI, TASK_QUEUE_SID)
        list_resource = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        task_queue = TaskQueue(list_resource, TASK_QUEUE_SID)
        task_queue.delete()
        request.assert_called_with("DELETE", uri, auth=AUTH, timeout=TIMEOUT,
                                   use_json_extension=False)

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_delete_list(self, request):
        """Deleting via the list resource issues the same DELETE."""
        resp = Mock()
        resp.content = ""
        resp.status_code = 204
        request.return_value = resp

        uri = "{0}/TaskQueues/{1}".format(BASE_URI, TASK_QUEUE_SID)
        list_resource = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        list_resource.delete(TASK_QUEUE_SID)
        request.assert_called_with("DELETE", uri, auth=AUTH, timeout=TIMEOUT,
                                   use_json_extension=False)

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_get(self, request):
        """Fetching one queue by sid GETs its instance URI."""
        resp = create_mock_json('tests/resources/task_router/task_queues_instance.json')
        resp.status_code = 200
        request.return_value = resp

        uri = "{0}/TaskQueues/{1}".format(BASE_URI, TASK_QUEUE_SID)
        list_resource = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        list_resource.get(TASK_QUEUE_SID)
        request.assert_called_with("GET", uri, auth=AUTH, timeout=TIMEOUT,
                                   use_json_extension=False)

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_list(self, request):
        """Listing queues GETs the collection URI with empty params."""
        resp = create_mock_json('tests/resources/task_router/task_queues_list.json')
        resp.status_code = 200
        request.return_value = resp

        uri = "{0}/TaskQueues".format(BASE_URI)
        list_resource = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        list_resource.list()
        request.assert_called_with("GET", uri, params={}, auth=AUTH, timeout=TIMEOUT,
                                   use_json_extension=False)

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_update_instance(self, request):
        """Updating via the instance POSTs the camel-cased field names."""
        resp = create_mock_json('tests/resources/task_router/task_queues_instance.json')
        resp.status_code = 201
        request.return_value = resp

        uri = "{0}/TaskQueues/{1}".format(BASE_URI, TASK_QUEUE_SID)
        list_resource = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        task_queue = TaskQueue(list_resource, TASK_QUEUE_SID)
        task_queue.update(friendly_name='Test TaskQueue', assignment_activity_sid='WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
                          reservation_activity_sid='WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
        exp_params = {
            'FriendlyName': "Test TaskQueue",
            'AssignmentActivitySid': "WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            'ReservationActivitySid': 'WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
        }
        request.assert_called_with("POST", uri, data=exp_params,
                                   auth=AUTH, timeout=TIMEOUT,
                                   use_json_extension=False)

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_update_list(self, request):
        """Updating via the list resource POSTs the same payload."""
        resp = create_mock_json('tests/resources/task_router/task_queues_instance.json')
        resp.status_code = 201
        request.return_value = resp

        uri = "{0}/TaskQueues/{1}".format(BASE_URI, TASK_QUEUE_SID)
        list_resource = TaskQueues(BASE_URI, AUTH, TIMEOUT)
        list_resource.update(TASK_QUEUE_SID, friendly_name='Test TaskQueue',
                             assignment_activity_sid='WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
                             reservation_activity_sid='WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
        exp_params = {
            'FriendlyName': "Test TaskQueue",
            'AssignmentActivitySid': "WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            'ReservationActivitySid': 'WAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
        }
        request.assert_called_with("POST", uri, data=exp_params, auth=AUTH,
                                   timeout=TIMEOUT, use_json_extension=False)
| 44.801587
| 120
| 0.661293
| 586
| 5,645
| 6.085324
| 0.134812
| 0.029445
| 0.037016
| 0.047112
| 0.874369
| 0.856702
| 0.838755
| 0.825575
| 0.825575
| 0.810993
| 0
| 0.009818
| 0.242161
| 5,645
| 125
| 121
| 45.16
| 0.823749
| 0
| 0
| 0.621359
| 0
| 0
| 0.270151
| 0.204429
| 0
| 0
| 0
| 0
| 0.067961
| 1
| 0.067961
| false
| 0
| 0.038835
| 0
| 0.116505
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29df0997bc6eb066886859df5b0cd33c842d1024
| 13
|
py
|
Python
|
bugtests/test223s.py
|
doom38/jython_v2.2.1
|
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
|
[
"CNRI-Jython"
] | null | null | null |
bugtests/test223s.py
|
doom38/jython_v2.2.1
|
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
|
[
"CNRI-Jython"
] | null | null | null |
bugtests/test223s.py
|
doom38/jython_v2.2.1
|
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
|
[
"CNRI-Jython"
] | null | null | null |
a += b += c
| 4.333333
| 11
| 0.230769
| 3
| 13
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 13
| 2
| 12
| 6.5
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29e8c166776b27a483a2261ec26ac93e47e0cd21
| 6,841
|
py
|
Python
|
osc_bge/student/migrations/0008_studentmonthlyreport.py
|
jisuhan3201/osc-bge
|
125c441d23d7f1fdb2d9b8f42f859082e757e25a
|
[
"MIT"
] | null | null | null |
osc_bge/student/migrations/0008_studentmonthlyreport.py
|
jisuhan3201/osc-bge
|
125c441d23d7f1fdb2d9b8f42f859082e757e25a
|
[
"MIT"
] | 5
|
2020-06-05T19:49:47.000Z
|
2021-09-08T00:50:55.000Z
|
osc_bge/student/migrations/0008_studentmonthlyreport.py
|
jisuhan3201/osc-bge
|
125c441d23d7f1fdb2d9b8f42f859082e757e25a
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.9 on 2018-12-10 16:26
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: creates the StudentMonthlyReport model.

    The model holds mostly-nullable CharField/TextField columns (course
    level/target/current triplets, TOEFL/SAT/ACT scores, payment tracking)
    plus a nullable FK to student.Student with on_delete=SET_NULL and
    related_name='monthly_report'.

    NOTE(review): dates and scores are stored as CharFields rather than
    Date/Integer fields — presumably free-form input; confirm intentional.
    """

    dependencies = [
        # Must apply after the previous student-app migration.
        ('student', '0007_auto_20181209_1204'),
    ]

    operations = [
        migrations.CreateModel(
            name='StudentMonthlyReport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Bookkeeping timestamps.
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(blank=True, max_length=80, null=True)),
                ('counseling_date', models.CharField(blank=True, max_length=80, null=True)),
                ('manager_confirm_date', models.CharField(blank=True, max_length=80, null=True)),
                ('school_year', models.CharField(blank=True, max_length=80, null=True)),
                ('grade', models.CharField(blank=True, max_length=80, null=True)),
                ('send_to_agent_date', models.CharField(blank=True, max_length=80, null=True)),
                ('college_plan', models.TextField(blank=True, null=True)),
                # Per-course level (_lv) / target (_tg) / current (_current) triplets.
                ('eng9h_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('eng9h_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('eng9h_current', models.CharField(blank=True, max_length=80, null=True)),
                ('precal_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('precal_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('precal_current', models.CharField(blank=True, max_length=80, null=True)),
                ('bioh_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('bioh_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('bioh_current', models.CharField(blank=True, max_length=80, null=True)),
                ('geo_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('geo_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('geo_current', models.CharField(blank=True, max_length=80, null=True)),
                ('cs_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('cs_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('cs_current', models.CharField(blank=True, max_length=80, null=True)),
                ('sp_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('sp_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('sp_current', models.CharField(blank=True, max_length=80, null=True)),
                ('orch_lv', models.CharField(blank=True, max_length=80, null=True)),
                ('orch_tg', models.CharField(blank=True, max_length=80, null=True)),
                ('orch_current', models.CharField(blank=True, max_length=80, null=True)),
                ('comment', models.TextField(blank=True, null=True)),
                ('target_gpa', models.TextField(blank=True, null=True)),
                ('transcript', models.TextField(blank=True, null=True)),
                ('eng_skill', models.TextField(blank=True, null=True)),
                # TOEFL section scores and planning fields.
                ('toefl', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_reading', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_listening', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_speaking', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_writing', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_total', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_target', models.CharField(blank=True, max_length=80, null=True)),
                ('toefl_next_test_date', models.CharField(blank=True, max_length=80, null=True)),
                # SAT scores and planning fields.
                ('sat', models.CharField(blank=True, max_length=80, null=True)),
                ('sat_evb_reading_writing', models.CharField(blank=True, max_length=80, null=True)),
                ('sat_math', models.CharField(blank=True, max_length=80, null=True)),
                ('sat_total', models.CharField(blank=True, max_length=80, null=True)),
                ('sat_target', models.CharField(blank=True, max_length=80, null=True)),
                ('sat_next_test_date', models.CharField(blank=True, max_length=80, null=True)),
                # ACT scores and planning fields.
                ('act', models.CharField(blank=True, max_length=80, null=True)),
                ('act_eng', models.CharField(blank=True, max_length=80, null=True)),
                ('act_math', models.CharField(blank=True, max_length=80, null=True)),
                ('act_reading', models.CharField(blank=True, max_length=80, null=True)),
                ('act_sci', models.CharField(blank=True, max_length=80, null=True)),
                ('act_composition_score', models.CharField(blank=True, max_length=80, null=True)),
                ('act_target', models.CharField(blank=True, max_length=80, null=True)),
                ('act_next_test_date', models.CharField(blank=True, max_length=80, null=True)),
                ('ap_tests', models.TextField(blank=True, null=True)),
                ('sat_subjects_tests', models.TextField(blank=True, null=True)),
                ('test_prep', models.TextField(blank=True, null=True)),
                ('activities', models.TextField(blank=True, null=True)),
                ('community_services', models.TextField(blank=True, null=True)),
                ('agenda', models.TextField(blank=True, null=True)),
                ('comment2', models.TextField(blank=True, null=True)),
                ('objective_assignment', models.TextField(blank=True, null=True)),
                # Payment tracking columns.
                ('payment_desc', models.CharField(blank=True, max_length=255, null=True)),
                ('payment_expense', models.CharField(blank=True, max_length=80, null=True)),
                ('payment_due_date', models.CharField(blank=True, max_length=80, null=True)),
                ('payment_payment', models.CharField(blank=True, max_length=80, null=True)),
                ('payment_paid_date', models.CharField(blank=True, max_length=80, null=True)),
                ('payment_balance', models.CharField(blank=True, max_length=80, null=True)),
                ('payment_invoice', models.CharField(blank=True, max_length=80, null=True)),
                # Deleting the Student keeps the report with student=NULL.
                ('student', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='monthly_report', to='student.Student')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 71.260417
| 157
| 0.611168
| 820
| 6,841
| 4.921951
| 0.14878
| 0.142716
| 0.277502
| 0.333003
| 0.808226
| 0.794351
| 0.698959
| 0.680624
| 0.680624
| 0.638008
| 0
| 0.02826
| 0.234469
| 6,841
| 95
| 158
| 72.010526
| 0.74241
| 0.006578
| 0
| 0
| 1
| 0
| 0.128937
| 0.009862
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022472
| 0
| 0.05618
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4b02c49e711128f0fb15d1b3edccb742c1e10645
| 25
|
py
|
Python
|
SysID Addon/seeq_sysid/__init__.py
|
BYU-PRISM/Seeq
|
456629b70fb7981c10e608946e26f2dd892a1808
|
[
"MIT"
] | 12
|
2021-10-15T08:08:35.000Z
|
2021-12-17T01:25:00.000Z
|
SysID Addon/seeq_sysid/__init__.py
|
BYU-PRISM/Seeq
|
456629b70fb7981c10e608946e26f2dd892a1808
|
[
"MIT"
] | 4
|
2021-10-19T17:11:42.000Z
|
2022-02-23T08:05:25.000Z
|
SysID Addon/seeq_sysid/__init__.py
|
BYU-PRISM/Seeq
|
456629b70fb7981c10e608946e26f2dd892a1808
|
[
"MIT"
] | 2
|
2022-01-18T09:31:30.000Z
|
2022-01-18T18:25:10.000Z
|
from . import gui, model
| 12.5
| 24
| 0.72
| 4
| 25
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 25
| 1
| 25
| 25
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b11e55dea2205567ec514e662148485498675c3
| 100
|
py
|
Python
|
src/pyshowdown/utils.py
|
ScottehMax/pyshowdown
|
34e5809b55ce4032a64c3e07270d13d1a32f83ed
|
[
"MIT"
] | null | null | null |
src/pyshowdown/utils.py
|
ScottehMax/pyshowdown
|
34e5809b55ce4032a64c3e07270d13d1a32f83ed
|
[
"MIT"
] | null | null | null |
src/pyshowdown/utils.py
|
ScottehMax/pyshowdown
|
34e5809b55ce4032a64c3e07270d13d1a32f83ed
|
[
"MIT"
] | null | null | null |
import re
def to_id(string: str) -> str:
    """Normalize *string* to an ID: strip non-alphanumerics, lowercase."""
    alnum_only = re.sub(r"[^A-Za-z0-9]", "", string)
    return alnum_only.lower()
| 16.666667
| 56
| 0.57
| 18
| 100
| 3.111111
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024096
| 0.17
| 100
| 5
| 57
| 20
| 0.650602
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
d9eeaece8d61ce0598d0ea576fa721136f2f3c9e
| 8,443
|
py
|
Python
|
tests/model/test_base.py
|
rszemplinski/datamodel-code-generator
|
86665b19c4e90ef62b28f8b341923a8cab5d597e
|
[
"MIT"
] | null | null | null |
tests/model/test_base.py
|
rszemplinski/datamodel-code-generator
|
86665b19c4e90ef62b28f8b341923a8cab5d597e
|
[
"MIT"
] | null | null | null |
tests/model/test_base.py
|
rszemplinski/datamodel-code-generator
|
86665b19c4e90ef62b28f8b341923a8cab5d597e
|
[
"MIT"
] | null | null | null |
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any
import pytest
from datamodel_code_generator.model.base import (
DataModel,
DataModelFieldBase,
TemplateBase,
)
from datamodel_code_generator.reference import Reference
from datamodel_code_generator.types import DataType, Types
class A(TemplateBase):
    """Minimal concrete TemplateBase whose render() yields an empty string."""
    def render(self) -> str:
        return ''
class B(DataModel):
    """Concrete DataModel stub; TEMPLATE_FILE_PATH is reassigned by the
    tests to a temp file before instantiation."""
    @classmethod
    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
        # Abstract-method stub; not exercised by these tests.
        pass

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    # Placeholder; tests point this at a written-out template file.
    TEMPLATE_FILE_PATH = ''
class C(DataModel):
    """Concrete DataModel stub with no TEMPLATE_FILE_PATH of its own;
    instantiating it is expected to raise (see the exception test below in
    the original file)."""
    @classmethod
    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
        # Abstract-method stub; not exercised by these tests.
        pass
# Jinja2 template approximating a dataclass renderer; the tests write it to
# a temp file and point B.TEMPLATE_FILE_PATH at that file before rendering.
template: str = '''{%- for decorator in decorators -%}
{{ decorator }}
{%- endfor %}
@dataclass
class {{ class_name }}:
{%- for field in fields -%}
{%- if field.required %}
{{ field.name }}: {{ field.type_hint }}
{%- else %}
{{ field.name }}: {{ field.type_hint }} = {{field.default}}
{%- endif %}
{%- endfor -%}'''
def test_template_base():
    """A TemplateBase subclass exposes its template path, returns the raw
    template text from _render(), and stringifies via render().

    Fix: the NamedTemporaryFile is created with delete=False, so the
    original version leaked the file on disk; it is now removed in a
    finally block.
    """
    with NamedTemporaryFile('w', delete=False) as dummy_template:
        dummy_template.write('abc')
        dummy_template.seek(0)
        dummy_template.close()
    try:
        a: TemplateBase = A(Path(dummy_template.name))
        assert str(a.template_file_path) == dummy_template.name
        assert a._render() == 'abc'
        assert str(a) == ''
    finally:
        # delete=False keeps the file past the context manager; clean it up.
        Path(dummy_template.name).unlink()
def test_data_model():
    """A DataModel built from a written-out template exposes its name,
    fields, decorators, and resolved base class, and render() interpolates
    them into the template.

    Fixes: the implicit string concatenation `"" 'abc' ""` (equal to 'abc')
    is simplified, and the delete=False temp file is now removed.
    """
    field = DataModelFieldBase(
        name='a', data_type=DataType(type='str'), default='abc', required=True
    )
    with NamedTemporaryFile('w', delete=False) as dummy_template:
        dummy_template.write(template)
        dummy_template.seek(0)
        dummy_template.close()
    try:
        B.TEMPLATE_FILE_PATH = dummy_template.name
        data_model = B(
            fields=[field],
            decorators=['@validate'],
            base_classes=[Reference(path='base', original_name='base', name='Base')],
            reference=Reference(path='test_model', name='test_model'),
        )
        assert data_model.name == 'test_model'
        assert data_model.fields == [field]
        assert data_model.decorators == ['@validate']
        assert data_model.base_class == 'Base'
        assert (
            data_model.render() == '@validate\n'
            '@dataclass\n'
            'class test_model:\n'
            ' a: str'
        )
    finally:
        # delete=False keeps the file past the context manager; clean it up.
        Path(dummy_template.name).unlink()
def test_data_model_exception():
    """Instantiating a DataModel subclass that never defines a usable
    TEMPLATE_FILE_PATH raises with 'TEMPLATE_FILE_PATH is undefined'.

    Fix: the implicit string concatenation `"" 'abc' ""` (equal to 'abc')
    is simplified to the plain literal.
    """
    field = DataModelFieldBase(
        name='a', data_type=DataType(type='str'), default='abc', required=True
    )
    with pytest.raises(Exception, match='TEMPLATE_FILE_PATH is undefined'):
        C(
            fields=[field],
            reference=Reference(path='abc', original_name='abc', name='abc'),
        )
def test_data_field():
    """DataModelFieldBase.type_hint wraps the base hint in List/Optional/
    Union according to the data type and the required flag.

    Rewritten table-driven: the original repeated many assertions verbatim
    (and carried commented-out dead cases); each distinct (kwargs, expected)
    combination from the original is kept exactly once.
    """
    def _str_int():
        # Fresh DataType instances per case, as in the original.
        return [DataType(type='str'), DataType(type='int')]

    cases = [
        (dict(data_type=DataType(is_list=True), required=True,
              is_list=True, is_union=True), 'List'),
        (dict(data_type=DataType(is_list=True), required=True,
              is_list=True, is_union=False), 'List'),
        (dict(data_type=DataType(), required=False), 'Optional'),
        (dict(data_type=DataType(is_list=True), required=False,
              is_list=True, is_union=True), 'Optional[List]'),
        (dict(data_type=DataType(), required=False,
              is_list=False, is_union=True), 'Optional'),
        (dict(data_type=DataType(), required=False,
              is_list=False, is_union=False), 'Optional'),
        (dict(data_type=DataType(is_list=True), required=False,
              is_list=True, is_union=False), 'Optional[List]'),
        (dict(data_type=DataType(type='str'), required=True), 'str'),
        (dict(data_type=DataType(type='str', is_list=True), required=True),
         'List[str]'),
        (dict(data_type=DataType(type='str'), required=False),
         'Optional[str]'),
        (dict(data_type=DataType(type='str', is_list=True), required=False),
         'Optional[List[str]]'),
        (dict(data_type=DataType(data_types=_str_int()), required=True),
         'Union[str, int]'),
        (dict(data_type=DataType(data_types=_str_int(), is_list=True),
              required=True), 'List[Union[str, int]]'),
        (dict(data_type=DataType(data_types=_str_int()), required=False),
         'Optional[Union[str, int]]'),
        (dict(data_type=DataType(data_types=_str_int(), is_list=True),
              required=False), 'Optional[List[Union[str, int]]]'),
        (dict(data_type=DataType(is_list=True), required=False),
         'Optional[List]'),
    ]
    for kwargs, expected in cases:
        field = DataModelFieldBase(name='a', **kwargs)
        assert field.type_hint == expected, (kwargs, expected)
| 32.225191
| 88
| 0.621817
| 973
| 8,443
| 5.224049
| 0.085303
| 0.05843
| 0.084399
| 0.115876
| 0.804643
| 0.789494
| 0.775723
| 0.749557
| 0.740704
| 0.740704
| 0
| 0.000308
| 0.23025
| 8,443
| 261
| 89
| 32.348659
| 0.781813
| 0.041218
| 0
| 0.594714
| 0
| 0
| 0.125062
| 0.005938
| 0
| 0
| 0
| 0
| 0.15859
| 1
| 0.035242
| false
| 0.008811
| 0.030837
| 0.004405
| 0.088106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a1a335209457155b6aaf93b83d95bcc622ec504
| 106
|
py
|
Python
|
alterego/__init__.py
|
Alphadelta14/aeexe
|
5bccfc367c69b96a009609104dd9621b61da9132
|
[
"MIT"
] | null | null | null |
alterego/__init__.py
|
Alphadelta14/aeexe
|
5bccfc367c69b96a009609104dd9621b61da9132
|
[
"MIT"
] | null | null | null |
alterego/__init__.py
|
Alphadelta14/aeexe
|
5bccfc367c69b96a009609104dd9621b61da9132
|
[
"MIT"
] | null | null | null |
from alterego.ai import markov
from alterego.database import state
from alterego.outgoing import twitter
| 21.2
| 37
| 0.849057
| 15
| 106
| 6
| 0.6
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122642
| 106
| 4
| 38
| 26.5
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8a1d9ca2baaba105a60186eb1b8c0f0a124c35e7
| 31
|
py
|
Python
|
setup.py
|
KanruXie/Virtual_Linac
|
7d321fcc744a6bec639bab495f5a2a71af4cbe4d
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
KanruXie/Virtual_Linac
|
7d321fcc744a6bec639bab495f5a2a71af4cbe4d
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
KanruXie/Virtual_Linac
|
7d321fcc744a6bec639bab495f5a2a71af4cbe4d
|
[
"Apache-2.0"
] | 1
|
2021-12-19T15:59:50.000Z
|
2021-12-19T15:59:50.000Z
|
from setuptools import setup
| 7.75
| 28
| 0.806452
| 4
| 31
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 31
| 3
| 29
| 10.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8a4c3f1f3cc12e0bf7596b54a6a7736d90dfade6
| 29,706
|
py
|
Python
|
symbols/faster/resnet_mx_101_e2e.py
|
Zhonghao2016/SNIPER
|
33f721a36f568b7a60b93562d87c30853e4aa06b
|
[
"Apache-2.0"
] | 1
|
2021-02-18T16:55:54.000Z
|
2021-02-18T16:55:54.000Z
|
symbols/faster/resnet_mx_101_e2e.py
|
Zhonghao2016/SNIPER
|
33f721a36f568b7a60b93562d87c30853e4aa06b
|
[
"Apache-2.0"
] | null | null | null |
symbols/faster/resnet_mx_101_e2e.py
|
Zhonghao2016/SNIPER
|
33f721a36f568b7a60b93562d87c30853e4aa06b
|
[
"Apache-2.0"
] | null | null | null |
import mxnet as mx
from symbols.symbol import Symbol
from operator_py.box_annotator_ohem import *
import numpy as np
import pdb
def checkpoint_callback(bbox_param_names, prefix, means, stds):
    """Return an epoch-end callback that saves a checkpoint containing extra
    '<name>_test' copies of the bbox-regression weight and bias, scaled by
    the bbox-target stds, then removes those copies from the arg dict so the
    in-memory training state is unchanged.

    NOTE(review): the `stds` argument is immediately shadowed by a
    hard-coded np.array([0.1, 0.1, 0.2, 0.2]) inside _callback, and `means`
    is never used at all — confirm whether the arguments were meant to be
    honored (e.g. bias * stds + means) before changing anything.
    """
    def _callback(iter_no, sym, arg, aux):
        weight = arg[bbox_param_names[0]]
        bias = arg[bbox_param_names[1]]
        # Shadows the outer `stds` parameter with fixed values.
        stds = np.array([0.1, 0.1, 0.2, 0.2])
        # Per-row scaling of the weight matrix (each output coord by its std).
        arg[bbox_param_names[0]+'_test'] = (weight.T * mx.nd.array(stds)).T
        arg[bbox_param_names[1]+'_test'] =bias * mx.nd.array(stds)
        # Checkpoints are saved 1-indexed: epoch iter_no -> iter_no + 1.
        mx.model.save_checkpoint(prefix, iter_no + 1, sym, arg, aux)
        # Drop the temporary '_test' entries again.
        arg.pop(bbox_param_names[0]+'_test')
        arg.pop(bbox_param_names[1]+'_test')
    return _callback
class resnet_mx_101_e2e(Symbol):
    def __init__(self, n_proposals=400, momentum=0.95, fix_bn=False, test_nbatch=1):
        """
        Use __init__ to define parameter network needs

        NOTE(review): `n_proposals` is accepted but never stored or read in
        the visible code — confirm whether it is still needed.
        """
        self.momentum = momentum          # BatchNorm momentum for trainable BN
        self.use_global_stats = True
        self.workspace = 512              # MXNet convolution workspace (MB)
        self.units = (3, 4, 23, 3)  # use for 101
        self.filter_list = [64, 256, 512, 1024, 2048]  # channels per stage
        self.fix_bn = fix_bn              # freeze all BatchNorm layers if True
        self.test_nbatch= test_nbatch     # inference batch size
def get_bbox_param_names(self):
return ['bbox_pred_weight', 'bbox_pred_bias']
    def residual_unit(self, data, num_filter, stride, dim_match, name, bn_mom=0.9, workspace=512, memonger=False,
                      fix_bn=False):
        """Pre-activation bottleneck residual unit: (BN-ReLU-Conv) x 3 with a
        shortcut added to the final conv output.

        The bottleneck width is num_filter * 0.25 for the 1x1 and 3x3 convs;
        the final 1x1 conv restores num_filter channels. When dim_match the
        shortcut is the identity, otherwise a strided 1x1 conv on act1.
        BN layers are frozen (use_global_stats) when fix_bn or self.fix_bn.

        NOTE(review): `bn_mom` is unused — trainable BN uses self.momentum.
        """
        if fix_bn or self.fix_bn:
            bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn1')
        else:
            bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn1')
        act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
        # 1x1 reduce to the bottleneck width.
        conv1 = mx.sym.Convolution(data=act1, num_filter=int(num_filter * 0.25), kernel=(1, 1), stride=(1, 1),
                                   pad=(0, 0),
                                   no_bias=True, workspace=workspace, name=name + '_conv1')
        if fix_bn or self.fix_bn:
            bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn2')
        else:
            bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn2')
        act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
        # 3x3 spatial conv; `stride` applies here (downsampling unit).
        conv2 = mx.sym.Convolution(data=act2, num_filter=int(num_filter * 0.25), kernel=(3, 3), stride=stride,
                                   pad=(1, 1),
                                   no_bias=True, workspace=workspace, name=name + '_conv2')
        if fix_bn or self.fix_bn:
            bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn3')
        else:
            bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn3')
        act3 = mx.sym.Activation(data=bn3, act_type='relu', name=name + '_relu3')
        # 1x1 expand back to num_filter channels.
        conv3 = mx.sym.Convolution(data=act3, num_filter=num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                   no_bias=True,
                                   workspace=workspace, name=name + '_conv3')
        if dim_match:
            shortcut = data
        else:
            # Projection shortcut matches channels/stride when dims differ.
            shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1, 1), stride=stride, no_bias=True,
                                          workspace=workspace, name=name + '_sc')
        if memonger:
            # Memory-optimization hint for MXNet's memonger pass.
            shortcut._set_attr(mirror_stage='True')
        return conv3 + shortcut
    def residual_unit_dilate(self, data, num_filter, stride, dim_match, name, bn_mom=0.9, workspace=512,
                             memonger=False):
        """Bottleneck residual unit identical in layout to residual_unit, but
        the middle 3x3 conv is dilated (dilate=(2, 2), pad=(2, 2)) to enlarge
        the receptive field without extra downsampling. BN freezing here is
        controlled only by self.fix_bn (no per-call fix_bn parameter).

        NOTE(review): `bn_mom` is unused — trainable BN uses self.momentum.
        """
        if self.fix_bn:
            bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn1')
        else:
            bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn1')
        act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
        # 1x1 reduce to the bottleneck width.
        conv1 = mx.sym.Convolution(data=act1, num_filter=int(num_filter * 0.25), kernel=(1, 1), stride=(1, 1),
                                   pad=(0, 0),
                                   no_bias=True, workspace=workspace, name=name + '_conv1')
        if self.fix_bn:
            bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn2')
        else:
            bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn2')
        act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
        # Dilated 3x3 conv: dilate/pad of 2 keeps spatial size at stride 1.
        conv2 = mx.sym.Convolution(data=act2, num_filter=int(num_filter * 0.25), kernel=(3, 3), dilate=(2, 2),
                                   stride=stride, pad=(2, 2),
                                   no_bias=True, workspace=workspace, name=name + '_conv2')
        if self.fix_bn:
            bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn3')
        else:
            bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn3')
        act3 = mx.sym.Activation(data=bn3, act_type='relu', name=name + '_relu3')
        # 1x1 expand back to num_filter channels.
        conv3 = mx.sym.Convolution(data=act3, num_filter=num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                   no_bias=True,
                                   workspace=workspace, name=name + '_conv3')
        if dim_match:
            shortcut = data
        else:
            # Projection shortcut when input/output dims differ.
            shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1, 1), stride=stride, no_bias=True,
                                          workspace=workspace, name=name + '_sc')
        if memonger:
            # Memory-optimization hint for MXNet's memonger pass.
            shortcut._set_attr(mirror_stage='True')
        return conv3 + shortcut
    def residual_unit_deform(self, data, num_filter, stride, dim_match, name, bn_mom=0.9, workspace=512,
                             memonger=False):
        """Bottleneck residual unit whose middle 3x3 conv is a deformable
        convolution: a plain conv predicts 72 offset channels (matching
        num_deformable_group=4, a 3x3 kernel, and x/y offsets: 4*9*2 = 72),
        which feed mx.contrib DeformableConvolution. BN freezing is
        controlled by self.fix_bn.

        NOTE(review): the deformable conv's num_filter is hard-coded to 512
        rather than int(num_filter * 0.25), and `stride` is not applied to it
        (stride=(1, 1)); both look intentional for the res5/2048-channel
        stage but confirm before reusing elsewhere. `bn_mom` is unused.
        """
        if self.fix_bn:
            bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn1')
        else:
            bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn1')
        act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
        # 1x1 reduce to the bottleneck width.
        conv1 = mx.sym.Convolution(data=act1, num_filter=int(num_filter * 0.25), kernel=(1, 1), stride=(1, 1),
                                   pad=(0, 0),
                                   no_bias=True, workspace=workspace, name=name + '_conv1')
        if self.fix_bn:
            bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn2')
        else:
            bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn2')
        act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
        # Offset branch for the deformable conv (cudnn disabled for this op).
        offset = mx.symbol.Convolution(name=name + '_offset', data=act2,
                                       num_filter=72, pad=(2, 2), kernel=(3, 3), stride=(1, 1),
                                       dilate=(2, 2), cudnn_off=True)
        conv2 = mx.contrib.symbol.DeformableConvolution(name=name + '_conv2', data=act2,
                                                        offset=offset,
                                                        num_filter=512, pad=(2, 2), kernel=(3, 3),
                                                        num_deformable_group=4,
                                                        stride=(1, 1), dilate=(2, 2), no_bias=True)
        if self.fix_bn:
            bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, use_global_stats=True, name=name + '_bn3')
        else:
            bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=self.momentum, name=name + '_bn3')
        act3 = mx.sym.Activation(data=bn3, act_type='relu', name=name + '_relu3')
        # 1x1 expand back to num_filter channels.
        conv3 = mx.sym.Convolution(data=act3, num_filter=num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0),
                                   no_bias=True,
                                   workspace=workspace, name=name + '_conv3')
        if dim_match:
            shortcut = data
        else:
            # Projection shortcut when input/output dims differ.
            shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1, 1), stride=stride, no_bias=True,
                                          workspace=workspace, name=name + '_sc')
        if memonger:
            # Memory-optimization hint for MXNet's memonger pass.
            shortcut._set_attr(mirror_stage='True')
        return conv3 + shortcut
def get_rpn(self, conv_feat, num_anchors):
rpn_conv = mx.sym.Convolution(
data=conv_feat, kernel=(3, 3), pad=(1, 1), num_filter=512, name="rpn_conv_3x3")
rpn_relu = mx.sym.Activation(data=rpn_conv, act_type="relu", name="rpn_relu")
rpn_cls_score = mx.sym.Convolution(
data=rpn_relu, kernel=(1, 1), pad=(0, 0), num_filter=2 * num_anchors, name="rpn_cls_score")
rpn_bbox_pred = mx.sym.Convolution(
data=rpn_relu, kernel=(1, 1), pad=(0, 0), num_filter=4 * num_anchors, name="rpn_bbox_pred")
return rpn_cls_score, rpn_bbox_pred
    def get_symbol_rpn(self, cfg, is_train=True):
        """Build the RPN-only symbol.

        Training: returns Group([rpn_cls_prob, rpn_bbox_loss]).
        Inference: runs MultiProposal and returns Group([rois, rpn_scores,
        im_ids]). The built group is also stored on self.sym.

        NOTE(review): gt_boxes and valid_ranges are declared but unused here
        — presumably to match the training data layout; confirm. Also,
        get_symbol_rcnn passes fp16=not cfg.TRAIN.fp16 to resnetc4 while
        this method passes fp16=cfg.TRAIN.fp16 — one of the two looks
        inverted; verify against the resnetc4 implementation.
        """
        num_anchors = cfg.network.NUM_ANCHORS
        # input init
        if is_train:
            data = mx.sym.Variable(name="data")
            rpn_label = mx.sym.Variable(name='label')
            rpn_bbox_target = mx.sym.Variable(name='bbox_target')
            rpn_bbox_weight = mx.sym.Variable(name='bbox_weight')
            gt_boxes = mx.sym.Variable(name='gt_boxes')
            valid_ranges = mx.sym.Variable(name='valid_ranges')
            im_info = mx.sym.Variable(name='im_info')
        else:
            data = mx.sym.Variable(name="data")
            im_info = mx.sym.Variable(name='im_info')
            im_ids = mx.sym.Variable(name='im_ids')
        # shared convolutional layers (resnetc4/resnetc5 defined elsewhere)
        conv_feat = self.resnetc4(data, fp16=cfg.TRAIN.fp16)
        # res5
        relut = self.resnetc5(conv_feat, deform=True)
        relu1 = mx.symbol.Concat(*[conv_feat, relut], name='cat4')
        if cfg.TRAIN.fp16:
            # Cast back to fp32 before the RPN head.
            relu1 = mx.sym.Cast(data=relu1, dtype=np.float32)
        rpn_cls_score, rpn_bbox_pred = self.get_rpn(relu1, num_anchors)
        # Reshape scores to (batch, 2, anchors*H, W) for per-anchor softmax.
        rpn_cls_score_reshape = mx.sym.Reshape(data=rpn_cls_score, shape=(0, 2, -1, 0),
                                               name="rpn_cls_score_reshape")
        if is_train:
            # prepare rpn data
            if cfg.TRAIN.fp16 == True:
                grad_scale = float(cfg.TRAIN.scale)
            else:
                grad_scale = 1.0
            # classification (ignore_label=-1 skips unassigned anchors)
            rpn_cls_prob = mx.sym.SoftmaxOutput(data=rpn_cls_score_reshape, label=rpn_label, multi_output=True,
                                                normalization='valid', use_ignore=True, ignore_label=-1,
                                                name="rpn_cls_prob", grad_scale=grad_scale)
            # grad_scale recomputed identically here (redundant but harmless).
            if cfg.TRAIN.fp16 == True:
                grad_scale = float(cfg.TRAIN.scale)
            else:
                grad_scale = 1.0
            # bounding box regression (smooth L1, weighted per-anchor)
            rpn_bbox_loss_ = rpn_bbox_weight * mx.sym.smooth_l1(name='rpn_bbox_loss_', scalar=1.0,
                                                                data=(rpn_bbox_pred - rpn_bbox_target))
            rpn_bbox_loss = mx.sym.MakeLoss(name='rpn_bbox_loss', data=rpn_bbox_loss_,
                                            grad_scale=3 * grad_scale / float(
                                                cfg.TRAIN.BATCH_IMAGES * cfg.TRAIN.RPN_BATCH_SIZE))
            group = mx.sym.Group([rpn_cls_prob, rpn_bbox_loss])
        else:
            # ROI Proposal: softmax over anchors, then NMS'd proposals.
            rpn_cls_prob = mx.sym.SoftmaxActivation(
                data=rpn_cls_score_reshape, mode="channel", name="rpn_cls_prob")
            rpn_cls_prob_reshape = mx.sym.Reshape(
                data=rpn_cls_prob, shape=(0, 2 * num_anchors, -1, 0), name='rpn_cls_prob_reshape')
            rois, rpn_scores = mx.sym.MultiProposal(cls_prob=rpn_cls_prob_reshape, bbox_pred=rpn_bbox_pred, im_info=im_info,
                                                    name='rois', batch_size=self.test_nbatch,
                                                    rpn_pre_nms_top_n=cfg.TEST.RPN_PRE_NMS_TOP_N,
                                                    rpn_post_nms_top_n=cfg.TEST.RPN_POST_NMS_TOP_N,
                                                    rpn_min_size=cfg.TEST.RPN_MIN_SIZE,
                                                    threshold=cfg.TEST.RPN_NMS_THRESH,
                                                    feature_stride=cfg.network.RPN_FEAT_STRIDE,
                                                    ratios=tuple(cfg.network.ANCHOR_RATIOS),
                                                    scales=tuple(cfg.network.ANCHOR_SCALES))
            group = mx.sym.Group([rois, rpn_scores, im_ids])
        self.sym = group
        return group
def get_symbol_rcnn(self, cfg, is_train=True, num_classes=602):
num_anchors = cfg.network.NUM_ANCHORS
# input init
if is_train:
data = mx.sym.Variable(name="data")
rpn_label = mx.sym.Variable(name='label')
rpn_bbox_target = mx.sym.Variable(name='bbox_target')
rpn_bbox_weight = mx.sym.Variable(name='bbox_weight')
gt_boxes = mx.sym.Variable(name='gt_boxes')
valid_ranges = mx.sym.Variable(name='valid_ranges')
im_info = mx.sym.Variable(name='im_info')
else:
data = mx.sym.Variable(name="data")
im_info = mx.sym.Variable(name='im_info')
im_ids = mx.sym.Variable(name='im_ids')
# shared convolutional layers
conv_feat = self.resnetc4(data, fp16=not cfg.TRAIN.fp16)
# res5
relut = self.resnetc5(conv_feat, deform=True)
relu1 = mx.symbol.Concat(*[conv_feat, relut], name='cat4')
if cfg.TRAIN.fp16:
relu1 = mx.sym.Cast(data=relu1, dtype=np.float32)
rpn_cls_score, rpn_bbox_pred = self.get_rpn(conv_feat, num_anchors)
conv_new_1 = mx.sym.Convolution(data=relu1, kernel=(1, 1), num_filter=256, name="conv_new_1")
conv_new_1_relu = mx.sym.Activation(data=conv_new_1, act_type='relu', name='conv_new_1_relu')
rpn_cls_score_reshape = mx.sym.Reshape(data=rpn_cls_score, shape=(0, 2, -1, 0),
name="rpn_cls_score_reshape")
if is_train:
# prepare rpn data
if cfg.TRAIN.fp16 == True:
grad_scale = float(cfg.TRAIN.scale)
else:
grad_scale = 1.0
# classification
rpn_cls_prob = mx.sym.SoftmaxOutput(data=rpn_cls_score_reshape, label=rpn_label, multi_output=True,
normalization='valid', use_ignore=True, ignore_label=-1,
name="rpn_cls_prob", grad_scale=grad_scale)
rois, label, bbox_target, bbox_weight = mx.sym.MultiProposalTarget(cls_prob=rpn_cls_prob, bbox_pred=rpn_bbox_pred, im_info=im_info,
gt_boxes=gt_boxes, valid_ranges=valid_ranges, batch_size=cfg.TRAIN.BATCH_IMAGES, name='multi_proposal_target')
label = mx.symbol.Reshape(data=label, shape=(-1,), name='label_reshape')
offset_t = mx.contrib.sym.DeformablePSROIPooling(name='offset_t', data=conv_new_1_relu, rois=rois, group_size=1, pooled_size=7,
sample_per_part=4, no_trans=True, part_size=7, output_dim=256, spatial_scale=0.0625)
offset = mx.sym.FullyConnected(name='offset', data=offset_t, num_hidden=7 * 7 * 2, lr_mult=0.01)
offset_reshape = mx.sym.Reshape(data=offset, shape=(-1, 2, 7, 7), name="offset_reshape")
deformable_roi_pool = mx.contrib.sym.DeformablePSROIPooling(name='deformable_roi_pool', data=conv_new_1_relu, rois=rois,
trans=offset_reshape, group_size=1, pooled_size=7, sample_per_part=4,
no_trans=False, part_size=7, output_dim=256, spatial_scale=0.0625, trans_std=0.1)
# 2 fc
fc_new_1 = mx.sym.FullyConnected(name='fc_new_1', data=deformable_roi_pool, num_hidden=1024)
fc_new_1_relu = mx.sym.Activation(data=fc_new_1, act_type='relu', name='fc_new_1_relu')
fc_new_2 = mx.sym.FullyConnected(name='fc_new_2', data=fc_new_1_relu, num_hidden=1024)
fc_new_2_relu = mx.sym.Activation(data=fc_new_2, act_type='relu', name='fc_new_2_relu')
#num_classes = 81
num_reg_classes = 1
cls_score = mx.sym.FullyConnected(name='cls_score', data=fc_new_2_relu, num_hidden=num_classes)
bbox_pred = mx.sym.FullyConnected(name='bbox_pred', data=fc_new_2_relu, num_hidden=num_reg_classes * 4)
if cfg.TRAIN.fp16 == True:
grad_scale = float(cfg.TRAIN.scale)
else:
grad_scale = 1.0
cls_prob = mx.sym.SoftmaxOutput(name='cls_prob', data=cls_score, label=label, normalization='valid', use_ignore=True, ignore_label=-1,
grad_scale=grad_scale)
bbox_loss_ = bbox_weight * mx.sym.smooth_l1(name='bbox_loss_', scalar=1.0,
data=(bbox_pred - bbox_target))
bbox_loss = mx.sym.MakeLoss(name='bbox_loss', data=bbox_loss_, grad_scale=grad_scale / (188.0*16.0))
rcnn_label = label
# reshape output
cls_prob = mx.sym.Reshape(data=cls_prob, shape=(cfg.TRAIN.BATCH_IMAGES, -1, num_classes),
name='cls_prob_reshape')
bbox_loss = mx.sym.Reshape(data=bbox_loss, shape=(cfg.TRAIN.BATCH_IMAGES, -1, 4 * num_reg_classes),
name='bbox_loss_reshape')
# bounding box regression
rpn_bbox_loss_ = rpn_bbox_weight * mx.sym.smooth_l1(name='rpn_bbox_loss_', scalar=1.0,
data=(rpn_bbox_pred - rpn_bbox_target))
rpn_bbox_loss = mx.sym.MakeLoss(name='rpn_bbox_loss', data=rpn_bbox_loss_,
grad_scale=3 * grad_scale / float(
cfg.TRAIN.BATCH_IMAGES * cfg.TRAIN.RPN_BATCH_SIZE))
group = mx.sym.Group([rpn_cls_prob, rpn_bbox_loss, cls_prob, bbox_loss, mx.sym.BlockGrad(rcnn_label)])
else:
# ROI Proposal
rpn_cls_prob = mx.sym.SoftmaxActivation(
data=rpn_cls_score_reshape, mode="channel", name="rpn_cls_prob")
rpn_cls_prob_reshape = mx.sym.Reshape(
data=rpn_cls_prob, shape=(0, 2 * num_anchors, -1, 0), name='rpn_cls_prob_reshape')
rois, _ = mx.sym.MultiProposal(cls_prob=rpn_cls_prob_reshape, bbox_pred=rpn_bbox_pred, im_info=im_info,
name='rois', batch_size=self.test_nbatch,
rpn_pre_nms_top_n=cfg.TEST.RPN_PRE_NMS_TOP_N,
rpn_post_nms_top_n=cfg.TEST.RPN_POST_NMS_TOP_N,
rpn_min_size=cfg.TEST.RPN_MIN_SIZE,
threshold=cfg.TEST.RPN_NMS_THRESH,
feature_stride=cfg.network.RPN_FEAT_STRIDE,
ratios=tuple(cfg.network.ANCHOR_RATIOS),
scales=tuple(cfg.network.ANCHOR_SCALES))
offset_t = mx.contrib.sym.DeformablePSROIPooling(name='offset_t', data=conv_new_1_relu, rois=rois,
group_size=1, pooled_size=7,
sample_per_part=4, no_trans=True, part_size=7,
output_dim=256, spatial_scale=0.0625)
offset = mx.sym.FullyConnected(name='offset', data=offset_t, num_hidden=7 * 7 * 2, lr_mult=0.01)
offset_reshape = mx.sym.Reshape(data=offset, shape=(-1, 2, 7, 7), name="offset_reshape")
deformable_roi_pool = mx.contrib.sym.DeformablePSROIPooling(name='deformable_roi_pool',
data=conv_new_1_relu, rois=rois,
trans=offset_reshape, group_size=1,
pooled_size=7, sample_per_part=4,
no_trans=False, part_size=7, output_dim=256,
spatial_scale=0.0625, trans_std=0.1)
# 2 fc
fc_new_1 = mx.sym.FullyConnected(name='fc_new_1', data=deformable_roi_pool, num_hidden=1024)
fc_new_1_relu = mx.sym.Activation(data=fc_new_1, act_type='relu', name='fc_new_1_relu')
fc_new_2 = mx.sym.FullyConnected(name='fc_new_2', data=fc_new_1_relu, num_hidden=1024)
fc_new_2_relu = mx.sym.Activation(data=fc_new_2, act_type='relu', name='fc_new_2_relu')
#num_classes = 81
#num_classes = 602
num_reg_classes = 1
cls_score = mx.sym.FullyConnected(name='cls_score', data=fc_new_2_relu, num_hidden=num_classes)
bbox_pred = mx.sym.FullyConnected(name='bbox_pred', data=fc_new_2_relu, num_hidden=num_reg_classes * 4)
cls_prob = mx.sym.SoftmaxActivation(name='cls_prob', data=cls_score)
cls_prob = mx.sym.Reshape(data=cls_prob, shape=(self.test_nbatch, -1, num_classes),
name='cls_prob_reshape')
bbox_pred = mx.sym.Reshape(data=bbox_pred, shape=(self.test_nbatch, -1, 4 * num_reg_classes),
name='bbox_pred_reshape')
group = mx.sym.Group([rois, cls_prob, bbox_pred, im_ids])
self.sym = group
return group
def resnetc4(self, data, fp16=False):
    """Build the ResNet trunk up to and including stage C4.

    Applies the stem (fixed-gamma BN, 7x7/2 conv, BN, ReLU, 3x3/2 max-pool),
    then every residual stage except the last one; the final stage is built
    separately by ``resnetc5``.

    Args:
        data: input image symbol.
        fp16: when True, cast activations to float16 right after the stem conv.

    Returns:
        The C4 feature-map symbol.
    """
    workspace = self.workspace
    net = mx.sym.BatchNorm(data=data, fix_gamma=True, eps=2e-5, use_global_stats=True, name='bn_data')
    net = mx.sym.Convolution(data=net, num_filter=self.filter_list[0], kernel=(7, 7), stride=(2, 2),
                             pad=(3, 3), no_bias=True, name="conv0", workspace=workspace)
    if fp16:
        net = mx.sym.Cast(data=net, dtype=np.float16)
    net = mx.sym.BatchNorm(data=net, fix_gamma=False, eps=2e-5, use_global_stats=True, name='bn0')
    net = mx.sym.Activation(data=net, act_type='relu', name='relu0')
    net = mx.symbol.Pooling(data=net, kernel=(3, 3), stride=(2, 2), pad=(1, 1), pool_type='max')
    # Stages 1 .. num_stage-1.  The first unit of each stage downsamples,
    # except stage 1 whose resolution was already halved by the stem pool;
    # stage 1 also keeps its batch-norm statistics frozen (fix_bn).
    for stage in range(len(self.units) - 1):
        first_stride = (1, 1) if stage == 0 else (2, 2)
        net = self.residual_unit(net, self.filter_list[stage + 1], first_stride, False,
                                 name='stage%d_unit%d' % (stage + 1, 1), workspace=workspace,
                                 memonger=False, fix_bn=(stage == 0))
        for unit in range(2, self.units[stage] + 1):
            net = self.residual_unit(net, self.filter_list[stage + 1], (1, 1), True,
                                     name='stage%d_unit%d' % (stage + 1, unit),
                                     workspace=workspace, memonger=False, fix_bn=(stage == 0))
    return net
def resnetc5(self, body, deform):
    """Build the last residual stage (C5) on top of the C4 trunk.

    Args:
        body: C4 feature symbol produced by ``resnetc4``.
        deform: when True use deformable residual units, otherwise plain
            dilated residual units.

    Returns:
        The C5 feature-map symbol.
    """
    last = len(self.units) - 1
    # Both unit builders share the same signature, so pick one up front
    # instead of branching inside the loop.
    make_unit = self.residual_unit_deform if deform else self.residual_unit_dilate
    net = make_unit(body, self.filter_list[last + 1], (1, 1), False,
                    name='stage%d_unit%d' % (last + 1, 1),
                    workspace=self.workspace, memonger=False)
    for unit in range(2, self.units[last] + 1):
        net = make_unit(net, self.filter_list[last + 1], (1, 1), True,
                        name='stage%d_unit%d' % (last + 1, unit),
                        workspace=self.workspace, memonger=False)
    return net
def init_weight_rcnn(self, cfg, arg_params, aux_params):
    """Initialize RPN and RCNN head parameters in place.

    Deformable-conv offset layers start at zero; every other weight is
    drawn from N(0, 0.01) and every bias is zeroed.  The order of the
    random draws matches the original hand-written sequence.
    """
    def _zeros(name):
        arg_params[name] = mx.nd.zeros(shape=self.arg_shape_dict[name])

    def _gauss(name):
        arg_params[name] = mx.random.normal(0, 0.01, shape=self.arg_shape_dict[name])

    # Backbone deformable-offset branches start at zero.
    for unit in (1, 2, 3):
        _zeros('stage4_unit%d_offset_weight' % unit)
        _zeros('stage4_unit%d_offset_bias' % unit)
    for layer in ('rpn_conv_3x3', 'rpn_cls_score', 'rpn_bbox_pred', 'conv_new_1'):
        _gauss(layer + '_weight')
        _zeros(layer + '_bias')
    # ROI-pooling offset branch is also zero-initialized.
    _zeros('offset_weight')
    _zeros('offset_bias')
    for layer in ('fc_new_1', 'fc_new_2', 'cls_score', 'bbox_pred'):
        _gauss(layer + '_weight')
        _zeros(layer + '_bias')
def init_weight_rpn(self, cfg, arg_params, aux_params):
    """Initialize only the RPN-related parameters in place.

    Offset layers are zero-initialized; RPN conv/score/bbox weights are
    N(0, 0.01) with zero biases.  Random draws occur in the same order as
    the original hand-written sequence.
    """
    def _zeros(name):
        arg_params[name] = mx.nd.zeros(shape=self.arg_shape_dict[name])

    def _gauss(name):
        arg_params[name] = mx.random.normal(0, 0.01, shape=self.arg_shape_dict[name])

    for unit in (1, 2, 3):
        _zeros('stage4_unit%d_offset_weight' % unit)
        _zeros('stage4_unit%d_offset_bias' % unit)
    for layer in ('rpn_conv_3x3', 'rpn_cls_score', 'rpn_bbox_pred'):
        _gauss(layer + '_weight')
        _zeros(layer + '_bias')
def init_weight(self, cfg, arg_params, aux_params):
    """Initialize all trainable head parameters.

    Delegates to ``init_weight_rcnn``, which already initializes the RPN
    layers (rpn_conv_3x3, rpn_cls_score, rpn_bbox_pred) in addition to the
    RCNN head, so ``init_weight_rpn`` is not called here.
    """
    self.init_weight_rcnn(cfg, arg_params, aux_params)
| 62.27673
| 189
| 0.577998
| 3,968
| 29,706
| 4.033014
| 0.066532
| 0.035931
| 0.026995
| 0.038243
| 0.8664
| 0.833156
| 0.810161
| 0.796476
| 0.777167
| 0.758483
| 0
| 0.036076
| 0.304821
| 29,706
| 476
| 190
| 62.407563
| 0.73885
| 0.012085
| 0
| 0.66416
| 0
| 0
| 0.089411
| 0.022626
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037594
| false
| 0
| 0.012531
| 0.002506
| 0.077694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a56a11ea9cdf98a641aa1d3f75bbab5c0d0c5bc
| 29
|
py
|
Python
|
src/Lexer/__init__.py
|
RashikaKarki/Storm
|
0861de24c02ad34fe603ebf87f153e75af0e2dad
|
[
"Apache-2.0"
] | 8
|
2021-06-01T20:30:58.000Z
|
2021-10-01T04:03:48.000Z
|
src/Lexer/__init__.py
|
RashikaKarki/Storm
|
0861de24c02ad34fe603ebf87f153e75af0e2dad
|
[
"Apache-2.0"
] | null | null | null |
src/Lexer/__init__.py
|
RashikaKarki/Storm
|
0861de24c02ad34fe603ebf87f153e75af0e2dad
|
[
"Apache-2.0"
] | null | null | null |
from Lexer.lexer import Lexer
| 29
| 29
| 0.862069
| 5
| 29
| 5
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8a77c2290a8617c774ebf0576d37b8b94083d226
| 8,028
|
py
|
Python
|
api/v1/test/test_rsvp.py
|
Ogollah/Questioner
|
eb0845bf26f5a38d712cd8b6a8b83731fa8bc16b
|
[
"MIT"
] | null | null | null |
api/v1/test/test_rsvp.py
|
Ogollah/Questioner
|
eb0845bf26f5a38d712cd8b6a8b83731fa8bc16b
|
[
"MIT"
] | 14
|
2019-01-04T18:42:10.000Z
|
2019-01-17T08:10:30.000Z
|
api/v1/test/test_rsvp.py
|
Ogollah/Questioner
|
eb0845bf26f5a38d712cd8b6a8b83731fa8bc16b
|
[
"MIT"
] | 1
|
2019-01-09T21:14:01.000Z
|
2019-01-09T21:14:01.000Z
|
"""
This file is for testing rsvp test cases.
"""
import unittest
import json
#local imports
from api.v1.test.base_test import BaseTestCase
class TestRsvpTestCases(BaseTestCase):
    """Tests for the meetup RSVP endpoint (/api/v1/meetups/<id>/rsvp)."""

    def setUp(self):
        # Three meetup payloads with distinct topics so each test can create
        # its own meetup; values are otherwise identical placeholders.
        common = dict(
            description="string",
            images="string",
            Tags="string",
            createdOn="string",
            happeningOn="2018-06-29 08:15",
            host="string",
            hostFrom="string",
        )
        self.meetup_data = dict(common, topic="Reserve")
        self.meetup_data_succ = dict(common, topic="success")
        self.meetup_data_no = dict(common, topic="status")
        self.status = dict(status="yes")
        self.status_no = dict(status="no")
        self.status_nill = dict(status="")

    def login_user(self):
        """Log in the regular test user and return the raw response."""
        return self.client.post(
            '/api/v1/user/auth/signin',
            data=json.dumps(dict(
                email='meetups@mail.com',
                password='42qwR@#'
            )),
            content_type='application/json'
        )

    def login_user_admin(self):
        """Log in the admin test user and return the raw response."""
        return self.client.post(
            '/api/v1/user/auth/signin',
            data=json.dumps(dict(
                email='admin@admin.com',
                password='#Sadm@3In'
            )),
            content_type='application/json'
        )

    def _token(self, login_response):
        """Extract the access token from a signin response."""
        return json.loads(login_response.data.decode())['access_token']

    def _create_meetup(self, token, payload):
        """POST a new meetup using the given (admin) token."""
        return self.client.post(
            '/api/v1/meetups/create',
            headers=dict(Authorization=token),
            data=json.dumps(payload),
            content_type='application/json'
        )

    def _rsvp(self, meetup_id, token, status_payload):
        """POST an RSVP for the given meetup id.

        NOTE(review): the original route deliberately has no leading slash;
        kept as-is to preserve behavior.
        """
        return self.client.post(
            'api/v1/meetups/%d/rsvp' % meetup_id,
            headers=dict(Authorization=token),
            data=json.dumps(status_payload),
            content_type='application/json'
        )

    def test_successful_reserve_meetup(self):
        """A signed-in user can successfully reserve a meetup (201)."""
        with self.client:
            user_token = self._token(self.login_user())
            # Bug fix: the admin token must come from the admin login
            # response, not from the regular user's (the original read
            # resp.data and left admin_login unused).
            admin_token = self._token(self.login_user_admin())
            self._create_meetup(admin_token, self.meetup_data_succ)
            response = self._rsvp(3, user_token, self.status)
            result = json.loads(response.data.decode())
            self.assertTrue(result['status'] == 201)
            self.assertEqual(response.status_code, 201)

    def test_duplicate_reserve_meetup(self):
        """Reserving the same meetup twice is rejected (409)."""
        with self.client:
            user_token = self._token(self.login_user())
            # Bug fix: token taken from the admin login response (see above).
            admin_token = self._token(self.login_user_admin())
            self._create_meetup(admin_token, self.meetup_data)
            self._rsvp(1, user_token, self.status)
            response = self._rsvp(1, user_token, self.status)
            result = json.loads(response.data.decode())
            self.assertTrue(result['status'] == 409)
            self.assertEqual(response.status_code, 409)

    def test_unavailable_meetup(self):
        """RSVP to a nonexistent meetup returns 404."""
        with self.client:
            user_token = self._token(self.login_user())
            # Bug fix: token taken from the admin login response (see above).
            admin_token = self._token(self.login_user_admin())
            self._create_meetup(admin_token, self.meetup_data_no)
            response = self._rsvp(30, user_token, self.status_no)
            result = json.loads(response.data.decode())
            self.assertTrue(result['status'] == 404)
            self.assertEqual(response.status_code, 404)

    def test_status_no(self):
        """Answering 'no' does not reserve the meetup (200)."""
        with self.client:
            user_token = self._token(self.login_user())
            admin_token = self._token(self.login_user_admin())
            self._create_meetup(admin_token, self.meetup_data_no)
            response = self._rsvp(3, user_token, self.status_no)
            result = json.loads(response.data.decode())
            self.assertTrue(result['status'] == 200)
            self.assertTrue(result['message'] == 'Meetup is not reserved.')
            self.assertEqual(response.status_code, 200)

    def test_admin_can_not_rsvpt(self):
        """An admin account may not reserve a meetup (401)."""
        with self.client:
            admin_token = self._token(self.login_user_admin())
            self._create_meetup(admin_token, self.meetup_data_no)
            response = self._rsvp(4, admin_token, self.status)
            result = json.loads(response.data.decode())
            self.assertTrue(result['status'] == 401)
            self.assertTrue(result['message'] == 'Admin cannot reserve a meetup')
            self.assertEqual(response.status_code, 401)

    def test_blank_status(self):
        """A blank status value is rejected with 400."""
        with self.client:
            user_token = self._token(self.login_user())
            admin_token = self._token(self.login_user_admin())
            self._create_meetup(admin_token, self.meetup_data_no)
            response = self._rsvp(3, user_token, self.status_nill)
            result = json.loads(response.data.decode())
            self.assertTrue(result['status'] == 400)
            self.assertTrue(result['message'] == 'Provide a status (yes, no, or maybe).')
            self.assertEqual(response.status_code, 400)
# Allow running this test module directly (e.g. `python test_rsvp.py`).
if __name__ == '__main__':
    unittest.main()
| 45.355932
| 176
| 0.578724
| 880
| 8,028
| 5.132955
| 0.134091
| 0.060881
| 0.046491
| 0.056453
| 0.823113
| 0.766438
| 0.760239
| 0.738101
| 0.738101
| 0.735665
| 0
| 0.017491
| 0.294968
| 8,028
| 177
| 177
| 45.355932
| 0.780565
| 0.018062
| 0
| 0.507576
| 0
| 0
| 0.145842
| 0.044211
| 0
| 0
| 0
| 0
| 0.113636
| 1
| 0.068182
| false
| 0.015152
| 0.022727
| 0
| 0.113636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.