hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d2ff4a8ff294546a0560451052497ab30e23a41f
| 380
|
py
|
Python
|
tests/unit/test_helpers.py
|
mabuelhagag/hijri-converter
|
779516a06bcb3f72e4919d2deeed53d2c6db5e5b
|
[
"MIT"
] | 32
|
2019-08-07T10:36:17.000Z
|
2022-01-23T13:57:04.000Z
|
tests/unit/test_helpers.py
|
mabuelhagag/hijri-converter
|
779516a06bcb3f72e4919d2deeed53d2c6db5e5b
|
[
"MIT"
] | 8
|
2019-12-28T19:55:32.000Z
|
2021-09-06T08:31:20.000Z
|
tests/unit/test_helpers.py
|
mabuelhagag/hijri-converter
|
779516a06bcb3f72e4919d2deeed53d2c6db5e5b
|
[
"MIT"
] | 12
|
2019-06-02T19:48:01.000Z
|
2021-11-12T09:07:03.000Z
|
from hijri_converter import helpers
def test_julian_to_ordinal():
assert helpers.jdn_to_ordinal(2447977) == 726552
def test_ordinal_to_julian():
assert helpers.ordinal_to_jdn(726552) == 2447977
def test_julian_to_reduced_julian():
assert helpers.jdn_to_rjd(2456087) == 56087
def test_reduced_julian_to_julian():
assert helpers.rjd_to_jdn(56087) == 2456087
| 21.111111
| 52
| 0.778947
| 55
| 380
| 4.963636
| 0.327273
| 0.102564
| 0.208791
| 0.10989
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152439
| 0.136842
| 380
| 17
| 53
| 22.352941
| 0.679878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.444444
| true
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
961fbb60916709f046c0db38cb8b181402fd92bd
| 206
|
py
|
Python
|
test/privacyalgorithms/density.py
|
OPAL-Project/OPAL-AlgoService
|
542b39f31551a849f45f91b6d85bfa3238e21767
|
[
"MIT"
] | 1
|
2020-03-04T15:38:52.000Z
|
2020-03-04T15:38:52.000Z
|
test/privacyalgorithms/density.py
|
OPAL-Project/OPAL-AlgoService
|
542b39f31551a849f45f91b6d85bfa3238e21767
|
[
"MIT"
] | 9
|
2018-07-05T07:32:40.000Z
|
2021-10-19T00:32:07.000Z
|
test/privacyalgorithms/density.py
|
OPAL-Project/OPAL-AlgoService
|
542b39f31551a849f45f91b6d85bfa3238e21767
|
[
"MIT"
] | 2
|
2018-09-16T17:17:38.000Z
|
2019-05-09T13:25:52.000Z
|
# -*- coding: utf-8 -*-
from opalalgorithms.core import OPALPrivacy
class Density(OPALPrivacy):
def __init__(self):
pass
def __call__(self, result, params, salt):
return result;
| 17.166667
| 45
| 0.650485
| 23
| 206
| 5.478261
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006369
| 0.237864
| 206
| 11
| 46
| 18.727273
| 0.796178
| 0.101942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
824ad44f38442f804e35a7bbc5ff10f6dac70d2e
| 261
|
py
|
Python
|
tests/test_model_to_dict.py
|
boba-and-beer/vectorhub
|
fc536a59c77755f4051af37338839e24e0add5c4
|
[
"Apache-2.0"
] | 385
|
2020-10-26T13:12:11.000Z
|
2021-10-07T15:14:48.000Z
|
tests/test_model_to_dict.py
|
boba-and-beer/vectorhub
|
fc536a59c77755f4051af37338839e24e0add5c4
|
[
"Apache-2.0"
] | 24
|
2020-10-29T13:16:31.000Z
|
2021-08-31T06:47:33.000Z
|
tests/test_model_to_dict.py
|
boba-and-beer/vectorhub
|
fc536a59c77755f4051af37338839e24e0add5c4
|
[
"Apache-2.0"
] | 45
|
2020-10-29T15:25:19.000Z
|
2021-09-05T21:50:57.000Z
|
from vectorhub.auto_encoder import *
def test_get_model_definitions():
assert isinstance(get_model_definitions(json_fn=None), list)
assert isinstance(get_model_definitions(json_fn=None)[0], dict)
assert len(get_model_definitions(json_fn=None)) > 0
| 37.285714
| 67
| 0.793103
| 38
| 261
| 5.105263
| 0.5
| 0.164948
| 0.391753
| 0.35567
| 0.623711
| 0.623711
| 0.623711
| 0.463918
| 0
| 0
| 0
| 0.008621
| 0.111111
| 261
| 6
| 68
| 43.5
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
828b69d23aba38d3f5b78cb02a0691e970db1be8
| 5,619
|
py
|
Python
|
tests/test_services/test_vdc/actions.py
|
openvcloud/ays_templates
|
7ce0bd5844ccfaefae554dd0dedeab2730a365cc
|
[
"Apache-2.0"
] | null | null | null |
tests/test_services/test_vdc/actions.py
|
openvcloud/ays_templates
|
7ce0bd5844ccfaefae554dd0dedeab2730a365cc
|
[
"Apache-2.0"
] | 10
|
2017-10-25T13:23:23.000Z
|
2018-03-28T16:00:06.000Z
|
tests/test_services/test_vdc/actions.py
|
openvcloud/ays_templates
|
7ce0bd5844ccfaefae554dd0dedeab2730a365cc
|
[
"Apache-2.0"
] | null | null | null |
def init_actions_(service, args):
return {
'test_create': ['install'],
'test_delete': ['install'],
'test_routeros': ['install'],
'test_enable': ['enable'],
'test_disable': ['disable'],
}
##############
# dummy methods for making tests depend on the actions they test
def enable(job):
pass
def disable(job):
pass
##############
def test_create(job):
import sys
RESULT_OK = 'OK : %s '
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s'
service = job.service
try:
g8client = service.producers['g8client'][0]
config_instance = "{}_{}".format(g8client.aysrepo.name, g8client.model.data.instance)
client = j.clients.openvcloud.get(instance=config_instance, create=False, die=True, sshkey_path="/root/.ssh/ays_repos_key")
vdc = service.producers['vdc'][0]
vdc_id = vdc.model.data.cloudspaceID
cloudspace = client.api.cloudapi.cloudspaces.get(cloudspaceId=vdc_id)
if vdc.name != cloudspace['name']:
failure = vdc.name + '!=' + cloudspace['name']
service.model.data.result = RESULT_FAILED % failure
else:
service.model.data.result = RESULT_OK % 'test_create_vdc'
except Exception as e:
service.model.data.result = RESULT_ERROR % (str(sys.exc_info()[:2]) + str(e))
service.save()
def test_delete(job):
import sys
RESULT_OK = 'OK : %s '
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s'
service = job.service
try:
g8client = service.producers['g8client'][0]
config_instance = "{}_{}".format(g8client.aysrepo.name, g8client.model.data.instance)
client = j.clients.openvcloud.get(instance=config_instance, create=False, die=True, sshkey_path="/root/.ssh/ays_repos_key")
vdc = service.producers['vdc'][0]
vdc_id = vdc.model.data.cloudspaceID
cloudspace = client.api.cloudapi.cloudspaces.get(cloudspaceId=vdc_id)
if cloudspace['status'] != 'DESTROYED':
failed = 'Cloudspace was not deleted'
service.model.data.result = RESULT_FAILED % failed
else:
service.model.data.result = RESULT_OK % 'test_delete_vdc'
except Exception as e:
service.model.data.result = RESULT_ERROR % (str(sys.exc_info()[:2]) + str(e))
service.save()
def test_enable(job):
import sys
RESULT_OK = 'OK : %s '
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s'
service = job.service
try:
g8client = service.producers['g8client'][0]
config_instance = "{}_{}".format(g8client.aysrepo.name, g8client.model.data.instance)
client = j.clients.openvcloud.get(instance=config_instance, create=False, die=True, sshkey_path="/root/.ssh/ays_repos_key")
vdc = service.producers['vdc'][0]
vdc_id = vdc.model.data.cloudspaceID
cloudspace = client.api.cloudapi.cloudspaces.get(cloudspaceId=vdc_id)
# check if vdc is enabled
if cloudspace['status'] != 'DEPLOYED':
failure = 'vdc is not deployed'
service.model.data.result = RESULT_FAILED % failure
else:
service.model.data.result = RESULT_OK % 'test_enable_vdc'
except Exception as e:
service.model.data.result = RESULT_ERROR % (str(sys.exc_info()[:2]) + str(e))
service.save()
def test_disable(job):
import sys
RESULT_OK = 'OK : %s '
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s'
service = job.service
try:
g8client = service.producers['g8client'][0]
config_instance = "{}_{}".format(g8client.aysrepo.name, g8client.model.data.instance)
client = j.clients.openvcloud.get(instance=config_instance, create=False, die=True, sshkey_path="/root/.ssh/ays_repos_key")
vdc = service.producers['vdc'][0]
vdc_id = vdc.model.data.cloudspaceID
cloudspace = client.api.cloudapi.cloudspaces.get(cloudspaceId=vdc_id)
# check if vdc is disabled
if cloudspace['status'] != 'DISABLED':
failure = 'vdc is not disabled'
service.model.data.result = RESULT_FAILED % failure
else:
service.model.data.result = RESULT_OK % 'test_disable_vdc'
except Exception as e:
service.model.data.result = RESULT_ERROR % (str(sys.exc_info()[:2]) + str(e))
service.save()
def test_routeros(job):
import sys
import requests
RESULT_OK = 'OK : %s '
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s'
service = job.service
g8client = service.producers['g8client'][0]
config_instance = "{}_{}".format(g8client.aysrepo.name, g8client.model.data.instance)
client = j.clients.openvcloud.get(instance=config_instance, create=False, die=True, sshkey_path="/root/.ssh/ays_repos_key")
vdc = service.producers['vdc'][0]
vdc_id = vdc.model.data.cloudspaceID
cloud_space = client.api.cloudapi.cloudspaces.get(cloudspaceId=vdc_id)
try:
# Check if we can reach the routeros page after running the routeros script
requests.get('http://{ip}:9080'.format(ip=cloud_space['publicipaddress']))
service.model.data.result = RESULT_OK % 'test_routeros'
except requests.ConnectionError:
failure = "Couldn't reach router os web page"
service.model.data.result = RESULT_FAILED % failure
except Exception as e:
service.model.data.result = RESULT_ERROR % (str(sys.exc_info()[:2]) + str(e))
finally:
client.api.cloudapi.cloudspaces.delete(cloudspaceId=vdc_id)
service.save()
| 35.789809
| 131
| 0.641395
| 691
| 5,619
| 5.072359
| 0.154848
| 0.064194
| 0.068474
| 0.094151
| 0.77632
| 0.77632
| 0.766619
| 0.745221
| 0.720685
| 0.720685
| 0
| 0.008922
| 0.222104
| 5,619
| 156
| 132
| 36.019231
| 0.792954
| 0.032924
| 0
| 0.7
| 0
| 0
| 0.128124
| 0.022218
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0.016667
| 0.05
| 0.008333
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
82b048fc6d2c604a335dd97e46846152df37e1f1
| 95
|
py
|
Python
|
portfolio/models.py
|
xinging-birds/personalized-portfolio
|
8888cf744980c480635ac800a0760379c42439b3
|
[
"MIT"
] | 1
|
2019-03-17T15:39:08.000Z
|
2019-03-17T15:39:08.000Z
|
portfolio/models.py
|
xinging-birds/personalized-portfolio
|
8888cf744980c480635ac800a0760379c42439b3
|
[
"MIT"
] | 1
|
2020-09-25T23:29:11.000Z
|
2020-09-25T23:29:11.000Z
|
portfolio/models.py
|
xinging-birds/personalized-portfolio
|
8888cf744980c480635ac800a0760379c42439b3
|
[
"MIT"
] | 1
|
2019-02-24T23:18:03.000Z
|
2019-02-24T23:18:03.000Z
|
from django.db import models
# Create your models here.
#serializer converts to and from JSON
| 19
| 37
| 0.789474
| 15
| 95
| 5
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168421
| 95
| 4
| 38
| 23.75
| 0.949367
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
82c16f6761b904d3361165a12c953ecf4d1a0f1f
| 162
|
py
|
Python
|
server/api/utilities/helpers/response.py
|
koiic/favorite-things
|
f34944dfbc78e454c6245b76f036f6dd24d018eb
|
[
"MIT"
] | null | null | null |
server/api/utilities/helpers/response.py
|
koiic/favorite-things
|
f34944dfbc78e454c6245b76f036f6dd24d018eb
|
[
"MIT"
] | 5
|
2020-07-17T10:43:13.000Z
|
2022-02-26T12:16:12.000Z
|
server/api/utilities/helpers/response.py
|
koiic/favorite-things
|
f34944dfbc78e454c6245b76f036f6dd24d018eb
|
[
"MIT"
] | null | null | null |
def response(status, message, data, status_code=200):
return {
"status": status,
"message": message,
"data": data,
}, status_code
| 23.142857
| 53
| 0.574074
| 17
| 162
| 5.352941
| 0.470588
| 0.285714
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026087
| 0.290123
| 162
| 6
| 54
| 27
| 0.765217
| 0
| 0
| 0
| 0
| 0
| 0.104938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0.166667
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
82cca2a409c8f42d8df7f8b06aa382d2241e71c5
| 2,533
|
py
|
Python
|
setup.py
|
slaclab/epix-hr-2M-10k
|
33b74fb2d6ed9ecca38ed1a1818cfb778beb9d50
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2022-01-27T00:40:46.000Z
|
2022-01-27T00:40:46.000Z
|
setup.py
|
slaclab/epix-hr-2M-10k
|
33b74fb2d6ed9ecca38ed1a1818cfb778beb9d50
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2021-08-28T00:13:45.000Z
|
2021-09-02T15:29:29.000Z
|
setup.py
|
slaclab/epix-hr-single-10k
|
0c43b2a160442c485a94feceaab9aac3fc5e47b4
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from setuptools import setup
# use softlinks to make the various "board-support-package" submodules
# look like subpackages. Then __init__.py will modify
# sys.path so that the correct "local" versions of surf etc. are
# picked up. A better approach would be using relative imports
# in the submodules, but that's more work. -cpo
setup(
name = 'epix_hr_single_10k',
description = 'Epix HR package',
packages = [
'epix_hr_single_10k',
'epix_hr_single_10k.ePixAsics',
'epix_hr_single_10k.ePixFpga',
'epix_hr_single_10k.ePixViewer',
'epix_hr_single_10k.XilinxKcu1500Pgp3',
'epix_hr_single_10k.surf',
'epix_hr_single_10k.surf.devices',
'epix_hr_single_10k.surf.devices.analog_devices',
'epix_hr_single_10k.surf.devices.cypress',
'epix_hr_single_10k.surf.devices.intel',
'epix_hr_single_10k.surf.devices.linear',
'epix_hr_single_10k.surf.devices.microchip',
'epix_hr_single_10k.surf.devices.micron',
'epix_hr_single_10k.surf.devices.nxp',
'epix_hr_single_10k.surf.devices.silabs',
'epix_hr_single_10k.surf.devices.ti',
'epix_hr_single_10k.surf.devices.transceivers',
'epix_hr_single_10k.surf.ethernet',
'epix_hr_single_10k.surf.ethernet.gige',
'epix_hr_single_10k.surf.ethernet.mac',
'epix_hr_single_10k.surf.ethernet.ten_gig',
'epix_hr_single_10k.surf.ethernet.udp',
'epix_hr_single_10k.surf.ethernet.xaui',
'epix_hr_single_10k.surf.misc',
'epix_hr_single_10k.surf.protocols',
'epix_hr_single_10k.surf.protocols.batcher',
'epix_hr_single_10k.surf.protocols.clink',
'epix_hr_single_10k.surf.protocols.i2c',
'epix_hr_single_10k.surf.protocols.jesd204b',
'epix_hr_single_10k.surf.protocols.pgp',
'epix_hr_single_10k.surf.protocols.rssi',
'epix_hr_single_10k.surf.protocols.ssi',
'epix_hr_single_10k.surf.xilinx',
],
package_dir = {
'epix_hr_single_10k': 'software/python',
'epix_hr_single_10k.surf': 'firmware/submodules/surf/python/surf',
'epix_hr_single_10k.axi-pcie-core': 'firmware/submodules/axi-pcie-core/python/axipcie',
'epix_hr_single_10k.ePixAsics': 'software/python/ePixAsics',
'epix_hr_single_10k.ePixFpga': 'software/python/ePixFpga',
'epix_hr_single_10k.ePixViewer': 'software/python/ePixViewer',
'epix_hr_single_10k.XilinxKcu1500Pgp3': 'software/python/XilinxKcu1500Pgp3',
}
)
| 45.232143
| 95
| 0.703119
| 343
| 2,533
| 4.813411
| 0.279883
| 0.152635
| 0.298001
| 0.372502
| 0.611145
| 0.529982
| 0.039976
| 0
| 0
| 0
| 0
| 0.048863
| 0.183972
| 2,533
| 55
| 96
| 46.054545
| 0.749879
| 0.115673
| 0
| 0
| 0
| 0
| 0.713966
| 0.676365
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.020408
| 0
| 0.020408
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7d54dedb0a3561e63d5b55f92a6e799b4f5d1dd6
| 132
|
py
|
Python
|
xtract_sdk/client/__init__.py
|
xtracthub/xtract-sdk
|
fe88cd440591110d60e74c101a0b49d2385ac0a4
|
[
"MIT"
] | 4
|
2020-08-06T19:11:29.000Z
|
2021-12-01T20:08:02.000Z
|
xtract_sdk/client/__init__.py
|
xtracthub/xtract-sdk
|
fe88cd440591110d60e74c101a0b49d2385ac0a4
|
[
"MIT"
] | 10
|
2020-08-06T15:58:28.000Z
|
2022-01-12T19:06:36.000Z
|
xtract_sdk/client/__init__.py
|
xtracthub/xtract-sdk
|
fe88cd440591110d60e74c101a0b49d2385ac0a4
|
[
"MIT"
] | null | null | null |
from .routes import XTRACT_CRAWLER, XTRACT_CRAWLER_DEV, XTRACT_SERVICE, XTRACT_SERVICE_DEV
from .xtract_client import XtractClient
| 33
| 90
| 0.871212
| 18
| 132
| 6
| 0.5
| 0.240741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 132
| 3
| 91
| 44
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7d70788703fb10419ac9aeb391e2db78a901b6b0
| 223
|
py
|
Python
|
templates/qualityScoreVisualizer/views.py
|
mocklee/PHRED-Quality-Visualizer
|
a7b875e56efe4d26bb75b9092809ad920ada01cf
|
[
"MIT"
] | null | null | null |
templates/qualityScoreVisualizer/views.py
|
mocklee/PHRED-Quality-Visualizer
|
a7b875e56efe4d26bb75b9092809ad920ada01cf
|
[
"MIT"
] | 4
|
2019-11-01T20:58:38.000Z
|
2021-03-09T07:48:31.000Z
|
templates/qualityScoreVisualizer/views.py
|
mocklee/PHRED-Quality-Visualizer
|
a7b875e56efe4d26bb75b9092809ad920ada01cf
|
[
"MIT"
] | null | null | null |
from flask import render_template, Blueprint
qualityScoreVisualizer_blueprint = Blueprint('qualityScoreVisualizer', __name__)
@qualityScoreVisualizer_blueprint.route('/')
def index():
return render_template('index.html')
| 31.857143
| 80
| 0.829596
| 21
| 223
| 8.428571
| 0.619048
| 0.158192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071749
| 223
| 7
| 81
| 31.857143
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0.147321
| 0.098214
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.6
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
|
0
| 5
|
7d7bbf249cf2557b8daa18a5c84df9a15622f2a0
| 56
|
py
|
Python
|
HEPTools/__init__.py
|
weissercn/HEPtools
|
e724e33c81e3738850be30dd15005feb5fd54a0a
|
[
"MIT"
] | null | null | null |
HEPTools/__init__.py
|
weissercn/HEPtools
|
e724e33c81e3738850be30dd15005feb5fd54a0a
|
[
"MIT"
] | null | null | null |
HEPTools/__init__.py
|
weissercn/HEPtools
|
e724e33c81e3738850be30dd15005feb5fd54a0a
|
[
"MIT"
] | null | null | null |
import sys
import os
os.environ["HEPTools"]=os.getcwd()
| 14
| 34
| 0.75
| 9
| 56
| 4.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089286
| 56
| 3
| 35
| 18.666667
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7d876f3595d6730ce33e6677704b573bdc71cf37
| 9,459
|
py
|
Python
|
vines/geometry/geometry.py
|
AndrewGibbs/vines
|
4c4f75adc8f601f06e2ab12fbaa95a047ef4354e
|
[
"MIT"
] | 4
|
2020-07-05T19:01:52.000Z
|
2022-03-07T18:27:21.000Z
|
vines/geometry/geometry.py
|
AndrewGibbs/vines
|
4c4f75adc8f601f06e2ab12fbaa95a047ef4354e
|
[
"MIT"
] | null | null | null |
vines/geometry/geometry.py
|
AndrewGibbs/vines
|
4c4f75adc8f601f06e2ab12fbaa95a047ef4354e
|
[
"MIT"
] | 5
|
2020-07-31T13:57:44.000Z
|
2022-02-02T08:49:03.000Z
|
from numba import njit, prange
import numpy as np
def generatedomain(res, dx, dy, dz):
# Get minimum number of voxels in each direction
nx = np.int(np.max((1.0, np.round(dx / res))))
ny = np.int(np.max((1.0, np.round(dy / res))))
nz = np.int(np.max((1.0, np.round(dz / res))))
Dx = nx * res
Dy = ny * res
Dz = nz * res
x = np.arange(0.0, Dx, res) + (-Dx / 2 + res/2)
y = np.arange(0.0, Dy, res) + (-Dy / 2 + res/2)
z = np.arange(0.0, Dz, res) + (-Dz / 2 + res/2)
r, L, M, N = grid3d(x, y, z)
return r, L, M, N
@njit(parallel=True)
def grid3d(x, y, z):
# define the dimensions
L = x.shape[0]
M = y.shape[0]
N = z.shape[0]
# allocate space
r = np.zeros((L, M, N, 3))
for ix in prange(0, L):
xx = x[ix]
for iy in range(0, M):
yy = y[iy]
for iz in range(0, N):
zz = z[iz]
r[ix, iy, iz, :] = np.array((xx, yy, zz))
return r, L, M, N
def generatedomain2d(res, dx, dy):
# Get minimum number of voxels in each direction
nx = np.int(np.max((1.0, np.round(dx / res))))
ny = np.int(np.max((1.0, np.round(dy / res))))
Dx = nx * res
Dy = ny * res
x = np.arange(0.0, Dx, res) + (-Dx / 2 + res/2)
y = np.arange(0.0, Dy, res) + (-Dy / 2 + res/2)
r, L, M = grid2d(x, y)
return r, L, M
@njit(parallel=True)
def grid2d(x, y):
# define the dimensions
L = x.shape[0]
M = y.shape[0]
# allocate space
r = np.zeros((L, M, 2))
for ix in prange(0, L):
xx = x[ix]
for iy in range(0, M):
yy = y[iy]
r[ix, iy, :] = np.array((xx, yy))
return r, L, M
def koch_snowflake(order, scale=1):
"""
Return two lists x, y of point coordinates of the Koch snowflake.
Arguments
---------
order : int
The recursion depth.
scale : float
The extent of the snowflake (edge length of the base triangle).
"""
def _koch_snowflake_complex(order):
if order == 0:
# initial triangle
angles = np.array([0, 120, 240]) + 90
return scale * np.exp(np.deg2rad(angles) * 1j)
# return scale / np.sqrt(3) * np.exp(np.deg2rad(angles) * 1j)
else:
ZR = 0.5 - 0.5j * np.sqrt(3) / 3
p1 = _koch_snowflake_complex(order - 1) # start points
p2 = np.roll(p1, shift=-1) # end points
dp = p2 - p1 # connection vectors
new_points = np.empty(len(p1) * 4, dtype=np.complex128)
new_points[::4] = p1
new_points[1::4] = p1 + dp / 3
new_points[2::4] = p1 + dp * ZR
new_points[3::4] = p1 + dp / 3 * 2
return new_points
points = _koch_snowflake_complex(order)
x, y = points.real, points.imag
# Stick coordinates into an array (useful for contains_points function)
P = np.zeros((x.shape[0], 2), dtype=np.float64)
P[:, 0] = x
P[:, 1] = y
return x, y, P
def shape_size_param(geom, refInd, sizeParam, nPerLam, aspectRatio):
import numpy as np
from matplotlib import path
if geom in 'hex':
a = 1
b = np.sqrt(3)/2 * a
dom_x = 2 * a
dom_y = 2 * b
dom_z = a * aspectRatio
theta = np.arange(0, 7) * 2*np.pi/6
verts = a * np.exp(1j * theta)
P = np.zeros((verts.shape[0], 2), dtype=np.float64)
P[:, 0] = verts.real
P[:, 1] = verts.imag
# elif geom in 'tri':
# a = 1
# theta = np.arange(0, 4) * 2 * np.pi/6
elif geom in 'koch':
a = 1
x, y, _ = koch_snowflake(order=5, scale=a)
dom_x = np.max(x) - np.min(x)
dom_y = np.max(y) - np.min(y)
dom_z = a * aspectRatio
P = np.zeros((x.shape[0]+1, 2), dtype=np.float64)
P[:-1, 0] = x
P[:-1, 1] = y
P[-1, 0] = x[0]
P[-1, 1] = y[0]
elif geom in 'sphere':
a = 1
dom_x = 2 * a
dom_y = dom_x
dom_z = dom_x
P = [] # vertices leave blank
lambda_ext = 2 * np.pi * a / sizeParam # exterior wavelength
lambda_int = lambda_ext / np.real(refInd) # interior wavelength
# Discretise geometry into voxels
h_pref = dom_x # enforce precise discretisation in x-direction
res_temp = lambda_int / nPerLam # provisional resolution
N = np.int(np.ceil(h_pref / res_temp))
res = h_pref / N
r, L, M, N = generatedomain(res, dom_x, dom_y, dom_z)
# Determine which points lie inside shape
if geom in 'sphere':
r_sq = r[:, :, :, 0]**2 + r[:, :, :, 1]**2 + r[:, :, :, 2]**2
idx = (r_sq <= a)
# from IPython import embed; embed()
else:
# Polyhedron
points = r[:, :, :, 0:2].reshape(L*M*N, 2, order='F')
p = path.Path(P)
idx = p.contains_points(points).reshape(L, M, N, order='F')
return r, idx, res, P, lambda_ext, lambda_int
def shape(geom, refInd, lambda_ext, radius, nPerLam, aspectRatio):
import numpy as np
from matplotlib import path
if geom in 'hex':
a = radius
b = np.sqrt(3)/2 * a
dom_x = 2 * a
dom_y = 2 * b
dom_z = a * aspectRatio
theta = np.arange(0, 7) * 2*np.pi/6
verts = a * np.exp(1j * theta)
P = np.zeros((verts.shape[0], 2), dtype=np.float64)
P[:, 0] = verts.real
P[:, 1] = verts.imag
elif geom in 'koch':
a = radius
x, y, _ = koch_snowflake(order=5, scale=a)
dom_x = np.max(x) - np.min(x)
dom_y = np.max(y) - np.min(y)
dom_z = a * aspectRatio
P = np.zeros((x.shape[0]+1, 2), dtype=np.float64)
P[:-1, 0] = x
P[:-1, 1] = y
P[-1, 0] = x[0]
P[-1, 1] = y[0]
elif geom in 'sphere':
a = radius
dom_x = 2 * a
dom_y = dom_x
dom_z = dom_x
P = [] # vertices leave blank
elif geom in 'ellipsoid':
a = radius[0]
b = radius[1]
c = radius[2]
dom_x = 2 * a
dom_y = 2 * b
dom_z = 2 * c
P = [] # vertices leave blank
# lambda_ext = 2 * np.pi * a / sizeParam # exterior wavelength
lambda_int = lambda_ext / np.real(refInd) # interior wavelength
# Discretise geometry into voxels
h_pref = dom_x # enforce precise discretisation in x-direction
res_temp = lambda_int / nPerLam # provisional resolution
N = np.int(np.ceil(h_pref / res_temp))
res = h_pref / N
r, L, M, N = generatedomain(res, dom_x, dom_y, dom_z)
# Determine which points lie inside shape
if geom in 'sphere':
r_sq = r[:, :, :, 0]**2 + r[:, :, :, 1]**2 + r[:, :, :, 2]**2
idx = (r_sq <= a**2)
# from IPython import embed; embed()
elif geom in 'ellipsoid':
r_el = (r[:, :, :, 0] / a)**2 + (r[:, :, :, 1] / b)**2 + \
(r[:, :, :, 2] / c)**2
idx = (r_el <= 1)
else:
# Polyhedron
points = r[:, :, :, 0:2].reshape(L*M*N, 2, order='F')
p = path.Path(P)
idx = p.contains_points(points).reshape(L, M, N, order='F')
return r, idx, res, P, lambda_int
def shape_2d(geom, refInd, lambda_ext, radius, nPerLam):
    """Voxelise a 2-D scatterer geometry and flag the pixels inside it.

    :param geom: geometry name: 'hex', 'koch', 'square', 'circle' or
        'ellipse'.
    :param refInd: (complex) refractive index; its real part converts the
        exterior wavelength into the interior one.
    :param lambda_ext: exterior wavelength.
    :param radius: characteristic radius; a 2-sequence (a, b) for 'ellipse'.
    :param nPerLam: number of pixels per interior wavelength.
    :return: tuple (r, idx, res, P, lambda_int) where r is the (L, M, 2)
        array of pixel centres, idx is a boolean mask of interior pixels,
        res is the achieved resolution, P holds the polygon vertices (an
        empty list for circle/ellipse) and lambda_int is the interior
        wavelength.
    :raises ValueError: if geom is not a recognised geometry name.
    """
    import numpy as np
    from matplotlib import path
    # NOTE: the original used substring tests (`geom in 'hex'`) that also
    # match partial/empty strings; exact comparison avoids silent mismatches.
    if geom == 'hex':
        a = radius
        b = np.sqrt(3)/2 * a
        dom_x = 2 * a
        dom_y = 2 * b
        # Hexagon vertices on a circle of radius a (first vertex repeated
        # to close the polygon).
        theta = np.arange(0, 7) * 2*np.pi/6
        verts = a * np.exp(1j * theta)
        P = np.zeros((verts.shape[0], 2), dtype=np.float64)
        P[:, 0] = verts.real
        P[:, 1] = verts.imag
    elif geom == 'koch':
        a = radius
        x, y, _ = koch_snowflake(order=5, scale=a)
        dom_x = np.max(x) - np.min(x)
        dom_y = np.max(y) - np.min(y)
        # Close the polygon by repeating the first vertex at the end.
        P = np.zeros((x.shape[0]+1, 2), dtype=np.float64)
        P[:-1, 0] = x
        P[:-1, 1] = y
        P[-1, 0] = x[0]
        P[-1, 1] = y[0]
    elif geom == 'square':
        a = radius
        b = np.sqrt(2)/2 * a
        dom_x = 2 * b
        dom_y = dom_x
        # Square vertices rotated by 45 degrees (first vertex repeated).
        theta = np.arange(0, 5) * 2*np.pi/4 + np.pi/4
        verts = a * np.exp(1j * theta)
        P = np.zeros((verts.shape[0], 2), dtype=np.float64)
        P[:, 0] = verts.real
        P[:, 1] = verts.imag
    elif geom == 'circle':
        a = radius
        dom_x = 2 * a
        dom_y = dom_x
        P = []  # vertices leave blank
    elif geom == 'ellipse':
        a = radius[0]
        b = radius[1]
        dom_x = 2 * a
        dom_y = 2 * b
        P = []  # vertices leave blank
    else:
        # Previously an unknown geometry caused an UnboundLocalError below;
        # fail fast with a clear message instead.
        raise ValueError("Unrecognised geometry: %r" % (geom,))
    lambda_int = lambda_ext / np.real(refInd)  # interior wavelength
    # Discretise geometry into pixels
    h_pref = dom_x  # enforce precise discretisation in x-direction
    res_temp = lambda_int / nPerLam  # provisional resolution
    # FIX: np.int was removed in NumPy 1.24; use the builtin int instead.
    L = int(np.ceil(h_pref / res_temp))
    res = h_pref / L
    r, L, M = generatedomain2d(res, dom_x, dom_y)
    # Determine which points lie inside shape
    if geom == 'circle':
        r_sq = r[:, :, 0]**2 + r[:, :, 1]**2
        idx = (r_sq <= a**2)
    elif geom == 'ellipse':
        r_el = (r[:, :, 0] / a)**2 + (r[:, :, 1] / b)**2
        idx = (r_el <= 1)
    else:
        # Polygon: test every pixel centre against the polygon P.
        points = r.reshape(L*M, 2, order='F')
        p = path.Path(P)
        idx = p.contains_points(points).reshape(L, M, order='F')
    return r, idx, res, P, lambda_int
| 29.28483
| 75
| 0.505973
| 1,504
| 9,459
| 3.102394
| 0.120346
| 0.020574
| 0.012859
| 0.025718
| 0.758466
| 0.73832
| 0.701672
| 0.695242
| 0.681955
| 0.646592
| 0
| 0.043541
| 0.339571
| 9,459
| 322
| 76
| 29.375776
| 0.703378
| 0.160588
| 0
| 0.739316
| 1
| 0
| 0.01286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.034188
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7d9c396544ae55ce37c542905df2c740a82288f9
| 280
|
py
|
Python
|
web-gui/flaskr/model/__init__.py
|
kpk-pl/wallet
|
68bcc2685d1932e4a5ec021c09854c1d6a7294a9
|
[
"MIT"
] | null | null | null |
web-gui/flaskr/model/__init__.py
|
kpk-pl/wallet
|
68bcc2685d1932e4a5ec021c09854c1d6a7294a9
|
[
"MIT"
] | null | null | null |
web-gui/flaskr/model/__init__.py
|
kpk-pl/wallet
|
68bcc2685d1932e4a5ec021c09854c1d6a7294a9
|
[
"MIT"
] | null | null | null |
from .asset import Asset, AssetCurrency
from .assetPricing import AssetPricing, AssetPricingQuotes, AssetPricingParametrized
from .assetOperation import AssetOperation, AssetOperationType
from .quote import Quote, QuoteCurrencyPair, QuoteHistoryItem
from .types import PyObjectId
| 46.666667
| 84
| 0.867857
| 26
| 280
| 9.346154
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092857
| 280
| 5
| 85
| 56
| 0.956693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7def6e1bbfe834b492146824156aa06539dc1d65
| 152
|
py
|
Python
|
lib/dataset/__init__.py
|
fourmi1995/IronExperiment-DCN
|
5292539764588e0168016c7e7b4df038358e9f38
|
[
"MIT"
] | 2
|
2020-11-10T07:37:09.000Z
|
2021-02-09T06:26:25.000Z
|
lib/dataset/__init__.py
|
fourmi1995/IronExperiment-DCN
|
5292539764588e0168016c7e7b4df038358e9f38
|
[
"MIT"
] | null | null | null |
lib/dataset/__init__.py
|
fourmi1995/IronExperiment-DCN
|
5292539764588e0168016c7e7b4df038358e9f38
|
[
"MIT"
] | 1
|
2019-08-07T02:35:16.000Z
|
2019-08-07T02:35:16.000Z
|
from imdb import IMDB
from pascal_voc import PascalVOC
from cityscape import CityScape
from coco import coco
from icdar_str_devkit import TableStr
| 25.333333
| 38
| 0.835526
| 23
| 152
| 5.391304
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164474
| 152
| 5
| 39
| 30.4
| 0.976378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
817484bc51a50d9844a86c3901993b6012192d7c
| 9,684
|
py
|
Python
|
tsfresh/convenience/bindings.py
|
TKlerx/tsfresh
|
2ad7b3a6144e948e4dae53b73c0500aac9d17dcc
|
[
"MIT"
] | 2
|
2020-04-11T12:08:22.000Z
|
2020-05-04T19:18:48.000Z
|
tsfresh/convenience/bindings.py
|
TKlerx/tsfresh
|
2ad7b3a6144e948e4dae53b73c0500aac9d17dcc
|
[
"MIT"
] | null | null | null |
tsfresh/convenience/bindings.py
|
TKlerx/tsfresh
|
2ad7b3a6144e948e4dae53b73c0500aac9d17dcc
|
[
"MIT"
] | 1
|
2020-09-07T10:01:19.000Z
|
2020-09-07T10:01:19.000Z
|
from functools import partial
from tsfresh.feature_extraction.extraction import _do_extraction_on_chunk
from tsfresh.feature_extraction.settings import ComprehensiveFCParameters
import pandas as pd
def _feature_extraction_on_chunk_helper(df, column_id, column_kind,
                                        column_sort, column_value,
                                        default_fc_parameters, kind_to_fc_parameters):
    """
    Run ``_do_extraction_on_chunk`` on a single dataframe chunk and return the
    result as a pandas dataframe with the columns [column_id, "variable",
    "value"].

    Used e.g. by the dask and spark convenience wrappers; see those functions
    for the meaning of the parameters.
    """
    if default_fc_parameters is None:
        # Fall back to the full comprehensive feature set only when no
        # per-kind settings were given either; otherwise default to nothing.
        if kind_to_fc_parameters is None:
            default_fc_parameters = ComprehensiveFCParameters()
        else:
            default_fc_parameters = {}

    sample_id = df[column_id].iloc[0]
    kind = df[column_kind].iloc[0]
    sorted_values = df.sort_values(column_sort)[column_value]

    extracted = _do_extraction_on_chunk((sample_id, kind, sorted_values),
                                        default_fc_parameters=default_fc_parameters,
                                        kind_to_fc_parameters=kind_to_fc_parameters)

    result = pd.DataFrame(extracted)
    result["value"] = result["value"].astype("double")
    return result[[column_id, "variable", "value"]]
def dask_feature_extraction_on_chunk(df, column_id, column_kind,
                                     column_sort, column_value,
                                     default_fc_parameters=None, kind_to_fc_parameters=None):
    """
    Extract features on a grouped dask dataframe given the column names and
    the extraction settings.

    Use this wrapper only when the input is a dask dataframe; all format
    handling (input and output) must happen before or after the call.

    The input must be grouped by time series id and feature kind, where each
    group chunk is a dataframe with exactly the four columns ``column_id``,
    ``column_kind``, ``column_sort`` and ``column_value`` (their meaning is
    described in :ref:`data-formats-label`). All four columns need to be
    present - if necessary, create them and fill them with dummy values, for
    example with ``df.melt(...)`` followed by ``df.groupby([...])``.

    :param df: A dask dataframe grouped by id and kind.
    :type df: dask.dataframe.groupby.DataFrameGroupBy
    :param default_fc_parameters: mapping from feature calculator names to
        parameters. Only names which are keys in this dict will be
        calculated. See the class:`ComprehensiveFCParameters` for more
        information.
    :type default_fc_parameters: dict
    :param kind_to_fc_parameters: mapping from kind names to objects of the
        same type as the ones for default_fc_parameters. A kind appearing as
        a key here uses its value instead of default_fc_parameters; kinds
        without an entry are ignored by the feature selection.
    :type kind_to_fc_parameters: dict
    :param column_id: The name of the id column to group by.
    :type column_id: str
    :param column_sort: The name of the sort column.
    :type column_sort: str
    :param column_kind: The name of the column keeping record on the kind of
        the value.
    :type column_kind: str
    :param column_value: The name for the column keeping the value itself.
    :type column_value: str
    :return: A dask dataframe with the columns ``column_id``, "variable" and
        "value"; the index is taken from the grouped dataframe.
    :rtype: dask.dataframe.DataFrame (id int64, variable object, value float64)
    """
    extraction_kwargs = dict(column_id=column_id, column_kind=column_kind,
                             column_sort=column_sort, column_value=column_value,
                             default_fc_parameters=default_fc_parameters,
                             kind_to_fc_parameters=kind_to_fc_parameters)
    feature_extraction = partial(_feature_extraction_on_chunk_helper,
                                 **extraction_kwargs)
    # Tell dask the schema of each per-group result up front.
    result_meta = {column_id: 'int64', 'variable': 'object', 'value': 'float64'}
    return df.apply(feature_extraction, meta=result_meta)
def spark_feature_extraction_on_chunk(df, column_id, column_kind,
                                      column_sort, column_value,
                                      default_fc_parameters, kind_to_fc_parameters=None):
    """
    Extract features on a grouped spark dataframe given the column names and
    the extraction settings.

    Use this wrapper only when the input is a spark dataframe; all format
    handling (input and output) must happen before or after the call.

    The input must be grouped by time series id and feature kind, where each
    group chunk is a dataframe with exactly the four columns ``column_id``,
    ``column_kind``, ``column_sort`` and ``column_value`` (their meaning is
    described in :ref:`data-formats-label`). All four columns need to be
    present - if necessary, create them and fill them with dummy values
    before grouping, e.g. with ``df.groupby(["id", "kind"])``.

    :param df: A spark dataframe grouped by id and kind.
    :type df: pyspark.sql.group.GroupedData
    :param default_fc_parameters: mapping from feature calculator names to
        parameters. Only names which are keys in this dict will be
        calculated. See the class:`ComprehensiveFCParameters` for more
        information.
    :type default_fc_parameters: dict
    :param kind_to_fc_parameters: mapping from kind names to objects of the
        same type as the ones for default_fc_parameters. A kind appearing as
        a key here uses its value instead of default_fc_parameters; kinds
        without an entry are ignored by the feature selection.
    :type kind_to_fc_parameters: dict
    :param column_id: The name of the id column to group by.
    :type column_id: str
    :param column_sort: The name of the sort column.
    :type column_sort: str
    :param column_kind: The name of the column keeping record on the kind of
        the value.
    :type column_kind: str
    :param column_value: The name for the column keeping the value itself.
    :type column_value: str
    :return: A spark dataframe with the columns ``column_id``, "variable" and
        "value".
    :rtype: pyspark.sql.DataFrame[id: bigint, variable: string, value: double]
    """
    # Imported locally so the module does not require pyspark unless this
    # function is actually used.
    from pyspark.sql.functions import pandas_udf, PandasUDFType
    extraction_kwargs = dict(column_id=column_id, column_kind=column_kind,
                             column_sort=column_sort, column_value=column_value,
                             default_fc_parameters=default_fc_parameters,
                             kind_to_fc_parameters=kind_to_fc_parameters)
    feature_extraction = partial(_feature_extraction_on_chunk_helper,
                                 **extraction_kwargs)
    # Declare the output schema of the grouped-map UDF.
    result_schema = f"{column_id} long, variable string, value double"
    feature_extraction_udf = pandas_udf(result_schema,
                                        PandasUDFType.GROUPED_MAP)(feature_extraction)
    return df.apply(feature_extraction_udf)
| 45.895735
| 108
| 0.673069
| 1,277
| 9,684
| 4.90603
| 0.155051
| 0.080447
| 0.069753
| 0.043097
| 0.795211
| 0.763128
| 0.749721
| 0.749721
| 0.739186
| 0.729769
| 0
| 0.00166
| 0.253511
| 9,684
| 210
| 109
| 46.114286
| 0.864988
| 0.668525
| 0
| 0.368421
| 0
| 0
| 0.039037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.131579
| 0
| 0.289474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8183491fa38b2f08aeda88afbf532a73aeabcd1e
| 325
|
py
|
Python
|
mypo/rebalancer/__init__.py
|
sonesuke/my-portfolio
|
4fd19fdee8a0aa13194cab0df53c83218c5664e3
|
[
"MIT"
] | 2
|
2021-03-14T00:14:25.000Z
|
2021-09-04T16:26:02.000Z
|
mypo/rebalancer/__init__.py
|
sonesuke/my-portfolio
|
4fd19fdee8a0aa13194cab0df53c83218c5664e3
|
[
"MIT"
] | 104
|
2021-02-21T08:11:11.000Z
|
2021-09-26T03:02:27.000Z
|
mypo/rebalancer/__init__.py
|
sonesuke/mypo
|
4fd19fdee8a0aa13194cab0df53c83218c5664e3
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from mypo.rebalancer.base_rebalancer import BaseRebalancer
from mypo.rebalancer.monthly_rebalancer import MonthlyRebalancer
from mypo.rebalancer.no_rebalancer import NoRebalancer
from mypo.rebalancer.plain_rebalancer import PlainRebalancer
from mypo.rebalancer.threshold_rebalancer import ThresholdRebalancer
| 40.625
| 68
| 0.889231
| 37
| 325
| 7.675676
| 0.432432
| 0.140845
| 0.316901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003322
| 0.073846
| 325
| 7
| 69
| 46.428571
| 0.940199
| 0.036923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
819ac26e64de6444dc2893e5fdcd9ae4cd428913
| 89
|
py
|
Python
|
nsd1805/python/day20/mysite/market/admin.py
|
MrWangwf/nsd1806
|
069e993b0bb64cb21adc2a25aa56f6da674453bc
|
[
"Apache-2.0"
] | null | null | null |
nsd1805/python/day20/mysite/market/admin.py
|
MrWangwf/nsd1806
|
069e993b0bb64cb21adc2a25aa56f6da674453bc
|
[
"Apache-2.0"
] | null | null | null |
nsd1805/python/day20/mysite/market/admin.py
|
MrWangwf/nsd1806
|
069e993b0bb64cb21adc2a25aa56f6da674453bc
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import Userdb
# Expose the Userdb model in the Django admin interface.
admin.site.register(Userdb)
| 17.8
| 32
| 0.820225
| 13
| 89
| 5.615385
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 89
| 4
| 33
| 22.25
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c4fbd87e66ed26b38272aa557112b1a5756b6193
| 186
|
py
|
Python
|
tests/test_keyring.py
|
gzm55/sagecipher
|
9fcc8a9701719a0391034812c56defa5a6bfa5e3
|
[
"Apache-2.0"
] | 7
|
2016-05-04T12:25:19.000Z
|
2022-03-17T05:21:03.000Z
|
tests/test_keyring.py
|
gzm55/sagecipher
|
9fcc8a9701719a0391034812c56defa5a6bfa5e3
|
[
"Apache-2.0"
] | 11
|
2021-02-15T07:00:16.000Z
|
2021-09-14T15:37:19.000Z
|
tests/test_keyring.py
|
gzm55/sagecipher
|
9fcc8a9701719a0391034812c56defa5a6bfa5e3
|
[
"Apache-2.0"
] | 5
|
2018-07-22T09:16:06.000Z
|
2021-09-14T15:20:31.000Z
|
from keyring.testing.backend import BackendBasicTests
from sagecipher.keyring import Keyring
class TestKeyring(BackendBasicTests):
    """Run keyring's standard backend test suite against sagecipher's Keyring."""
    def init_keyring(self):
        # Hook called by BackendBasicTests to construct the backend under test.
        return Keyring()
| 23.25
| 53
| 0.790323
| 20
| 186
| 7.3
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150538
| 186
| 7
| 54
| 26.571429
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
f217a2117a341e3bf603f32bd0a3043e989be3d1
| 24
|
py
|
Python
|
python/testData/completion/attributeOfIndirectlyImportedPackage/a.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/attributeOfIndirectlyImportedPackage/a.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/attributeOfIndirectlyImportedPackage/a.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
import pkg1.m1
pkg1.foo
| 8
| 14
| 0.791667
| 5
| 24
| 3.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.125
| 24
| 3
| 15
| 8
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1ee76fa34d9004501bd540e9952f185e7080e623
| 17
|
py
|
Python
|
hg.py
|
yagneshyaggiifj/yag
|
0afcf598639fe58cd2016add4776756e5da74e77
|
[
"bzip2-1.0.6"
] | null | null | null |
hg.py
|
yagneshyaggiifj/yag
|
0afcf598639fe58cd2016add4776756e5da74e77
|
[
"bzip2-1.0.6"
] | null | null | null |
hg.py
|
yagneshyaggiifj/yag
|
0afcf598639fe58cd2016add4776756e5da74e77
|
[
"bzip2-1.0.6"
] | null | null | null |
print(word[:])
| 8.5
| 16
| 0.529412
| 2
| 17
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 17
| 1
| 17
| 17
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
1ef58b93931d867e71a73b0d78a200ee967ea72d
| 203
|
py
|
Python
|
moto/s3control/__init__.py
|
andormarkus/moto
|
67cda6d7d6f42118ccd7e2170e7ff0a1f92fa6a6
|
[
"Apache-2.0"
] | null | null | null |
moto/s3control/__init__.py
|
andormarkus/moto
|
67cda6d7d6f42118ccd7e2170e7ff0a1f92fa6a6
|
[
"Apache-2.0"
] | null | null | null |
moto/s3control/__init__.py
|
andormarkus/moto
|
67cda6d7d6f42118ccd7e2170e7ff0a1f92fa6a6
|
[
"Apache-2.0"
] | null | null | null |
"""s3control module initialization; sets value for base decorator."""
from .models import s3control_backends
from ..core.models import base_decorator
# Decorator built from the s3control backends; following moto's convention it
# is presumably applied to tests to mock the S3 Control service — confirm
# against base_decorator's definition.
mock_s3control = base_decorator(s3control_backends)
| 33.833333
| 69
| 0.827586
| 25
| 203
| 6.52
| 0.56
| 0.239264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021858
| 0.098522
| 203
| 5
| 70
| 40.6
| 0.868852
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1efd2aa95e152cd185de1f84418bb5cf2edfc9fd
| 246
|
py
|
Python
|
models/credentials.py
|
Astatine404/spiritus
|
1ae04aa17e57a86970e85af2c8ae7ba69d2b6ced
|
[
"MIT"
] | 1
|
2018-02-10T13:07:24.000Z
|
2018-02-10T13:07:24.000Z
|
models/credentials.py
|
Astatine404/spiritus
|
1ae04aa17e57a86970e85af2c8ae7ba69d2b6ced
|
[
"MIT"
] | null | null | null |
models/credentials.py
|
Astatine404/spiritus
|
1ae04aa17e57a86970e85af2c8ae7ba69d2b6ced
|
[
"MIT"
] | 3
|
2018-02-27T10:27:07.000Z
|
2020-09-23T13:18:04.000Z
|
# SECURITY WARNING: hard-coded API credentials committed to source control
# (the CONSUMER_*/ACCESS_* naming suggests Twitter API keys — verify). These
# should be revoked and loaded from environment variables or a secrets store
# rather than stored in the repository.
CONSUMER_KEY = 'WVQrIJcorH11hQoP6mHKvXIZJ'
CONSUMER_SECRET = 'Ui3V1dEsa5owJnhu3nLNyqdz2hFf6HmvICPObiShmkzBszKnah'
ACCESS_TOKEN = '218405160-0iabe9XqpwAJ4z4BYsaXwH3ydKpFZhnzj5xpHxpI'
ACCESS_SECRET = 'PdPNfcgkc5x7TO54cxVjGOjSrqY2jbcaayV46ys9IkLj3'
| 49.2
| 70
| 0.898374
| 13
| 246
| 16.692308
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141026
| 0.04878
| 246
| 4
| 71
| 61.5
| 0.786325
| 0
| 0
| 0
| 0
| 0
| 0.691057
| 0.691057
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
48418ac72aad9d8a7040990d2c095aa1c387540a
| 150
|
py
|
Python
|
api/admin.py
|
OtchereDev/Omdbapi-Clone
|
411c89944a1d6e7ba77e225a8ae263db08d6150f
|
[
"MIT"
] | null | null | null |
api/admin.py
|
OtchereDev/Omdbapi-Clone
|
411c89944a1d6e7ba77e225a8ae263db08d6150f
|
[
"MIT"
] | null | null | null |
api/admin.py
|
OtchereDev/Omdbapi-Clone
|
411c89944a1d6e7ba77e225a8ae263db08d6150f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Movie, Genre
# Register your models here.
# Expose the Genre and Movie models in the Django admin interface.
admin.site.register(Genre)
admin.site.register(Movie)
| 21.428571
| 32
| 0.8
| 22
| 150
| 5.454545
| 0.545455
| 0.15
| 0.283333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113333
| 150
| 7
| 33
| 21.428571
| 0.902256
| 0.173333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4857926c931e7c75ea97b79f73b32dfe9a736cf5
| 220
|
py
|
Python
|
safe_transaction_service/history/apps.py
|
bigman1208000/safe-transaction-service
|
283a278eeb06aaf9227047b96816e63d2a7b9466
|
[
"MIT"
] | 5
|
2021-01-28T17:41:42.000Z
|
2021-11-14T17:09:18.000Z
|
safe_transaction_service/history/apps.py
|
bigman1208000/safe-transaction-service
|
283a278eeb06aaf9227047b96816e63d2a7b9466
|
[
"MIT"
] | 2
|
2021-06-09T17:58:44.000Z
|
2021-06-10T19:42:32.000Z
|
safe_transaction_service/history/apps.py
|
bigman1208000/safe-transaction-service
|
283a278eeb06aaf9227047b96816e63d2a7b9466
|
[
"MIT"
] | 5
|
2021-04-06T17:20:02.000Z
|
2022-01-13T10:58:08.000Z
|
from django.apps import AppConfig
class HistoryConfig(AppConfig):
    """Django application configuration for the transaction-history app."""
    name = 'safe_transaction_service.history'
    verbose_name = 'Safe Transaction Service'
    def ready(self):
        # Importing the module registers the app's signal handlers as a side
        # effect; `noqa` suppresses the unused-import lint warning.
        from . import signals  # noqa
| 22
| 45
| 0.718182
| 25
| 220
| 6.2
| 0.72
| 0.103226
| 0.245161
| 0.335484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209091
| 220
| 9
| 46
| 24.444444
| 0.890805
| 0.018182
| 0
| 0
| 0
| 0
| 0.261682
| 0.149533
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
48655129b1c9e8c0abc263521c56f22ff38665ba
| 155
|
py
|
Python
|
python/databp/core/svc/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/databp/core/svc/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/databp/core/svc/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
from .export_mapped_events import ExportMappedEvents
from .map_raw_events import MapRawEvents
from .query_bluepages_endpoint import QueryBluepagesEndpoint
| 38.75
| 60
| 0.903226
| 18
| 155
| 7.444444
| 0.722222
| 0.179104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077419
| 155
| 3
| 61
| 51.666667
| 0.937063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4865c57296b71f54169cb336063a8ac0aa8718e8
| 41,539
|
py
|
Python
|
tests/changes/backends/jenkins/test_builder.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 443
|
2015-01-03T16:28:39.000Z
|
2021-04-26T16:39:46.000Z
|
tests/changes/backends/jenkins/test_builder.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 12
|
2015-07-30T19:07:16.000Z
|
2016-11-07T23:11:21.000Z
|
tests/changes/backends/jenkins/test_builder.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 47
|
2015-01-09T10:04:00.000Z
|
2020-11-18T17:58:19.000Z
|
from __future__ import absolute_import
import mock
import os.path
import responses
import pytest
import re
import time
from flask import current_app
from uuid import UUID
from changes.config import db, redis
from changes.constants import Status, Result
from changes.lib.artifact_store_lib import ArtifactState
from changes.lib.artifact_store_mock import ArtifactStoreMock
from changes.models.artifact import Artifact
from changes.models.failurereason import FailureReason
from changes.models.filecoverage import FileCoverage
from changes.models.job import Job
from changes.models.log import LogSource
from changes.models.patch import Patch
from changes.models.test import TestCase
from changes.models.testartifact import TestArtifact
from changes.backends.jenkins.builder import JenkinsBuilder, MASTER_BLACKLIST_KEY, JENKINS_LOG_NAME
from changes.testutils import (
BackendTestCase, eager_tasks, SAMPLE_DIFF, SAMPLE_XUNIT, SAMPLE_COVERAGE,
SAMPLE_XUNIT_TESTARTIFACTS
)
class BaseTestCase(BackendTestCase):
    """Shared fixture base for the Jenkins builder tests."""

    builder_cls = JenkinsBuilder
    builder_options = {
        'master_urls': ['http://jenkins.example.com'],
        'diff_urls': ['http://jenkins-diff.example.com'],
        'job_name': 'server',
    }

    def setUp(self):
        # Create the project and clear the artifact-store mock before the
        # parent class performs its own setup.
        self.project = self.create_project()
        ArtifactStoreMock.reset()
        super(BaseTestCase, self).setUp()

    def get_builder(self, **options):
        """Instantiate the builder with the class defaults, overridden by *options*."""
        merged = dict(self.builder_options)
        merged.update(options)
        return self.builder_cls(app=current_app, **merged)

    def load_fixture(self, filename):
        """Read a fixture file (path relative to this test module) as bytes."""
        fixture_path = os.path.join(os.path.dirname(__file__), filename)
        with open(fixture_path, 'rb') as fp:
            return fp.read()
class CreateBuildTest(BaseTestCase):
def test_sets_cluster(self):
job_id = '81d1596fd4d642f4a6bdf86c45e014e8'
build = self.create_build(self.project)
job = self.create_job(
build=build,
id=UUID(job_id))
builder = self.get_builder(cluster='foobar')
with mock.patch.object(builder, 'create_jenkins_build') as create_jenkins_build:
def fake_update(step, **kwargs):
step.data.update({'master': 'fake', 'item_id': '99', 'build_no': None})
return {'queued': True}
create_jenkins_build.side_effect = fake_update
builder.create_job(job)
step = job.phases[0].steps[0]
assert step.cluster == 'foobar'
@responses.activate
def test_queued_creation(self):
job_id = '81d1596fd4d642f4a6bdf86c45e014e8'
responses.add(
responses.POST, 'http://jenkins.example.com/job/server/build',
body='',
status=201)
responses.add(
responses.GET,
re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'),
body=self.load_fixture('fixtures/GET/queue_item_by_job_id.xml'))
responses.add(
responses.GET,
re.compile('http://jenkins\\.example\\.com/job/server/api/xml/\\?depth=1&xpath=/queue/item\\[action/parameter/name=%22CHANGES_BID%22%20and%20action/parameter/value=%22.*?%22\\]/id'),
status=404)
build = self.create_build(self.project)
job = self.create_job(
build=build,
id=UUID(job_id))
builder = self.get_builder()
builder.create_job(job)
step = job.phases[0].steps[0]
assert step.data == {
'build_no': None,
'item_id': '13',
'job_name': 'server',
'queued': True,
'uri': None,
'master': 'http://jenkins.example.com',
}
@responses.activate
def test_active_creation(self):
job_id = 'f9481a17aac446718d7893b6e1c6288b'
responses.add(
responses.POST, 'http://jenkins.example.com/job/server/build',
body='',
status=201)
responses.add(
responses.GET,
re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'),
status=404)
responses.add(
responses.GET,
re.compile('http://jenkins\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'),
body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml'))
build = self.create_build(self.project)
job = self.create_job(
build=build,
id=UUID(hex=job_id),
)
builder = self.get_builder()
builder.create_job(job)
step = job.phases[0].steps[0]
assert step.data == {
'build_no': '1',
'item_id': None,
'job_name': 'server',
'queued': False,
'uri': None,
'master': 'http://jenkins.example.com',
}
@responses.activate
@mock.patch.object(JenkinsBuilder, '_find_job')
def test_patch(self, find_job):
responses.add(
responses.POST, 'http://jenkins-diff.example.com/job/server/build',
body='',
status=201)
find_job.return_value = {
'build_no': '1',
'item_id': None,
'job_name': 'server',
'queued': False,
'master': 'http://jenkins-diff.example.com',
}
patch = Patch(
repository=self.project.repository,
parent_revision_sha='7ebd1f2d750064652ef5bbff72452cc19e1731e0',
diff=SAMPLE_DIFF,
)
db.session.add(patch)
source = self.create_source(self.project, patch=patch)
build = self.create_build(self.project, source=source)
job = self.create_job(
build=build,
id=UUID('81d1596fd4d642f4a6bdf86c45e014e8')
)
builder = self.get_builder()
builder.create_job(job)
@responses.activate
def test_multi_master(self):
job_id = 'f9481a17aac446718d7893b6e1c6288b'
responses.add(
responses.GET, 'http://jenkins-2.example.com/queue/api/json/',
body=self.load_fixture('fixtures/GET/queue_list_other_jobs.json'),
status=200)
responses.add(
responses.GET, 'http://jenkins.example.com/queue/api/json/',
body=self.load_fixture('fixtures/GET/queue_list.json'),
status=200)
responses.add(
responses.POST, 'http://jenkins-2.example.com/job/server/build',
body='',
status=201)
responses.add(
responses.GET,
re.compile('http://jenkins-2\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'),
status=404)
responses.add(
responses.GET,
re.compile('http://jenkins-2\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'),
body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml'))
build = self.create_build(self.project)
job = self.create_job(
build=build,
id=UUID(hex=job_id),
)
builder = self.get_builder()
builder.master_urls = [
'http://jenkins.example.com',
'http://jenkins-2.example.com',
]
builder.create_job(job)
step = job.phases[0].steps[0]
assert step.data['master'] == 'http://jenkins-2.example.com'
@responses.activate
def test_multi_master_one_bad(self):
    """If one master's queue endpoint errors, create_job should skip it
    and submit to a healthy master instead."""
    job_id = 'f9481a17aac446718d7893b6e1c6288b'
    responses.add(
        responses.GET, 'http://jenkins-2.example.com/queue/api/json/',
        body=self.load_fixture('fixtures/GET/queue_list_other_jobs.json'),
        status=200)
    # This one has a failure status.
    responses.add(
        responses.GET, 'http://jenkins.example.com/queue/api/json/',
        body='',
        status=503)
    # Only the healthy master should receive the build POST.
    responses.add(
        responses.POST, 'http://jenkins-2.example.com/job/server/build',
        body='',
        status=201)
    # Queue lookup by CHANGES_BID 404s; builder falls back to build-number
    # lookup via the freeStyleProject xpath below.
    responses.add(
        responses.GET,
        re.compile('http://jenkins-2\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'),
        status=404)
    responses.add(
        responses.GET,
        re.compile('http://jenkins-2\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'),
        body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml'))
    build = self.create_build(self.project)
    job = self.create_job(
        build=build,
        id=UUID(hex=job_id),
    )
    builder = self.get_builder()
    builder.master_urls = [
        'http://jenkins.example.com',
        'http://jenkins-2.example.com',
    ]
    builder.create_job(job)
    step = job.phases[0].steps[0]
    assert step.data['master'] == 'http://jenkins-2.example.com'
def test_pick_master_with_blacklist(self):
    """A blacklisted master must never be picked, even when listed first."""
    redis.sadd(MASTER_BLACKLIST_KEY, 'http://jenkins.example.com')
    builder = self.get_builder()
    candidate_masters = [
        'http://jenkins.example.com',
        'http://jenkins-2.example.com',
    ]
    builder.master_urls = candidate_masters
    picked = builder._pick_master('job1')
    assert picked == 'http://jenkins-2.example.com'
@responses.activate
def test_jobstep_replacement(self):
    """create_job(replaces=...) should add a replacement JobStep to the
    same job/phase and link it from the failed step."""
    job_id = 'f9481a17aac446718d7893b6e1c6288b'
    responses.add(
        responses.POST, 'http://jenkins.example.com/job/server/build',
        body='',
        status=201)
    # Queue lookup by CHANGES_BID 404s; builder falls back to build-number
    # lookup via the freeStyleProject xpath below.
    responses.add(
        responses.GET,
        re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'),
        status=404)
    responses.add(
        responses.GET,
        re.compile('http://jenkins\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'),
        body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml'))
    build = self.create_build(self.project)
    job = self.create_job(
        build=build,
        id=UUID(hex=job_id),
    )
    builder = self.get_builder()
    builder.create_job(job)
    # Mark the first step as infra-failed so it is eligible for replacement.
    failstep = job.phases[0].steps[0]
    failstep.result = Result.infra_failed
    failstep.status = Status.finished
    db.session.add(failstep)
    db.session.commit()
    replacement_step = builder.create_job(job, replaces=failstep)
    # new jobstep should still be part of same job/phase
    assert replacement_step.job == job
    assert replacement_step.phase == failstep.phase
    # make sure .steps actually includes the new jobstep
    assert len(failstep.phase.steps) == 2
    # make sure replacement id is correctly set
    assert failstep.replacement_id == replacement_step.id
    assert replacement_step.data == {
        'build_no': '1',
        'item_id': None,
        'job_name': 'server',
        'queued': False,
        'uri': None,
        'master': 'http://jenkins.example.com',
    }
class CancelStepTest(BaseTestCase):
    """Tests for the builder's cancel_step against queued and active builds."""

    @responses.activate
    def test_queued(self):
        """Cancelling a step still in the Jenkins queue uses cancelItem."""
        responses.add(
            responses.POST, 'http://jenkins.example.com/queue/cancelItem?id=13',
            match_querystring=True, status=302)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        # item_id with no build_no plus Status.queued marks a queued step.
        step = self.create_jobstep(phase, data={
            'item_id': 13,
            'job_name': 'server',
            'master': 'http://jenkins.example.com',
        }, status=Status.queued)
        builder = self.get_builder()
        builder.cancel_step(step)
        assert step.result == Result.aborted
        assert step.status == Status.finished

    @responses.activate
    def test_active(self):
        """Cancelling an in-progress step hits the build's /stop/ endpoint."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/stop/',
            body='', status=302)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': 2,
            'job_name': 'server',
            'master': 'http://jenkins.example.com',
        }, status=Status.in_progress)
        builder = self.get_builder()
        builder.cancel_step(step)
        assert step.status == Status.finished
        assert step.result == Result.aborted

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    def test_timeouts_sync_log(self):
        """A step cancelled due to timeout should still flush the Jenkins
        console log: a LogSource is created and the log is uploaded to the
        artifact store."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_building.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '7'},
            body='Foo bar')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        builder = self.get_builder()
        # The job is not yet complete after this sync step so no logs yet.
        builder.sync_step(step)
        source = LogSource.query.filter_by(job=job).first()
        assert source is None
        # Flag the step as timed out, then cancel; this should pull the log.
        step.data['timed_out'] = True
        builder.cancel_step(step)
        source = LogSource.query.filter_by(job=job).first()
        assert source.step == step
        assert source.name == JENKINS_LOG_NAME
        assert source.project == self.project
        assert source.date_created == step.date_started
        assert step.data.get('log_offset') == 7
        # Verify the log content landed in the per-step jenkins bucket.
        bucket_name = step.id.hex + '-jenkins'
        artifact_name = step.data['log_artifact_name']
        artifact = ArtifactStoreMock('').get_artifact(bucket_name, artifact_name)
        assert artifact.name == artifact_name
        assert artifact.path == JENKINS_LOG_NAME
        assert artifact.size == 7
        assert artifact.state == ArtifactState.UPLOADED
        assert ArtifactStoreMock('').get_artifact_content(bucket_name, artifact_name).getvalue() == 'Foo bar'
class SyncStepTest(BaseTestCase):
    """Tests for the builder's sync_step state machine (queue -> active ->
    finished) and final-artifact verification."""

    @responses.activate
    def test_waiting_in_queue(self):
        """A step whose queue item is still pending stays queued."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/',
            body=self.load_fixture('fixtures/GET/queue_details_pending.json'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': None,
            'item_id': 13,
            'job_name': 'server',
            'queued': True,
            'master': 'http://jenkins.example.com',
        })
        builder = self.get_builder()
        builder.sync_step(step)
        assert step.status == Status.queued

    @responses.activate
    def test_cancelled_in_queue(self):
        """A queue item cancelled on the Jenkins side finishes as aborted."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/',
            body=self.load_fixture('fixtures/GET/queue_details_cancelled.json'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': None,
            'item_id': 13,
            'job_name': 'server',
            'queued': True,
            'master': 'http://jenkins.example.com',
        })
        builder = self.get_builder()
        builder.sync_step(step)
        assert step.status == Status.finished
        assert step.result == Result.aborted

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    def test_queued_to_active(self):
        """Once the queue item starts building, sync_step picks up the
        Jenkins build number from the queue details."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/',
            body=self.load_fixture('fixtures/GET/queue_details_building.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_building.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '0'},
            body='')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': None,
            'item_id': 13,
            'job_name': 'server',
            'queued': True,
            'master': 'http://jenkins.example.com',
        })
        builder = self.get_builder()
        builder.sync_step(step)
        assert step.data['build_no'] == 2

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    def test_success_result(self):
        """A successful Jenkins build finishes the step with Result.passed."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_success.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '0'},
            body='')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': 2,
            'item_id': 13,
            'job_name': 'server',
            'queued': False,
            'master': 'http://jenkins.example.com',
        })
        builder = self.get_builder()
        builder.sync_step(step)
        assert step.data['build_no'] == 2
        assert step.status == Status.finished
        assert step.result == Result.passed
        assert step.date_finished is not None

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    def test_failed_result(self):
        """A failed Jenkins build finishes the step with Result.failed."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_failed.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '0'},
            body='')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        builder = self.get_builder()
        builder.sync_step(step)
        assert step.data['build_no'] == 2
        assert step.status == Status.finished
        assert step.result == Result.failed
        assert step.date_finished is not None

    def test_present_manifest(self):
        """verify_final_artifacts records no failure when manifest.json exists."""
        build = self.create_build(self.project)
        job = self.create_job(build=build)
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': 2,
            'item_id': 13,
            'job_name': 'server',
            'queued': False,
            'master': 'http://jenkins.example.com',
        })
        artifacts = [self.create_artifact(step, 'manifest.json')]
        builder = self.get_builder()
        builder.verify_final_artifacts(step, artifacts)
        assert not FailureReason.query.filter(
            FailureReason.step_id == step.id
        ).first()

    def test_missing_manifest_result(self):
        """A finished step with no manifest.json gets a missing_manifest_json
        FailureReason and is marked infra_failed."""
        build = self.create_build(self.project)
        job = self.create_job(build=build)
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, status=Status.finished)
        builder = self.get_builder()
        builder.verify_final_artifacts(step, [])
        assert FailureReason.query.filter(
            FailureReason.step_id == step.id,
            FailureReason.reason == 'missing_manifest_json'
        ).first()
        assert step.result == Result.infra_failed

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.time')
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    def test_result_slow_log(self, mock_time):
        """If console download takes too long, the builder bails out and
        truncates the stored log rather than downloading forever."""
        mock_time.time.return_value = time.time()

        def log_text_callback(request):
            # Zoom 10 minutes into the future; this should cause the console
            # downloading code to bail
            mock_time.time.return_value += 10 * 60
            data = "log\n" * 10000
            return (200, {'X-Text-Size': str(len(data))}, data)

        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_failed.json'))
        responses.add_callback(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            callback=log_text_callback)
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        builder = self.get_builder()
        builder.sync_step(step)
        assert len(step.logsources) == 1
        # The uploaded log should carry the truncation marker.
        bucket_name = step.id.hex + '-jenkins'
        artifact_name = step.data['log_artifact_name']
        assert "LOG TRUNCATED" in ArtifactStoreMock('').\
            get_artifact_content(bucket_name, artifact_name).getvalue()
class SyncGenericResultsTest(BaseTestCase):
    """Tests that sync_step also mirrors console logs and declared Jenkins
    artifacts into Changes' records."""

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock)
    def test_does_sync_log(self):
        """sync_step creates a LogSource and uploads the console text to the
        per-step jenkins bucket in the artifact store."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_failed.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '7'},
            body='Foo bar')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        builder = self.get_builder()
        builder.sync_step(step)
        source = LogSource.query.filter_by(job=job).first()
        assert source.step == step
        assert source.name == JENKINS_LOG_NAME
        assert source.project == self.project
        assert source.date_created == step.date_started
        # 7 == len('Foo bar'), matching the X-Text-Size header above.
        assert step.data.get('log_offset') == 7
        bucket_name = step.id.hex + '-jenkins'
        artifact_name = step.data['log_artifact_name']
        artifact = ArtifactStoreMock('').get_artifact(bucket_name, artifact_name)
        assert artifact.name == artifact_name
        assert artifact.path == JENKINS_LOG_NAME
        assert artifact.size == 7
        assert artifact.state == ArtifactState.UPLOADED
        assert ArtifactStoreMock('').get_artifact_content(bucket_name, artifact_name).getvalue() == 'Foo bar'

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock)
    def test_does_save_artifacts(self):
        """Artifacts listed in the Jenkins job details are persisted as
        Artifact rows keyed by their (possibly path-qualified) names."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_with_artifacts.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '0'},
            body='')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data={
            'build_no': 2,
            'item_id': 13,
            'job_name': 'server',
            'queued': False,
            'master': 'http://jenkins.example.com',
        })
        builder = self.get_builder()
        builder.sync_step(step)
        # Expected Artifact.data payloads, keyed by stored artifact name.
        expected_artifacts_data = dict()
        expected_artifacts_data['foobar.log'] = {
            "displayPath": "foobar.log",
            "fileName": "foobar.log",
            "relativePath": "artifacts/foobar.log",
        }
        expected_artifacts_data['foo/tests.xml'] = {
            "displayPath": "tests.xml",
            "fileName": "tests.xml",
            "relativePath": "artifacts/foo/tests.xml",
        }
        expected_artifacts_data['tests.xml'] = {
            "displayPath": "tests.xml",
            "fileName": "tests.xml",
            "relativePath": "artifacts/tests.xml",
        }
        for name, data in expected_artifacts_data.iteritems():
            artifact = Artifact.query.filter(
                Artifact.name == name,
                Artifact.step == step,
            ).first()
            assert artifact.data == data
class ArtifactsManagerMatchTest(BaseTestCase):
    """The artifact manager should only handle Jenkins logs when the
    fetch_jenkins_logs debug flag is enabled."""

    def test_standard(self):
        """By default, build_report.log is not processed."""
        manager = self.get_builder().get_artifact_manager(mock.Mock())
        assert manager.can_process('build_report.log') is False

    def test_fetch_jenkins(self):
        """With fetch_jenkins_logs enabled, build_report.log is processed."""
        debug_builder = self.get_builder(debug_config={'fetch_jenkins_logs': True})
        manager = debug_builder.get_artifact_manager(mock.Mock())
        assert manager.can_process('build_report.log') is True
class SyncArtifactTest(BaseTestCase):
    """Tests for sync_artifact: downloading a Jenkins artifact and routing
    it to the right handler (xunit, coverage, or plain file)."""

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock)
    def test_sync_artifact_xunit(self):
        """An xunit.xml artifact produces TestCase rows for the job."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/xunit.xml',
            body=SAMPLE_XUNIT,
            stream=True)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        artifact = self.create_artifact(step, name='xunit.xml', data={
            "displayPath": "xunit.xml",
            "fileName": "xunit.xml",
            "relativePath": "artifacts/xunit.xml"
        })
        builder = self.get_builder()
        builder.sync_artifact(artifact)
        # SAMPLE_XUNIT is expected to contain three test results.
        test_list = list(TestCase.query.filter(
            TestCase.job_id == job.id
        ))
        assert len(test_list) == 3

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock)
    def test_sync_artifact_coverage(self):
        """A coverage.xml artifact produces FileCoverage rows for the job."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/coverage.xml',
            body=SAMPLE_COVERAGE,
            stream=True)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        artifact = self.create_artifact(step, name='coverage.xml', data={
            "displayPath": "coverage.xml",
            "fileName": "coverage.xml",
            "relativePath": "artifacts/coverage.xml"
        })
        builder = self.get_builder()
        builder.sync_artifact(artifact)
        # SAMPLE_COVERAGE is expected to cover two files.
        cover_list = list(FileCoverage.query.filter(
            FileCoverage.job_id == job.id
        ))
        assert len(cover_list) == 2

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock)
    def test_sync_artifact_file(self):
        """An unrecognized artifact type syncs without raising (no special
        handler; no assertions beyond completing)."""
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/foo.bar',
            body=SAMPLE_COVERAGE,
            stream=True)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        artifact = self.create_artifact(step, name='foo.bar', data={
            "displayPath": "foo.bar",
            "fileName": "foo.bar",
            "relativePath": "artifacts/foo.bar"
        })
        builder = self.get_builder()
        builder.sync_artifact(artifact)
class SyncTestArtifactsTest(BaseTestCase):
    """Tests that per-test artifacts embedded in xunit output are stored as
    TestArtifact rows linked to their TestCase."""

    @responses.activate
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.models.testresult.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock)
    def test_sync_testartifacts(self):
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/xunit.xml',
            body=SAMPLE_XUNIT_TESTARTIFACTS,
            stream=True)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'),
            data={
                'build_no': 2,
                'item_id': 13,
                'job_name': 'server',
                'queued': False,
                'master': 'http://jenkins.example.com',
            },
        )
        phase = self.create_jobphase(job)
        step = self.create_jobstep(phase, data=job.data)
        artifact = self.create_artifact(step, name='xunit.xml', data={
            "displayPath": "xunit.xml",
            "fileName": "xunit.xml",
            "relativePath": "artifacts/xunit.xml"
        })
        builder = self.get_builder()
        builder.sync_artifact(artifact)
        # SAMPLE_XUNIT_TESTARTIFACTS embeds exactly one text artifact.
        test_artifacts = list(TestArtifact.query)
        test = TestCase.query.first()
        assert len(test_artifacts) == 1
        test_artifact = test_artifacts[0]
        assert test_artifact.file.get_file().read() == "sample_content"
        assert test_artifact.name == "sample_name.txt"
        assert str(test_artifact.type) == "Text"
        assert test_artifact.test == test
class JenkinsIntegrationTest(BaseTestCase):
    """
    This test should ensure a full cycle of tasks completes successfully within
    the jenkins builder space.
    """
    # it's possible for this test to infinitely hang due to continuous polling,
    # so let's ensure we set a timeout
    @pytest.mark.timeout(5)
    @mock.patch('changes.config.redis.lock', mock.MagicMock())
    @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock)
    @mock.patch('changes.jobs.sync_job_step.ArtifactStoreClient', ArtifactStoreMock)
    @eager_tasks
    @responses.activate
    def test_full(self):
        """End-to-end: create_job task drives the step from submission
        through queueing, building, log sync, and completion."""
        from changes.jobs.create_job import create_job

        job_id = '81d1596fd4d642f4a6bdf86c45e014e8'
        # TODO: move this out of this file and integrate w/ buildstep
        responses.add(
            responses.POST, 'http://jenkins.example.com/job/server/build',
            body='',
            status=201)
        # Queue item lookup by CHANGES_BID succeeds here (item 13).
        responses.add(
            responses.GET,
            re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'),
            body=self.load_fixture('fixtures/GET/queue_item_by_job_id.xml'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/',
            body=self.load_fixture('fixtures/GET/queue_details_building.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/api/json/',
            body=self.load_fixture('fixtures/GET/job_details_success.json'))
        responses.add(
            responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0',
            match_querystring=True,
            adding_headers={'X-Text-Size': '7'},
            body='Foo bar')
        responses.add(
            responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml',
            body=self.load_fixture('fixtures/GET/node_config.xml'))
        artifacts_store_requests_re = re.compile(r'http://localhost:1234/buckets/.+/artifacts')
        # Simulate test type which doesn't interact with artifacts store.
        responses.add(
            responses.GET, artifacts_store_requests_re,
            body='',
            status=404)
        build = self.create_build(self.project)
        job = self.create_job(
            build=build,
            id=UUID(job_id))
        plan = self.create_plan(self.project)
        self.create_step(
            plan, order=0, implementation='changes.backends.jenkins.buildstep.JenkinsBuildStep', data={
                'job_name': 'server',
                'jenkins_url': 'http://jenkins.example.com',
            },
        )
        self.create_job_plan(job, plan)
        job_id = job.id.hex
        build_id = build.id.hex
        # eager_tasks makes .delay run synchronously to completion.
        create_job.delay(
            job_id=job_id,
            task_id=job_id,
            parent_task_id=build_id,
        )
        job = Job.query.get(job_id)
        assert job.status == Status.finished
        assert job.result == Result.passed
        assert job.date_created
        assert job.date_started
        assert job.date_finished
        phase_list = job.phases
        assert len(phase_list) == 1
        assert phase_list[0].status == Status.finished
        assert phase_list[0].result == Result.passed
        assert phase_list[0].date_created
        assert phase_list[0].date_started
        assert phase_list[0].date_finished
        step_list = phase_list[0].steps
        assert len(step_list) == 1
        assert step_list[0].status == Status.finished
        assert step_list[0].result == Result.passed
        assert step_list[0].date_created
        assert step_list[0].date_started
        assert step_list[0].date_finished
        assert step_list[0].data == {
            'item_id': '13',
            'queued': False,
            'log_offset': 7,
            'log_artifact_name': JENKINS_LOG_NAME,
            'jenkins_bucket_name': step_list[0].id.hex + '-jenkins',
            'job_name': 'server',
            'build_no': 2,
            'uri': 'https://jenkins.build.itc.dropbox.com/job/server/2/',
            'master': 'http://jenkins.example.com',
        }
        # Node label/cluster come from the node_config.xml fixture.
        node = step_list[0].node
        assert node.label == 'server-ubuntu-10.04 (ami-746cf244) (i-836023b7)'
        assert [n.label for n in node.clusters] == ['server-runner']
        source = LogSource.query.filter_by(job=job).first()
        assert source.name == JENKINS_LOG_NAME
        assert source.step == step_list[0]
        assert source.project == self.project
        assert source.date_created == job.date_started
        bucket_name = step_list[0].id.hex + '-jenkins'
        artifact_name = step_list[0].data['log_artifact_name']
        artifact = ArtifactStoreMock('').get_artifact(bucket_name, artifact_name)
        assert artifact.name == artifact_name
        assert artifact.path == JENKINS_LOG_NAME
        assert artifact.size == 7
        assert artifact.state == ArtifactState.UPLOADED
        assert ArtifactStoreMock('').get_artifact_content(bucket_name, artifact_name).getvalue() == 'Foo bar'
| 37.355216
| 240
| 0.610101
| 4,636
| 41,539
| 5.320966
| 0.080673
| 0.038106
| 0.053267
| 0.062145
| 0.766459
| 0.743554
| 0.723893
| 0.72138
| 0.700422
| 0.683152
| 0
| 0.03659
| 0.261802
| 41,539
| 1,111
| 241
| 37.388839
| 0.767871
| 0.015937
| 0
| 0.684783
| 0
| 0.033696
| 0.267364
| 0.072906
| 0
| 0
| 0
| 0.0009
| 0.1
| 1
| 0.03587
| false
| 0.004348
| 0.026087
| 0
| 0.078261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
486ac585a5446b2e4cacd722943025bfc6303b32
| 55
|
py
|
Python
|
chalice/tests/__init__.py
|
alphagov-mirror/csw-backend
|
687b1db3a5931a2405881a846709fe12a9cde2b8
|
[
"MIT"
] | 1
|
2019-10-14T11:34:24.000Z
|
2019-10-14T11:34:24.000Z
|
chalice/tests/__init__.py
|
alphagov-mirror/csw-backend
|
687b1db3a5931a2405881a846709fe12a9cde2b8
|
[
"MIT"
] | 64
|
2018-08-21T07:06:35.000Z
|
2021-11-01T09:22:27.000Z
|
chalice/tests/__init__.py
|
alphagov-mirror/csw-backend
|
687b1db3a5931a2405881a846709fe12a9cde2b8
|
[
"MIT"
] | 3
|
2018-09-05T12:40:58.000Z
|
2021-04-10T20:12:27.000Z
|
# Flag unit-test mode in the environment before any project code that
# reads CSW_CRITERIA_UNIT_TESTING is imported.
import os
os.environ["CSW_CRITERIA_UNIT_TESTING"] = "1"
| 27.5
| 45
| 0.781818
| 9
| 55
| 4.444444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.072727
| 55
| 2
| 45
| 27.5
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0.464286
| 0.446429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
48752af1d066afbfaf906d610bfb8b83215adb0c
| 87
|
py
|
Python
|
__init__.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 23
|
2018-05-25T02:16:59.000Z
|
2022-03-24T06:56:34.000Z
|
__init__.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 1
|
2019-06-11T06:59:21.000Z
|
2019-06-11T06:59:40.000Z
|
__init__.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 8
|
2018-08-29T16:43:12.000Z
|
2022-01-17T11:54:40.000Z
|
"""a pyTorch Extension for Applied Mathematics"""
from . import optim
from . import nn
| 21.75
| 49
| 0.747126
| 12
| 87
| 5.416667
| 0.833333
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16092
| 87
| 3
| 50
| 29
| 0.890411
| 0.494253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6f870f32e1f9f48b02d86926f3d5697b84bd6702
| 2,202
|
py
|
Python
|
tests/Argo_global_range_check_validation.py
|
BillMills/AutoQC
|
cb56fa5bb2115170ec204edd84e2d69ce84be820
|
[
"MIT"
] | 17
|
2015-01-31T00:35:58.000Z
|
2020-10-26T19:01:46.000Z
|
tests/Argo_global_range_check_validation.py
|
castelao/AutoQC
|
eb85422c1a6a5ff965a1ef96b3cb29240a66b506
|
[
"MIT"
] | 163
|
2015-01-21T03:44:42.000Z
|
2022-01-09T22:03:12.000Z
|
tests/Argo_global_range_check_validation.py
|
BillMills/AutoQC
|
cb56fa5bb2115170ec204edd84e2d69ce84be820
|
[
"MIT"
] | 11
|
2015-06-04T14:32:22.000Z
|
2021-04-11T05:18:09.000Z
|
import qctests.Argo_global_range_check
import util.testingProfile
import numpy
from util import obs_utils
##### Argo_global_range_check ---------------------------------------------------
def test_Argo_global_range_check_temperature():
    '''
    Make sure AGRC is flagging temperature excursions
    '''

    # Valid Argo temperature range is [-2.5, 40] C; both boundary values
    # must pass while values just outside must be flagged.

    # should fail despite rounding
    p = util.testingProfile.fakeProfile([-2.500000001], [100], latitude=0.0)
    qc = qctests.Argo_global_range_check.test(p, None)
    truth = numpy.zeros(1, dtype=bool)
    truth[0] = True
    assert numpy.array_equal(qc, truth), 'failed to flag temperature slightly colder than -2.5 C'

    # -2.5 OK
    p = util.testingProfile.fakeProfile([-2.5], [100], latitude=0.0)
    qc = qctests.Argo_global_range_check.test(p, None)
    truth = numpy.zeros(1, dtype=bool)
    assert numpy.array_equal(qc, truth), 'incorrectly flagging -2.5 C'

    # 40 OK
    p = util.testingProfile.fakeProfile([40], [100], latitude=0.0)
    qc = qctests.Argo_global_range_check.test(p, None)
    truth = numpy.zeros(1, dtype=bool)
    assert numpy.array_equal(qc, truth), 'incorrectly flagging 40 C'

    # should fail despite rounding
    p = util.testingProfile.fakeProfile([40.0000001], [100], latitude=0.0)
    qc = qctests.Argo_global_range_check.test(p, None)
    truth = numpy.zeros(1, dtype=bool)
    truth[0] = True
    assert numpy.array_equal(qc, truth), 'failed to flag temperature slightly warmer than 40 C'
def test_Argo_global_range_check_pressure():
    '''
    Make sure AGRC is flagging pressure excursions
    '''

    # Pressures are supplied as depths via obs_utils.pressure_to_depth;
    # anything below -5 dbar must be flagged, -5 exactly must pass.

    # should fail despite rounding
    p = util.testingProfile.fakeProfile([5], obs_utils.pressure_to_depth([-5.00000001], 0.0), latitude=0.0)
    qc = qctests.Argo_global_range_check.test(p, None)
    truth = numpy.zeros(1, dtype=bool)
    truth[0] = True
    assert numpy.array_equal(qc, truth), 'failed to flag pressure slightly below -5 '

    # -5 OK
    p = util.testingProfile.fakeProfile([5], obs_utils.pressure_to_depth([-5], 0.0), latitude=0.0)
    qc = qctests.Argo_global_range_check.test(p, None)
    truth = numpy.zeros(1, dtype=bool)
    assert numpy.array_equal(qc, truth), 'incorrectly flagging pressure of -5'
| 39.321429
| 108
| 0.684832
| 314
| 2,202
| 4.652866
| 0.194268
| 0.068446
| 0.102669
| 0.136893
| 0.853525
| 0.778919
| 0.717317
| 0.717317
| 0.679671
| 0.611225
| 0
| 0.047855
| 0.174387
| 2,202
| 55
| 109
| 40.036364
| 0.755776
| 0.127157
| 0
| 0.454545
| 0
| 0
| 0.124934
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.060606
| false
| 0
| 0.121212
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6ff505e3b8ee9f65d288c4d5479b67bd35856203
| 144
|
py
|
Python
|
tests/conftest.py
|
kylenstone/python-frameio-client
|
5f12cb40d68f83cafe53752d869f4a9880a1169e
|
[
"MIT"
] | 1
|
2019-06-08T18:44:55.000Z
|
2019-06-08T18:44:55.000Z
|
tests/conftest.py
|
jpylisela/python-frameio-client
|
527ee5b9f85329982913b6ea54e097a5e089ff57
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
jpylisela/python-frameio-client
|
527ee5b9f85329982913b6ea54e097a5e089ff57
|
[
"MIT"
] | null | null | null |
import pytest
from frameioclient import FrameioClient
@pytest.fixture
def frameioclient(token):
return FrameioClient("aaaabbbbccccddddeeee")
| 20.571429
| 46
| 0.840278
| 14
| 144
| 8.642857
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 144
| 6
| 47
| 24
| 0.930769
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
b50dbd77a83697e31c0e4fc69022dc98b1fb259c
| 189
|
py
|
Python
|
tests/samples/valid/DictAchievement.py
|
raviolliii/dev-achievements
|
da0a92f0d53b20192ef68ee1222cbf01fb0c0ed0
|
[
"MIT"
] | 12
|
2021-02-05T15:38:08.000Z
|
2021-09-04T21:19:03.000Z
|
tests/samples/valid/DictAchievement.py
|
raviolliii/dev-achievements
|
da0a92f0d53b20192ef68ee1222cbf01fb0c0ed0
|
[
"MIT"
] | null | null | null |
tests/samples/valid/DictAchievement.py
|
raviolliii/dev-achievements
|
da0a92f0d53b20192ef68ee1222cbf01fb0c0ed0
|
[
"MIT"
] | null | null | null |
# cases where DictAchievement should unlock
# >> CASE
{'name': 'John Doe', 'age': 24}
# >> CASE
{
'name': 'John Doe',
'age': 24
}
# >> CASE
func({'name': 'John Doe', 'age': 24})
| 13.5
| 43
| 0.534392
| 24
| 189
| 4.208333
| 0.5
| 0.237624
| 0.326733
| 0.415842
| 0.594059
| 0.435644
| 0.435644
| 0
| 0
| 0
| 0
| 0.041096
| 0.227513
| 189
| 13
| 44
| 14.538462
| 0.650685
| 0.343915
| 0
| 0
| 0
| 0
| 0.378151
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b50e9feb1fdb5ccfd803542f057009b98d272cba
| 79
|
py
|
Python
|
CodeWars/7 Kyu/Find the capitals.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
CodeWars/7 Kyu/Find the capitals.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
CodeWars/7 Kyu/Find the capitals.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
def capitals(word):
return [i for (i, c) in enumerate(word) if c.isupper()]
| 39.5
| 59
| 0.658228
| 14
| 79
| 3.714286
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177215
| 79
| 2
| 59
| 39.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
82f8ade6a0016a76c42a940a20565b452e455cd3
| 235
|
py
|
Python
|
dude/admin.py
|
demosdemon/dude-wheres-my-car
|
c4db13dd8f2ec7439a37bdef096c8adeb3e54553
|
[
"MIT"
] | null | null | null |
dude/admin.py
|
demosdemon/dude-wheres-my-car
|
c4db13dd8f2ec7439a37bdef096c8adeb3e54553
|
[
"MIT"
] | 2
|
2018-08-30T01:35:29.000Z
|
2018-09-01T02:08:15.000Z
|
dude/admin.py
|
demosdemon/dude-wheres-my-car
|
c4db13dd8f2ec7439a37bdef096c8adeb3e54553
|
[
"MIT"
] | null | null | null |
"""Admin config."""
from flask_admin.contrib.sqla import ModelView
from dude.extensions import admin, db
from dude.user.models import Role, User
admin.add_view(ModelView(Role, db.session))
admin.add_view(ModelView(User, db.session))
| 26.111111
| 46
| 0.787234
| 36
| 235
| 5.055556
| 0.472222
| 0.087912
| 0.131868
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093617
| 235
| 8
| 47
| 29.375
| 0.85446
| 0.055319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d20de33b5ab5c7d6d4257b4bc9158c6af147fc47
| 661
|
py
|
Python
|
station/robot_goal_and_obstacle_finder/detection/acuro_markers/marker_position.py
|
GLO3013-E4/COViRondelle2021
|
f8d23903d0a906e93a7698a555d90ebecdf83969
|
[
"MIT"
] | null | null | null |
station/robot_goal_and_obstacle_finder/detection/acuro_markers/marker_position.py
|
GLO3013-E4/COViRondelle2021
|
f8d23903d0a906e93a7698a555d90ebecdf83969
|
[
"MIT"
] | null | null | null |
station/robot_goal_and_obstacle_finder/detection/acuro_markers/marker_position.py
|
GLO3013-E4/COViRondelle2021
|
f8d23903d0a906e93a7698a555d90ebecdf83969
|
[
"MIT"
] | null | null | null |
class MarkerPosition:
def __init__(self, markers_points, rotation_vector, translation_vector):
self.markers_points = markers_points
self.rotation_vector = rotation_vector
self.translation_vector = translation_vector
def set_markers_points(self, markers_points):
self.markers_points = markers_points
def set_rotation_vector(self, rotation_vector):
self.rotation_vector = rotation_vector
def get_markers_points(self):
return self.markers_points
def get_rotation_vector(self):
return self.rotation_vector
def get_translation_vector(self):
return self.translation_vector
| 30.045455
| 76
| 0.741301
| 77
| 661
| 5.948052
| 0.168831
| 0.255459
| 0.18559
| 0.104803
| 0.406114
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202723
| 661
| 21
| 77
| 31.47619
| 0.86907
| 0
| 0
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d215ec26475dddf3f75340a8265fdcb36ecbaaab
| 355
|
py
|
Python
|
blocks/bricks/recurrent/__init__.py
|
KIKOcaoyue/blocks
|
dfbeb400cfacfc1abe75e377cc03c1bf61b9c2fa
|
[
"BSD-3-Clause"
] | 1,067
|
2015-05-16T23:39:15.000Z
|
2019-02-10T13:33:00.000Z
|
blocks/bricks/recurrent/__init__.py
|
KIKOcaoyue/blocks
|
dfbeb400cfacfc1abe75e377cc03c1bf61b9c2fa
|
[
"BSD-3-Clause"
] | 577
|
2015-05-16T18:52:53.000Z
|
2018-11-27T15:31:09.000Z
|
blocks/bricks/recurrent/__init__.py
|
KIKOcaoyue/blocks
|
dfbeb400cfacfc1abe75e377cc03c1bf61b9c2fa
|
[
"BSD-3-Clause"
] | 379
|
2015-05-21T03:24:04.000Z
|
2019-01-29T02:55:00.000Z
|
from .base import BaseRecurrent, recurrent
from .architectures import SimpleRecurrent, LSTM, GatedRecurrent
from .misc import Bidirectional, RecurrentStack, RECURRENTSTACK_SEPARATOR
__all__ = ("BaseRecurrent", "recurrent", "SimpleRecurrent", "LSTM",
"GatedRecurrent", "Bidirectional", "RecurrentStack",
"RECURRENTSTACK_SEPARATOR")
| 39.444444
| 73
| 0.760563
| 28
| 355
| 9.428571
| 0.5
| 0.166667
| 0.25
| 0.378788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.143662
| 355
| 8
| 74
| 44.375
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0.298592
| 0.067606
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d218d93da30da6a551c41df5254d435af92afcdd
| 42
|
py
|
Python
|
test/login.py
|
Cyril-hub/git-info
|
3ad1464b6b7470240181d5bf2b01b89ba2b49416
|
[
"MIT"
] | null | null | null |
test/login.py
|
Cyril-hub/git-info
|
3ad1464b6b7470240181d5bf2b01b89ba2b49416
|
[
"MIT"
] | null | null | null |
test/login.py
|
Cyril-hub/git-info
|
3ad1464b6b7470240181d5bf2b01b89ba2b49416
|
[
"MIT"
] | null | null | null |
num = 10
num1 = 10
num2 = 20
num3 = 30
| 5.25
| 9
| 0.547619
| 8
| 42
| 2.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.407407
| 0.357143
| 42
| 7
| 10
| 6
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d23241a81c4a64c678d32e4719447026d3ff0cd6
| 51
|
py
|
Python
|
primenumberslib/__init__.py
|
artigupt/primenumberslib
|
afb3a9ed914d018231a4588574428624cad91dcd
|
[
"MIT"
] | null | null | null |
primenumberslib/__init__.py
|
artigupt/primenumberslib
|
afb3a9ed914d018231a4588574428624cad91dcd
|
[
"MIT"
] | null | null | null |
primenumberslib/__init__.py
|
artigupt/primenumberslib
|
afb3a9ed914d018231a4588574428624cad91dcd
|
[
"MIT"
] | null | null | null |
from primenumberslib.prime_numbers import is_prime
| 25.5
| 50
| 0.901961
| 7
| 51
| 6.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 1
| 51
| 51
| 0.93617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d2547d39109421834a4661dbb1752da4928f9b90
| 41
|
py
|
Python
|
streamlit_app.py
|
ffreemt/app2
|
af092aa7f9a290f16ba650b012e439bea99f1848
|
[
"MIT"
] | null | null | null |
streamlit_app.py
|
ffreemt/app2
|
af092aa7f9a290f16ba650b012e439bea99f1848
|
[
"MIT"
] | null | null | null |
streamlit_app.py
|
ffreemt/app2
|
af092aa7f9a290f16ba650b012e439bea99f1848
|
[
"MIT"
] | null | null | null |
import streamlit as st
st.write("Hey ya")
| 20.5
| 22
| 0.756098
| 8
| 41
| 3.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 2
| 23
| 20.5
| 0.861111
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d27e40f75805fbdceffeaf3dd0716aefdd13f4c8
| 196
|
py
|
Python
|
src/spacel/provision/app/__init__.py
|
mycloudandme/spacel-provision
|
900b8ada0017f727163c5c2ae464e17d747ba0e8
|
[
"MIT"
] | 2
|
2016-05-18T11:10:27.000Z
|
2016-05-18T13:25:04.000Z
|
src/spacel/provision/app/__init__.py
|
mycloudandme/spacel-provision
|
900b8ada0017f727163c5c2ae464e17d747ba0e8
|
[
"MIT"
] | null | null | null |
src/spacel/provision/app/__init__.py
|
mycloudandme/spacel-provision
|
900b8ada0017f727163c5c2ae464e17d747ba0e8
|
[
"MIT"
] | null | null | null |
from .app_spot import AppSpotTemplateDecorator
from .cloudwatch_logs import CloudWatchLogsDecorator
from .ingress_resource import IngressResourceFactory
from .space import SpaceElevatorAppFactory
| 39.2
| 52
| 0.897959
| 19
| 196
| 9.105263
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 196
| 4
| 53
| 49
| 0.961111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
962dedad7cd1ac6129e395d809ed5e503d977805
| 93
|
py
|
Python
|
cooey-bot/customcom/__init__.py
|
kennnyshiwa/cooey-tools
|
dc3a20a31d52711593ab08228cb4ae5bfe3955ec
|
[
"Apache-2.0"
] | 2
|
2021-01-27T20:28:09.000Z
|
2022-02-20T14:43:54.000Z
|
cooey-bot/customcom/__init__.py
|
kennnyshiwa/cooey-tools
|
dc3a20a31d52711593ab08228cb4ae5bfe3955ec
|
[
"Apache-2.0"
] | null | null | null |
cooey-bot/customcom/__init__.py
|
kennnyshiwa/cooey-tools
|
dc3a20a31d52711593ab08228cb4ae5bfe3955ec
|
[
"Apache-2.0"
] | 2
|
2021-01-11T01:56:36.000Z
|
2021-01-27T20:28:09.000Z
|
from .customcom import CustomCommands
def setup(bot):
bot.add_cog(CustomCommands(bot))
| 15.5
| 37
| 0.763441
| 12
| 93
| 5.833333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139785
| 93
| 5
| 38
| 18.6
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
963991bf5ca93de7084e589a59777a7b5ef54033
| 218
|
py
|
Python
|
discourse/discourse/admin.py
|
GonnaFlyMethod/discourse-django-vue
|
e323fc38c970a97788620a176f94bf3f2c830a60
|
[
"MIT"
] | null | null | null |
discourse/discourse/admin.py
|
GonnaFlyMethod/discourse-django-vue
|
e323fc38c970a97788620a176f94bf3f2c830a60
|
[
"MIT"
] | null | null | null |
discourse/discourse/admin.py
|
GonnaFlyMethod/discourse-django-vue
|
e323fc38c970a97788620a176f94bf3f2c830a60
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Topic, Comment, TagOfTopic, TopicSection
admin.site.register(Topic)
admin.site.register(Comment)
admin.site.register(TagOfTopic)
admin.site.register(TopicSection)
| 24.222222
| 60
| 0.825688
| 28
| 218
| 6.428571
| 0.428571
| 0.2
| 0.377778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077982
| 218
| 8
| 61
| 27.25
| 0.895522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9655d0c33ac8d6f4d585fbbbe28b181446f8d757
| 22
|
py
|
Python
|
qcodes_contrib_drivers/drivers/ZurichInstruments/__init__.py
|
ThorstenGroh/Qcodes_contrib_drivers
|
97e05f8f5d8762953ee9db9bc461d0814eef657d
|
[
"MIT"
] | 223
|
2016-10-29T15:00:24.000Z
|
2022-03-20T06:53:34.000Z
|
qcodes_contrib_drivers/drivers/ZurichInstruments/__init__.py
|
ThorstenGroh/Qcodes_contrib_drivers
|
97e05f8f5d8762953ee9db9bc461d0814eef657d
|
[
"MIT"
] | 3,406
|
2016-10-25T10:44:50.000Z
|
2022-03-31T09:47:35.000Z
|
qcodes_contrib_drivers/drivers/ZurichInstruments/__init__.py
|
ThorstenGroh/Qcodes_contrib_drivers
|
97e05f8f5d8762953ee9db9bc461d0814eef657d
|
[
"MIT"
] | 263
|
2016-10-25T11:35:36.000Z
|
2022-03-31T08:53:20.000Z
|
# empty __init__ file
| 11
| 21
| 0.772727
| 3
| 22
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.722222
| 0.863636
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
96950fe4cbc4bd7a61ff83f0f9e281dc70ae5f4d
| 2,113
|
py
|
Python
|
notebook/pillow_rotate.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 174
|
2018-05-30T21:14:50.000Z
|
2022-03-25T07:59:37.000Z
|
notebook/pillow_rotate.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 5
|
2019-08-10T03:22:02.000Z
|
2021-07-12T20:31:17.000Z
|
notebook/pillow_rotate.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 53
|
2018-04-27T05:26:35.000Z
|
2022-03-25T07:59:37.000Z
|
from PIL import Image
im = Image.open('data/src/lena.jpg')
# 
im_rotate = im.rotate(90)
im_rotate.save('data/dst/lena_rotate_90.jpg', quality=95)
# 
im_rotate = im.rotate(45)
im_rotate.save('data/dst/lena_rotate_45.jpg', quality=95)
# 
im_rotate = im.rotate(45, resample=Image.BICUBIC)
im_rotate.save('data/dst/lena_rotate_45_bicubic.jpg', quality=95)
# 
im_rotate = im.rotate(90, expand=True)
im_rotate.save('data/dst/lena_rotate_90_expand.jpg', quality=95)
# 
im_rotate = im.rotate(45, expand=True)
im_rotate.save('data/dst/lena_rotate_45_expand.jpg', quality=95)
# 
im_rotate = im.rotate(45, center=(0, 60))
im_rotate.save('data/dst/lena_rotate_45_change_center.jpg', quality=95)
# 
im_rotate = im.rotate(45, center=(0, 60), expand=True)
im_rotate.save('data/dst/lena_rotate_45_change_center_expand.jpg', quality=95)
# 
im_rotate = im.rotate(0, translate=(100, 50))
im_rotate.save('data/dst/lena_rotate_0_translate.jpg', quality=95)
# 
im_rotate = im.rotate(45, translate=(100, 50))
im_rotate.save('data/dst/lena_rotate_45_translate.jpg', quality=95)
# 
im_rotate = im.rotate(45, translate=(100, 50), expand=True)
im_rotate.save('data/dst/lena_rotate_45_translate_expand.jpg', quality=95)
# 
im_rotate = im.rotate(45, fillcolor=(255, 128, 0), expand=True)
im_rotate.save('data/dst/lena_rotate_45_fillcolor_expand.jpg', quality=95)
# 
| 34.639344
| 90
| 0.789872
| 365
| 2,113
| 4.216438
| 0.084932
| 0.17154
| 0.187135
| 0.243015
| 0.933073
| 0.917479
| 0.74334
| 0.547758
| 0.355426
| 0.246264
| 0
| 0.06754
| 0.061051
| 2,113
| 60
| 91
| 35.216667
| 0.708165
| 0.361098
| 0
| 0
| 0
| 0
| 0.317128
| 0.304413
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9699450b573a259666b90dbea5aa88838e30f09e
| 76
|
py
|
Python
|
server/app/services/tasks_scheduler/timer_tasks/app/device_count/__init__.py
|
goodfree/ActorCloud
|
e8db470830ea6f6f208ad43c2e56a2e8976bc468
|
[
"Apache-2.0"
] | 173
|
2019-06-10T07:14:49.000Z
|
2022-03-31T08:42:36.000Z
|
server/app/services/tasks_scheduler/timer_tasks/app/device_count/__init__.py
|
zlyz12345/ActorCloud
|
9c34b371c23464981323ef9865d9913bde1fe09c
|
[
"Apache-2.0"
] | 27
|
2019-06-12T08:25:29.000Z
|
2022-02-26T11:37:15.000Z
|
server/app/services/tasks_scheduler/timer_tasks/app/device_count/__init__.py
|
zlyz12345/ActorCloud
|
9c34b371c23464981323ef9865d9913bde1fe09c
|
[
"Apache-2.0"
] | 67
|
2019-06-10T08:40:05.000Z
|
2022-03-09T03:43:56.000Z
|
from .count_task import device_count_task
__all__ = ['device_count_task']
| 15.2
| 41
| 0.802632
| 11
| 76
| 4.727273
| 0.545455
| 0.519231
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118421
| 76
| 4
| 42
| 19
| 0.776119
| 0
| 0
| 0
| 0
| 0
| 0.223684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
96be8607822c16150cf5e728dfb5ba04640bee1e
| 6,419
|
py
|
Python
|
pyramid_cloudflare_access/tests/test_access.py
|
teamniteo/pyramid_cloudflare_access
|
60326378b64e16c7839302b184ec5412a39ab7cc
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
pyramid_cloudflare_access/tests/test_access.py
|
teamniteo/pyramid_cloudflare_access
|
60326378b64e16c7839302b184ec5412a39ab7cc
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
pyramid_cloudflare_access/tests/test_access.py
|
teamniteo/pyramid_cloudflare_access
|
60326378b64e16c7839302b184ec5412a39ab7cc
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
"""Tests for HerokuappAccess tween."""
from pyramid import testing
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.httpexceptions import HTTPForbidden
from pyramid_cloudflare_access import CloudflareAccess
import pytest
# Taken from example at https://pyjwt.readthedocs.io/en/latest/usage.html#retrieve-rsa-signing-keys-from-a-jwks-endpoint
sample_jwk = {
"keys": [
{
"alg": "RS256",
"kty": "RSA",
"use": "sig",
"n": "0wtlJRY9-ru61LmOgieeI7_rD1oIna9QpBMAOWw8wTuoIhFQFwcIi7MFB7IEfelCPj08vkfLsuFtR8cG07EE4uvJ78bAqRjMsCvprWp4e2p7hqPnWcpRpDEyHjzirEJle1LPpjLLVaSWgkbrVaOD0lkWkP1T1TkrOset_Obh8BwtO-Ww-UfrEwxTyz1646AGkbT2nL8PX0trXrmira8GnrCkFUgTUS61GoTdb9bCJ19PLX9Gnxw7J0BtR0GubopXq8KlI0ThVql6ZtVGN2dvmrCPAVAZleM5TVB61m0VSXvGWaF6_GeOhbFoyWcyUmFvzWhBm8Q38vWgsSI7oHTkEw",
"e": "AQAB",
"kid": "NEE1QURBOTM4MzI5RkFDNTYxOTU1MDg2ODgwQ0UzMTk1QjYyRkRFQw",
"x5t": "NEE1QURBOTM4MzI5RkFDNTYxOTU1MDg2ODgwQ0UzMTk1QjYyRkRFQw",
"x5c": [
"MIIDBzCCAe+gAwIBAgIJNtD9Ozi6j2jJMA0GCSqGSIb3DQEBCwUAMCExHzAdBgNVBAMTFmRldi04N2V2eDlydS5hdXRoMC5jb20wHhcNMTkwNjIwMTU0NDU4WhcNMzMwMjI2MTU0NDU4WjAhMR8wHQYDVQQDExZkZXYtODdldng5cnUuYXV0aDAuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0wtlJRY9+ru61LmOgieeI7/rD1oIna9QpBMAOWw8wTuoIhFQFwcIi7MFB7IEfelCPj08vkfLsuFtR8cG07EE4uvJ78bAqRjMsCvprWp4e2p7hqPnWcpRpDEyHjzirEJle1LPpjLLVaSWgkbrVaOD0lkWkP1T1TkrOset/Obh8BwtO+Ww+UfrEwxTyz1646AGkbT2nL8PX0trXrmira8GnrCkFUgTUS61GoTdb9bCJ19PLX9Gnxw7J0BtR0GubopXq8KlI0ThVql6ZtVGN2dvmrCPAVAZleM5TVB61m0VSXvGWaF6/GeOhbFoyWcyUmFvzWhBm8Q38vWgsSI7oHTkEwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQlGXpmYaXFB7Q3eG69Uhjd4cFp/jAOBgNVHQ8BAf8EBAMCAoQwDQYJKoZIhvcNAQELBQADggEBAIzQOF/h4T5WWAdjhcIwdNS7hS2Deq+UxxkRv+uavj6O9mHLuRG1q5onvSFShjECXaYT6OGibn7Ufw/JSm3+86ZouMYjBEqGh4OvWRkwARy1YTWUVDGpT2HAwtIq3lfYvhe8P4VfZByp1N4lfn6X2NcJflG+Q+mfXNmRFyyft3Oq51PCZyyAkU7bTun9FmMOyBtmJvQjZ8RXgBLvu9nUcZB8yTVoeUEg4cLczQlli/OkiFXhWgrhVr8uF0/9klslMFXtm78iYSgR8/oC+k1pSNd1+ESSt7n6+JiAQ2Co+ZNKta7LTDGAjGjNDymyoCrZpeuYQwwnHYEHu/0khjAxhXo="
],
},
{
"alg": "RS256",
"kty": "RSA",
"use": "sig",
"n": "qMDEywqsPbiQbnSPVoKOb1HrQ_2KxI4JDe-AK-kbpb2Q3QXFl6IM3pJCvfYOm-f3DuEtBpll_Rg28WWeXl8pXAhmHk3V2Ig57f81uzGXg5xFtZDrqAG0chgwCQPD15FG00xrLDTvSDkIEPJZq-Y4IlJ3NbzQ8gn_JiappjMc8FjqQMz_4uUF-iIPU_aUgbLLtN98moKeNLAVV2lV3H5kVhNP8Fqd6piiH-mdma_KdY--GahAFC7Lt72_QtxnxowalbdkdMDim7paTeqxoZUKKHJsRVPXbiGx1zB3cfgBH7meU8ILv7JX3odu0juy0y2gagSaMkEd9-mcTLr8Bg0-5Q",
"e": "AQAB",
"kid": "Dt0jkFkY7KkmYdDb2BaI1",
"x5t": "lhmczC7hbLpBZh6MBSygH1D9qeE",
"x5c": [
"MIIDBzCCAe+gAwIBAgIJHjJKyTNJE/wIMA0GCSqGSIb3DQEBCwUAMCExHzAdBgNVBAMTFmRldi04N2V2eDlydS5hdXRoMC5jb20wHhcNMjAwMzExMjA1OTM5WhcNMzMxMTE4MjA1OTM5WjAhMR8wHQYDVQQDExZkZXYtODdldng5cnUuYXV0aDAuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqMDEywqsPbiQbnSPVoKOb1HrQ/2KxI4JDe+AK+kbpb2Q3QXFl6IM3pJCvfYOm+f3DuEtBpll/Rg28WWeXl8pXAhmHk3V2Ig57f81uzGXg5xFtZDrqAG0chgwCQPD15FG00xrLDTvSDkIEPJZq+Y4IlJ3NbzQ8gn/JiappjMc8FjqQMz/4uUF+iIPU/aUgbLLtN98moKeNLAVV2lV3H5kVhNP8Fqd6piiH+mdma/KdY++GahAFC7Lt72/QtxnxowalbdkdMDim7paTeqxoZUKKHJsRVPXbiGx1zB3cfgBH7meU8ILv7JX3odu0juy0y2gagSaMkEd9+mcTLr8Bg0+5QIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQxVQqyrrAtXsBznZj2GN4nfJf+sTAOBgNVHQ8BAf8EBAMCAoQwDQYJKoZIhvcNAQELBQADggEBAGsIl6yxIG8GOkHlfcSEa4//4WDxwfw8lg6zNPri6nhYtF1kbHTO5PqUbE+kasMnvqEV5Y0QXvyxwIjLLbbYiySK6aWp5XS2Wy5hYlMjXOimAw6mwbkNVhujRsPjTY3P+bv/9eiv2zO9yEfzfmfr6jhYcmnOdTFgAujsL4AyDpUh4/jKDtDNFl6lMdn8J7DcdRNZM/8OsAk6GgZYlzStfh4aI/uE3ekJ84XAxxdHUzwDUu5B8CetmHvfxQvV9MjmozLR8SbkTEhUv4//tr8SfGc8jS78E5w8NJN6DtYVcUNweHrAlvQXRevCmBRu3D9hKARjXfTBqilBBF9nNt93mYM="
],
},
]
}
sample_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5FRTFRVVJCT1RNNE16STVSa0ZETlRZeE9UVTFNRGcyT0Rnd1EwVXpNVGsxUWpZeVJrUkZRdyJ9.eyJpc3MiOiJodHRwczovL2Rldi04N2V2eDlydS5hdXRoMC5jb20vIiwic3ViIjoiYVc0Q2NhNzl4UmVMV1V6MGFFMkg2a0QwTzNjWEJWdENAY2xpZW50cyIsImF1ZCI6Imh0dHBzOi8vZXhwZW5zZXMtYXBpIiwiaWF0IjoxNTcyMDA2OTU0LCJleHAiOjE1NzIwMDY5NjQsImF6cCI6ImFXNENjYTc5eFJlTFdVejBhRTJINmtEME8zY1hCVnRDIiwiZ3R5IjoiY2xpZW50LWNyZWRlbnRpYWxzIn0.PUxE7xn52aTCohGiWoSdMBZGiYAHwE5FYie0Y1qUT68IHSTXwXVd6hn02HTah6epvHHVKA2FqcFZ4GGv5VTHEvYpeggiiZMgbxFrmTEY0csL6VNkX1eaJGcuehwQCRBKRLL3zKmA5IKGy5GeUnIbpPHLHDxr-GXvgFzsdsyWlVQvPX2xjeaQ217r2PtxDeqjlf66UYl6oY6AqNS8DH3iryCvIfCcybRZkc_hdy-6ZMoKT6Piijvk_aXdm7-QQqKJFHLuEqrVSOuBqqiNfVrG27QzAPuPOxvfXTVLXL2jek5meH6n-VWgrBdoMFH93QEszEDowDAEhQPHVs0xj7SIzA"
sample_audience = "https://expenses-api"
@pytest.mark.freeze_time("2017-05-21")
def test_happy_path(mocker) -> None:
"""Test that JWT token is parsed and authorized."""
mocker.patch(
"pyramid_cloudflare_access.PyJWKClient.fetch_data", return_value=sample_jwk
)
tween_handler = mocker.Mock()
request = testing.DummyRequest()
request.cookies = {"CF_Authorization": sample_token}
request.registry.settings = {
"pyramid_cloudflare_access.policy_audience": sample_audience,
"pyramid_cloudflare_access.team": "auth0",
}
CloudflareAccess(tween_handler, request.registry)(request)
tween_handler.assert_called_with(request)
def test_missing_cookie(mocker) -> None:
"""Test that access is denied on wrong request."""
mocker.patch(
"pyramid_cloudflare_access.PyJWKClient.fetch_data", return_value=sample_jwk
)
tween_handler = mocker.Mock()
request = testing.DummyRequest()
request.registry.settings = {
"pyramid_cloudflare_access.policy_audience": sample_audience,
"pyramid_cloudflare_access.team": "auth0",
}
with pytest.raises(HTTPBadRequest):
CloudflareAccess(tween_handler, request.registry)(request)
def test_auth_failed(mocker) -> None:
"""Test that access is denied on expired auth."""
mocker.patch(
"pyramid_cloudflare_access.PyJWKClient.fetch_data", return_value=sample_jwk
)
tween_handler = mocker.Mock()
request = testing.DummyRequest()
request.cookies = {"CF_Authorization": sample_token}
request.registry.settings = {
"pyramid_cloudflare_access.policy_audience": sample_audience,
"pyramid_cloudflare_access.team": "auth0",
}
with pytest.raises(HTTPForbidden):
CloudflareAccess(tween_handler, request.registry)(request)
| 66.864583
| 1,058
| 0.827387
| 366
| 6,419
| 14.319672
| 0.456284
| 0.032437
| 0.043885
| 0.010303
| 0.426255
| 0.315207
| 0.286587
| 0.278573
| 0.265598
| 0.265598
| 0
| 0.090782
| 0.107649
| 6,419
| 95
| 1,059
| 67.568421
| 0.824197
| 0.044555
| 0
| 0.513514
| 0
| 0.027027
| 0.685158
| 0.660448
| 0
| 1
| 0
| 0
| 0.013514
| 1
| 0.040541
| false
| 0
| 0.067568
| 0
| 0.108108
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
96c312e68999533ed362eaa287fa966ce7efd08c
| 100
|
py
|
Python
|
enthought/naming/initial_context.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/naming/initial_context.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/naming/initial_context.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from apptools.naming.initial_context import *
| 25
| 45
| 0.85
| 13
| 100
| 6.076923
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11
| 100
| 3
| 46
| 33.333333
| 0.88764
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
739130d3aa2c6c235c2f52f8955404ceabcfca40
| 71,937
|
py
|
Python
|
bespin_api_v2/tests_api.py
|
Duke-GCB/bespin-api
|
cea5c20fb2ff592adabe6ebb7ca934939aa11a34
|
[
"MIT"
] | null | null | null |
bespin_api_v2/tests_api.py
|
Duke-GCB/bespin-api
|
cea5c20fb2ff592adabe6ebb7ca934939aa11a34
|
[
"MIT"
] | 137
|
2016-12-09T18:59:45.000Z
|
2021-06-10T18:55:47.000Z
|
bespin_api_v2/tests_api.py
|
Duke-GCB/bespin-api
|
cea5c20fb2ff592adabe6ebb7ca934939aa11a34
|
[
"MIT"
] | 3
|
2017-11-14T16:05:58.000Z
|
2018-12-28T18:07:43.000Z
|
import json
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
from data.tests_api import UserLogin
from data.models import Workflow, WorkflowVersion, WorkflowConfiguration, JobStrategy, ShareGroup, JobFlavor, \
JobSettings, CloudSettingsOpenStack, VMProject, JobFileStageGroup, DDSUserCredential, DDSEndpoint, Job, \
JobRuntimeK8s, LandoConnection, JobRuntimeStepK8s, EmailMessage, EmailTemplate, WorkflowVersionToolDetails
from data.tests_models import create_vm_job_settings
from bespin_api_v2.jobtemplate import STRING_VALUE_PLACEHOLDER, INT_VALUE_PLACEHOLDER, \
REQUIRED_ERROR_MESSAGE, PLACEHOLDER_ERROR_MESSAGE
from mock import patch, Mock
class AdminCreateListRetrieveMixin(object):
"""
Many of our Admin models are CreateListRetrieveModelViewSet subclasses, therefore
most of the API tests follow the same pattern. This base class provides test for the standard behaviors
"""
# Override these variables and methods in implementation
BASE_NAME = None # Name of the base_view from urls, e.g. 'v2-workflowversiontooldetails'
MODEL_CLS = None # Name of the model class
def create_model_object(self):
raise NotImplemented('Override create_model_object to use this base class')
def build_post_data(self):
raise NotImplemented('Override build_post_data to use this base class')
def check_single_response(self, model_object, response_data):
raise NotImplemented('Override check_single_response to use this base class')
# May override
def check_list_response(self, model_object, response_data):
self.assertEqual(len(response_data), 1, 'Should have one item as one item was created')
self.check_single_response(model_object, response_data[0])
# Do not override
def list_url(self):
return reverse('{}-list'.format(self.BASE_NAME))
def object_url(self, pk):
return '{}{}/'.format(self.list_url(), pk)
def get_model_object(self, pk):
return self.MODEL_CLS.objects.get(pk=pk)
def test_list_fails_unauthenticated(self):
self.user_login.become_unauthorized()
url = self.list_url()
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_list_fails_not_admin_user(self):
self.user_login.become_normal_user()
url = self.list_url()
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_list_with_admin_user(self):
model_object = self.create_model_object()
self.user_login.become_admin_user()
url = self.list_url()
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.check_list_response(model_object, response.data)
def test_retrieve_with_admin_user(self):
model_object = self.create_model_object()
self.user_login.become_admin_user()
url = self.object_url(model_object.id)
response = self.client.get(url ,format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.check_single_response(model_object, response.data)
def test_create_with_admin_user(self):
self.user_login.become_admin_user()
url = self.list_url()
response = self.client.post(url, format='json', data=self.build_post_data())
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
model_object = self.get_model_object(response.data['id'])
self.check_single_response(model_object, response.data)
def test_put_fails_with_admin_user(self):
self.user_login.become_admin_user()
url = self.object_url('placeholder-id')
response = self.client.put(url, format='json', data={})
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_delete_fails_with_admin_user(self):
self.user_login.become_admin_user()
url = self.object_url('placeholder-id')
response = self.client.delete(url, format='json')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class AdminWorkflowViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
BASE_NAME = 'v2-admin_workflow'
MODEL_CLS = Workflow
def setUp(self):
self.user_login = UserLogin(self.client)
def test_list_url(self):
self.assertEqual(self.list_url(), '/api/v2/admin/workflows/')
def test_object_url(self):
self.assertEqual(self.object_url(3), '/api/v2/admin/workflows/3/')
def create_model_object(self):
model_object = Workflow.objects.create(name='Exome Seq', tag='exomeseq')
return model_object
def check_single_response(self, model_object, response_data):
self.assertEqual(response_data['id'], model_object.id)
self.assertEqual(response_data['tag'], 'exomeseq')
def build_post_data(self):
return {
'name': 'Exome Seq',
'tag': 'exomeseq',
}
class AdminWorkflowVersionViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
    """Exercises the admin WorkflowVersion endpoint plus version-specific behaviors."""
    BASE_NAME = 'v2-admin_workflowversion'
    MODEL_CLS = WorkflowVersion

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.workflow = Workflow.objects.create(name='Exome Seq', tag='exomeseq')
        # Changelog URL attached to versions created below.
        self.version_change_log = 'https://github.com/bespin-workflows/exomeseq-gatk3/blob/release-4.1/CHANGELOG.md'

    def test_list_url(self):
        self.assertEqual(self.list_url(), '/api/v2/admin/workflow-versions/')

    def test_object_url(self):
        self.assertEqual(self.object_url(3), '/api/v2/admin/workflow-versions/3/')

    def create_model_object(self):
        # Mixin hook: one version attached to the workflow from setUp.
        return WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v1 exomeseq',
            version='1.0.1',
            version_info_url=self.version_change_log,
            url='https://someurl.com',
            fields=[{"name": "threads", "class": "int"}],
        )

    def check_single_response(self, model_object, response_data):
        # Mixin hook: compare every serialized field against the expected values.
        expected = {
            'id': model_object.id,
            'workflow': self.workflow.id,
            'description': 'v1 exomeseq',
            'version': '1.0.1',
            'url': 'https://someurl.com',
            'fields': [{"name": "threads", "class": "int"}],
        }
        for key, value in expected.items():
            self.assertEqual(response_data[key], value)

    def build_post_data(self):
        # Mixin hook: payload used by the shared create test.
        return {
            'workflow': self.workflow.id,
            'description': 'v1 exomeseq',
            'version': '1.0.1',
            'url': 'https://someurl.com',
            'fields': [{"name": "threads", "class": "int"}],
        }

    # Additional tests
    def test_create_with_version_change_log(self):
        """POSTing a version_info_url stores it on the created version."""
        self.user_login.become_admin_user()
        payload = {
            'workflow': self.workflow.id,
            'description': 'v1 exomseq',
            'version': '2.0.1',
            'url': 'https://someurl.com',
            'version_info_url': 'https://someurl.com/changelog',
            'fields': [{"name": "threads", "class": "int"}],
        }
        resp = self.client.post(reverse('v2-admin_workflowversion-list'), format='json', data=payload)
        self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
        self.assertEqual(resp.data['description'], 'v1 exomseq')
        self.assertEqual(resp.data['enable_ui'], False)
        created = WorkflowVersion.objects.all()
        self.assertEqual(len(created), 1)
        self.assertEqual(created[0].version, '2.0.1')
        self.assertEqual(created[0].version_info_url, 'https://someurl.com/changelog')
        self.assertEqual(created[0].fields, [{"name": "threads", "class": "int"}])

    def test_sorted_by_workflow_and_version(self):
        """The list endpoint orders results by workflow then by version."""
        wf_a = Workflow.objects.create(name='workflow1', tag='one')
        WorkflowVersion.objects.create(workflow=wf_a, version="1", url='', fields=[])
        WorkflowVersion.objects.create(workflow=wf_a, version="2.2.2-dev", url='', fields=[])
        WorkflowVersion.objects.create(workflow=wf_a, version="1.3.1", url='', fields=[])
        wf_b = Workflow.objects.create(name='workflow2', tag='two')
        WorkflowVersion.objects.create(workflow=wf_b, version="5", url='', fields=[])
        self.user_login.become_admin_user()
        resp = self.client.get(reverse('v2-admin_workflowversion-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 4)
        observed = [(item['workflow'], item['version']) for item in resp.data]
        self.assertEqual(observed, [
            (wf_a.id, '1'),
            (wf_a.id, '1.3.1'),
            (wf_a.id, '2.2.2-dev'),
            (wf_b.id, '5'),
        ])

    def test_includes_tool_details(self):
        """Detail responses expose the related tool-details primary key."""
        workflow_version = self.create_model_object()
        details = WorkflowVersionToolDetails.objects.create(
            workflow_version=workflow_version,
            details=[{'k': 'v'}]
        )
        self.user_login.become_admin_user()
        resp = self.client.get(self.object_url(workflow_version.id), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(resp.data['tool_details'], details.pk)
class AdminWorkflowConfigurationViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
    """Exercises the admin WorkflowConfiguration endpoint through the shared mixin."""
    BASE_NAME = 'v2-admin_workflowconfiguration'
    MODEL_CLS = WorkflowConfiguration

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.workflow = Workflow.objects.create(name='Exome Seq', tag='exomeseq')
        self.workflow_version = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v1 exomeseq',
            version='1',
            url='',
            fields=[{"name": "threads", "class": "int"}]
        )
        # Supporting rows required to assemble a JobStrategy.
        flavor = JobFlavor.objects.create(name='large')
        VMProject.objects.create()  # created for its side effect only
        connection = LandoConnection.objects.create(
            cluster_type=LandoConnection.K8S_TYPE,
            host='somehost',
            username='user1',
            password='secret',
            queue_name='lando'
        )
        settings = JobSettings.objects.create(
            lando_connection=connection,
            job_runtime_k8s=JobRuntimeK8s.objects.create()
        )
        self.job_strategy = JobStrategy.objects.create(name='default', job_flavor=flavor,
                                                       job_settings=settings)
        self.share_group = ShareGroup.objects.create()

    def test_list_url(self):
        self.assertEqual(self.list_url(), '/api/v2/admin/workflow-configurations/')

    def test_object_url(self):
        self.assertEqual(self.object_url(3), '/api/v2/admin/workflow-configurations/3/')

    def create_model_object(self):
        # Mixin hook: configuration bound to the workflow/strategy/group from setUp.
        return WorkflowConfiguration.objects.create(
            tag='b37xGen',
            workflow=self.workflow,
            system_job_order={"A": "B"},
            default_job_strategy=self.job_strategy,
            share_group=self.share_group,
        )

    def check_single_response(self, model_object, response_data):
        # Mixin hook: compare every serialized field against the expected values.
        expected = {
            'id': model_object.id,
            'tag': 'b37xGen',
            'workflow': self.workflow.id,
            'system_job_order': {"A": "B"},
            'default_job_strategy': self.job_strategy.id,
            'share_group': self.share_group.id,
        }
        for key, value in expected.items():
            self.assertEqual(response_data[key], value)

    def build_post_data(self):
        # Mixin hook: payload used by the shared create test.
        return {
            'workflow': self.workflow.id,
            'tag': 'b37xGen',
            'system_job_order': {"A": "B"},
            'default_job_strategy': self.job_strategy.id,
            'share_group': self.share_group.id,
        }
class AdminWorkflowVersionToolDetailsViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
    """Exercises the admin WorkflowVersionToolDetails endpoint through the shared mixin."""
    BASE_NAME = 'v2-workflowversiontooldetails'
    MODEL_CLS = WorkflowVersionToolDetails

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.workflow = Workflow.objects.create(name='Test Workflow', tag='test')
        self.workflow_version = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='Test vABC',
            version='vABC',
            url='https://example.org/test.zip',
            fields=[{'name': 'size', 'type': 'int'}]
        )
        # Payload attached to tool-details rows below.
        self.details = [{'k1': 'v1'}, {'k2': 'v2'}]

    def test_list_url(self):
        self.assertEqual(self.list_url(), '/api/v2/admin/workflow-version-tool-details/')

    def test_object_url(self):
        self.assertEqual(self.object_url(3), '/api/v2/admin/workflow-version-tool-details/3/')

    def create_model_object(self):
        # Mixin hook: tool details attached to the version from setUp.
        return WorkflowVersionToolDetails.objects.create(
            workflow_version=self.workflow_version,
            details=self.details
        )

    def check_single_response(self, model_object, response_data):
        # Mixin hook: verify the serialized fields for a single tool-details row.
        self.assertEqual(response_data['id'], model_object.id)
        self.assertEqual(response_data['workflow_version'], self.workflow_version.id)
        self.assertEqual(response_data['details'], self.details)

    def build_post_data(self):
        # Mixin hook: payload used by the shared create test.
        return {
            'workflow_version': self.workflow_version.id,
            'details': self.details
        }
class JobStrategyViewSetTestCase(APITestCase):
    """Read-only JobStrategy endpoint: listing, filtering, retrieval, and blocked writes."""

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.job_flavor = JobFlavor.objects.create(name='large')
        self.job_settings = create_vm_job_settings()

    def _make_strategy(self, name):
        # Build a JobStrategy bound to the flavor/settings created in setUp.
        return JobStrategy.objects.create(name=name, job_flavor=self.job_flavor,
                                          job_settings=self.job_settings)

    def test_list_fails_unauthenticated(self):
        self.user_login.become_unauthorized()
        resp = self.client.get(reverse('v2-jobstrategies-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_list_normal_user(self):
        strategy = self._make_strategy('default')
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-jobstrategies-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 1)
        self.assertEqual(resp.data[0]['id'], strategy.id)
        self.assertEqual(resp.data[0]['name'], 'default')
        self.assertEqual(resp.data[0]['job_flavor']['name'], 'large')
        self.assertEqual(resp.data[0]['job_settings'], self.job_settings.id)

    def test_list_filtering(self):
        self._make_strategy('default')
        self._make_strategy('better')
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-jobstrategies-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 2)
        self.assertEqual({item['name'] for item in resp.data}, {'default', 'better'})
        # Filtering on name narrows the listing to the matching strategy.
        filtered = self.client.get(reverse('v2-jobstrategies-list') + "?name=better", format='json')
        self.assertEqual(filtered.status_code, status.HTTP_200_OK)
        self.assertEqual(len(filtered.data), 1)
        self.assertEqual({item['name'] for item in filtered.data}, {'better'})

    def test_retrieve_with_normal_user(self):
        strategy = self._make_strategy('default')
        self.user_login.become_normal_user()
        detail_url = '{}{}/'.format(reverse('v2-jobstrategies-list'), strategy.id)
        resp = self.client.get(detail_url, format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(resp.data['id'], strategy.id)
        self.assertEqual(resp.data['name'], 'default')
        self.assertEqual(resp.data['job_flavor']['id'], self.job_flavor.id)
        self.assertEqual(resp.data['job_settings'], self.job_settings.id)

    def test_post_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.post(reverse('v2-jobstrategies-list') + '1/', format='json', data={})
        self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_put_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.put(reverse('v2-jobstrategies-list') + '1/', format='json', data={})
        self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_delete_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.delete(reverse('v2-jobstrategies-list') + '1/', format='json')
        self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class WorkflowConfigurationViewSetTestCase(APITestCase):
    """User-facing WorkflowConfiguration endpoint: list/retrieve (with filtering); writes return 405."""

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.workflow = Workflow.objects.create(name='Exome Seq', tag='exomeseq')
        self.workflow2 = Workflow.objects.create(name='Microbiome', tag='microbiome')
        self.workflow_version = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v1 exomeseq',
            version='1',
            url='',
            fields=[{"name": "threads", "type": "int"}, {"name": "items", "type": "int"}],
        )
        self.workflow_version2 = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v2 exomeseq',
            version='2',
            url='',
            fields=[{"name": "threads", "type": "int"}],
        )
        job_flavor = JobFlavor.objects.create(name='large')
        job_settings = create_vm_job_settings()
        self.job_strategy = JobStrategy.objects.create(name='default', job_flavor=job_flavor,
                                                       job_settings=job_settings)
        self.share_group = ShareGroup.objects.create()
        self.endpoint = DDSEndpoint.objects.create(name='DukeDS', agent_key='secret',
                                                   api_root='https://someserver.com/api')

    def _create_configuration(self, tag, workflow, system_job_order):
        # Shared builder for the WorkflowConfiguration rows used by the tests below.
        return WorkflowConfiguration.objects.create(
            tag=tag,
            workflow=workflow,
            system_job_order=system_job_order,
            default_job_strategy=self.job_strategy,
            share_group=self.share_group,
        )

    def test_list_fails_unauthenticated(self):
        self.user_login.become_unauthorized()
        url = reverse('v2-workflowconfigurations-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_list_normal_user(self):
        workflow_configuration = self._create_configuration('b37xGen', self.workflow, {"A": "B"})
        self.user_login.become_normal_user()
        url = reverse('v2-workflowconfigurations-list')
        response = self.client.get(url, format='json')
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['id'], workflow_configuration.id)
        self.assertEqual(response.data[0]['tag'], 'b37xGen')
        self.assertEqual(response.data[0]['workflow'], self.workflow.id)
        self.assertEqual(response.data[0]['system_job_order'], {"A": "B"})
        self.assertEqual(response.data[0]['default_job_strategy'], self.job_strategy.id)
        self.assertEqual(response.data[0]['share_group'], self.share_group.id)

    def test_list_normal_user_with_workflow_tag_filtering(self):
        self._create_configuration('b37xGen', self.workflow, {"A": "B"})
        self._create_configuration('b37other', self.workflow2, {"A": "C"})
        self.user_login.become_normal_user()
        url = reverse('v2-workflowconfigurations-list')
        response = self.client.get(url, format='json')
        self.assertEqual(len(response.data), 2)
        # Filtering by the related workflow's tag narrows to the matching configuration.
        url = reverse('v2-workflowconfigurations-list') + "?workflow__tag=microbiome"
        response = self.client.get(url, format='json')
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['tag'], 'b37other')

    def test_list_normal_user_with_tag_filtering(self):
        self._create_configuration('b37xGen', self.workflow, {"A": "B"})
        self._create_configuration('b37other', self.workflow2, {"A": "C"})
        self.user_login.become_normal_user()
        url = reverse('v2-workflowconfigurations-list')
        response = self.client.get(url, format='json')
        self.assertEqual(len(response.data), 2)
        # Filtering by the configuration's own tag narrows to the matching configuration.
        url = reverse('v2-workflowconfigurations-list') + "?tag=b37other"
        response = self.client.get(url, format='json')
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['tag'], 'b37other')

    def test_retrieve_normal_user(self):
        workflow_configuration = self._create_configuration('b37xGen', self.workflow, {"items": 4})
        self.user_login.become_normal_user()
        url = reverse('v2-workflowconfigurations-list') + str(workflow_configuration.id) + '/'
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], workflow_configuration.id)
        self.assertEqual(response.data['tag'], 'b37xGen')
        self.assertEqual(response.data['workflow'], self.workflow.id)
        self.assertEqual(response.data['system_job_order'], {"items": 4})
        self.assertEqual(response.data['default_job_strategy'], self.job_strategy.id)
        self.assertEqual(response.data['share_group'], self.share_group.id)

    def test_create_with_admin_user(self):
        # NOTE: this method was previously defined twice in the class; the second
        # definition silently shadowed the first, so only one copy ever ran.
        # The duplicate has been removed.
        self.user_login.become_admin_user()
        url = reverse('v2-workflowconfigurations-list')
        response = self.client.post(url, format='json', data={})
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_put_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        url = reverse('v2-workflowconfigurations-list') + '1/'
        response = self.client.put(url, format='json', data={})
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_delete_fails_with_normal_user(self):
        # Renamed from test_delete_fails_with_admin_user: the body logs in as a
        # normal user, so the old name was misleading.
        self.user_login.become_normal_user()
        url = reverse('v2-workflowconfigurations-list') + '1/'
        response = self.client.delete(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class JobTemplatesViewSetTestCase(APITestCase):
    """Job template endpoints: init (placeholder values), validate, and create-job."""

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.workflow = Workflow.objects.create(name='Exome Seq', tag='exomeseq')
        self.workflow2 = Workflow.objects.create(name='Microbiome', tag='microbiome')
        self.workflow_version = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v1 exomeseq',
            version='v1',
            url='',
            fields=[{"name": "threads", "type": "int"}, {"name": "items", "type": "string"}],
        )
        job_flavor = JobFlavor.objects.create(name='large')
        job_settings = create_vm_job_settings()
        self.job_strategy = JobStrategy.objects.create(name='default', job_flavor=job_flavor,
                                                       job_settings=job_settings)
        self.share_group = ShareGroup.objects.create()
        self.endpoint = DDSEndpoint.objects.create(name='DukeDS', agent_key='secret',
                                                   api_root='https://someserver.com/api')
        # Configuration row presumably resolved from the 'exomeseq/v1/b37xGen' tag in
        # the tests below; it only needs to exist, so no reference is kept.
        WorkflowConfiguration.objects.create(
            tag='b37xGen',
            workflow=self.workflow,
            system_job_order={"A": "B"},
            default_job_strategy=self.job_strategy,
            share_group=self.share_group,
        )

    def test_init(self):
        """Init returns placeholder values for every user-supplied field."""
        user = self.user_login.become_normal_user()
        DDSUserCredential.objects.create(endpoint=self.endpoint, user=user, token='secret1', dds_id='1')
        JobFileStageGroup.objects.create(user=user)  # created for its side effect only
        url = reverse('v2-jobtemplate_init')
        response = self.client.post(url, format='json', data={
            'tag': 'exomeseq/v1/b37xGen'
        })
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['tag'], 'exomeseq/v1/b37xGen')
        self.assertEqual(response.data['name'], STRING_VALUE_PLACEHOLDER)
        self.assertEqual(response.data['fund_code'], STRING_VALUE_PLACEHOLDER)
        self.assertEqual(response.data['job_order'],
                         {'threads': INT_VALUE_PLACEHOLDER, 'items': STRING_VALUE_PLACEHOLDER})

    def test_validate(self):
        """A fully populated template validates successfully."""
        self.user_login.become_normal_user()
        url = reverse('v2-jobtemplate_validate')
        response = self.client.post(url, format='json', data={
            'tag': 'exomeseq/v1/b37xGen',
            'name': 'My Job',
            'fund_code': '001',
            'job_order': {'items': 'cheese', 'threads': 1},
            'share_group': None,
            'stage_group': None,
        })
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_validate_no_tag(self):
        """Omitting the tag fails with a required-field error."""
        self.user_login.become_normal_user()
        url = reverse('v2-jobtemplate_validate')
        response = self.client.post(url, format='json', data={
            'name': 'My Job',
            'fund_code': '001',
            'job_order': {'items': 'cheese', 'threads': 1},
            'share_group': None,
            'stage_group': None,
        })
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {
            'tag': [REQUIRED_ERROR_MESSAGE]
        })

    def test_validate_missing_values(self):
        """Missing name/fund_code/job_order fields are each reported as required."""
        self.user_login.become_normal_user()
        url = reverse('v2-jobtemplate_validate')
        response = self.client.post(url, format='json', data={
            'tag': 'exomeseq/v1/b37xGen',
            'job_order': {'threads': 1},
            'share_group': None,
            'stage_group': None,
        })
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {
            'name': [REQUIRED_ERROR_MESSAGE],
            'fund_code': [REQUIRED_ERROR_MESSAGE],
            'job_order.items': [REQUIRED_ERROR_MESSAGE],
        })

    def test_validate_placeholder_values(self):
        """Fields still holding placeholder values are rejected."""
        self.user_login.become_normal_user()
        url = reverse('v2-jobtemplate_validate')
        response = self.client.post(url, format='json', data={
            'tag': 'exomeseq/v1/b37xGen',
            'name': STRING_VALUE_PLACEHOLDER,
            'fund_code': '001',
            'job_order': {'items': STRING_VALUE_PLACEHOLDER, 'threads': INT_VALUE_PLACEHOLDER},
            'share_group': None,
            'stage_group': None,
        })
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {
            'name': [PLACEHOLDER_ERROR_MESSAGE],
            'job_order.items': [PLACEHOLDER_ERROR_MESSAGE],
            'job_order.threads': [PLACEHOLDER_ERROR_MESSAGE],
        })

    def test_create_job(self):
        """Create-job merges the system job order into the user's job order."""
        user = self.user_login.become_normal_user()
        DDSUserCredential.objects.create(endpoint=self.endpoint, user=user, token='secret1', dds_id='1')
        stage_group = JobFileStageGroup.objects.create(user=user)
        url = reverse('v2-jobtemplate_createjob')
        response = self.client.post(url, format='json', data={
            'tag': 'exomeseq/v1/b37xGen',
            'name': 'My Job',
            'fund_code': '001',
            'stage_group': stage_group.id,
            'job_order': {'threads': 12, 'items': 'pie'},
            'share_group': self.share_group.id
        })
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['name'], 'My Job')
        jobs = Job.objects.all()
        self.assertEqual(len(jobs), 1)
        self.assertEqual(jobs[0].name, 'My Job')
        self.assertEqual(jobs[0].fund_code, '001')
        # The stored job order contains both the system entries and the user's.
        self.assertEqual(json.loads(jobs[0].job_order), {'A': 'B', 'threads': 12, 'items': 'pie'})

    def test_create_job_with_vm_strategy(self):
        """Create-job also accepts an explicit job_strategy."""
        user = self.user_login.become_normal_user()
        DDSUserCredential.objects.create(endpoint=self.endpoint, user=user, token='secret1', dds_id='1')
        stage_group = JobFileStageGroup.objects.create(user=user)
        url = reverse('v2-jobtemplate_createjob')
        response = self.client.post(url, format='json', data={
            'tag': 'exomeseq/v1/b37xGen',
            'name': 'My Job',
            'fund_code': '001',
            'stage_group': stage_group.id,
            'job_order': {'threads': 12, 'items': 'pie'},
            'share_group': self.share_group.id,
            'job_strategy': self.job_strategy.id,
        })
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['name'], 'My Job')
        jobs = Job.objects.all()
        self.assertEqual(len(jobs), 1)
        self.assertEqual(jobs[0].name, 'My Job')
        self.assertEqual(jobs[0].fund_code, '001')
        self.assertEqual(json.loads(jobs[0].job_order), {'A': 'B', 'threads': 12, 'items': 'pie'})
class ShareGroupViewSetTestCase(APITestCase):
    """Read-only ShareGroup endpoint: listing, filtering, retrieval, and blocked writes."""

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.share_group = ShareGroup.objects.create(name="somegroup")

    def test_list_fails_unauthenticated(self):
        self.user_login.become_unauthorized()
        resp = self.client.get(reverse('v2-sharegroup-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_list_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-sharegroup-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 1)
        self.assertEqual(resp.data[0]['id'], self.share_group.id)
        self.assertEqual(resp.data[0]['name'], 'somegroup')

    def test_list_with_filtering(self):
        self.user_login.become_normal_user()
        ShareGroup.objects.create(name="somegroup2")
        resp = self.client.get(reverse('v2-sharegroup-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 2)
        self.assertEqual({item['name'] for item in resp.data}, {"somegroup", "somegroup2"})
        # Filtering on name narrows the listing to the matching group.
        filtered = self.client.get(reverse('v2-sharegroup-list') + "?name=somegroup2", format='json')
        self.assertEqual(filtered.status_code, status.HTTP_200_OK)
        self.assertEqual(len(filtered.data), 1)
        self.assertEqual({item['name'] for item in filtered.data}, {"somegroup2"})

    def test_retrieve_with_normal_user(self):
        self.user_login.become_normal_user()
        detail_url = '{}{}/'.format(reverse('v2-sharegroup-list'), self.share_group.id)
        resp = self.client.get(detail_url, format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(resp.data['id'], self.share_group.id)
        self.assertEqual(resp.data['name'], 'somegroup')

    def test_post_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.post(reverse('v2-sharegroup-list') + '1/', format='json', data={})
        self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_put_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.put(reverse('v2-sharegroup-list') + '1/', format='json', data={})
        self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_delete_fails_with_normal_user(self):
        self.user_login.become_normal_user()
        resp = self.client.delete(reverse('v2-sharegroup-list') + '1/', format='json')
        self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class WorkflowVersionsTestCase(APITestCase):
    """User-facing WorkflowVersion endpoint: filtering, ordering, and serialized fields."""

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.workflow = Workflow.objects.create(name='Exome Seq', tag='exomeseq')
        self.workflow2 = Workflow.objects.create(name='Microbiome', tag='microbiome')
        shared_fields = [{"name": "threads", "type": "int"}, {"name": "items", "type": "string"}]
        self.workflow_version1 = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v1 exomeseq',
            version='1.0.1',
            version_info_url='https://github.com/bespin-workflows/gatk/blob/1/CHANGELOG.md',
            url='',
            fields=shared_fields,
        )
        self.workflow_version2 = WorkflowVersion.objects.create(
            workflow=self.workflow,
            description='v2 exomeseq',
            version='2.3.1',
            version_info_url='https://github.com/bespin-workflows/gatk/blob/2/CHANGELOG.md',
            url='',
            fields=shared_fields,
            enable_ui=False,
        )
        self.workflow_version3 = WorkflowVersion.objects.create(
            workflow=self.workflow2,
            description='v1 other',
            version='1.0.0-dev',
            version_info_url='https://github.com/bespin-workflows/gatk2/blob/1/CHANGELOG.md',
            url='',
            fields=shared_fields,
        )

    def test_list_filter_on_tag(self):
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-workflowversion-list'))
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 3)
        # Filtering by the related workflow's tag narrows to that workflow's versions.
        resp = self.client.get(reverse('v2-workflowversion-list') + '?workflow__tag=exomeseq')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 2)
        self.assertEqual({item['description'] for item in resp.data}, {'v1 exomeseq', 'v2 exomeseq'})

    def test_list_filter_on_version(self):
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-workflowversion-list'))
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 3)
        # Filtering on the version string narrows to the single match.
        resp = self.client.get(reverse('v2-workflowversion-list') + '?version=2.3.1')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 1)
        self.assertEqual(resp.data[0]['description'], 'v2 exomeseq')

    def test_get_details_enable_ui(self):
        self.user_login.become_normal_user()
        # Version 1 defaulted enable_ui; version 2 was created with it off.
        for version, expected_ui in ((self.workflow_version1, True), (self.workflow_version2, False)):
            resp = self.client.get(reverse('v2-workflowversion-list') + '{}/'.format(version.id))
            self.assertEqual(resp.status_code, status.HTTP_200_OK)
            self.assertEqual(resp.data['enable_ui'], expected_ui)

    def test_list_version_info_url(self):
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-workflowversion-list'))
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 3)
        observed_urls = sorted(item['version_info_url'] for item in resp.data)
        self.assertEqual(observed_urls, [
            'https://github.com/bespin-workflows/gatk/blob/1/CHANGELOG.md',
            'https://github.com/bespin-workflows/gatk/blob/2/CHANGELOG.md',
            'https://github.com/bespin-workflows/gatk2/blob/1/CHANGELOG.md',
        ])

    def test_sorted_by_workflow_and_version(self):
        self.user_login.become_normal_user()
        resp = self.client.get(reverse('v2-workflowversion-list'), format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.data), 3)
        observed = [(item['workflow'], item['version']) for item in resp.data]
        self.assertEqual(observed, [
            (self.workflow.id, '1.0.1'),
            (self.workflow.id, '2.3.1'),
            (self.workflow2.id, '1.0.0-dev'),
        ])

    def test_includes_tool_details(self):
        details = WorkflowVersionToolDetails.objects.create(
            workflow_version=self.workflow_version1,
            details=[{'k': 'v'}]
        )
        self.user_login.become_admin_user()
        resp = self.client.get(
            reverse('v2-workflowversion-list') + '{}/'.format(self.workflow_version1.id),
            format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(resp.data['tool_details'], details.pk)
class WorkflowVersionWorkflowStateTestCase(APITestCase):
    """Versions of deprecated workflows are hidden from the default listing but remain reachable."""

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.user_login.become_normal_user()
        # One active and one deprecated workflow, each with a single version.
        self.active_wf = Workflow.objects.create(name='active', tag='active-tag',
                                                 state=Workflow.WORKFLOW_STATE_ACTIVE)
        self.active_version = WorkflowVersion.objects.create(workflow=self.active_wf,
                                                             version="active-version", url='', fields=[])
        self.deprecated_wf = Workflow.objects.create(name='deprecated', tag='deprecated-tag',
                                                     state=Workflow.WORKFLOW_STATE_DEPRECATED)
        self.deprecated_version = WorkflowVersion.objects.create(workflow=self.deprecated_wf,
                                                                 version="deprecated-version", url='', fields=[])

    def test_excludes_deprecated(self):
        # Both versions exist, but the default listing hides the deprecated one.
        self.assertEqual(WorkflowVersion.objects.count(), 2)
        resp = self.client.get(reverse('v2-workflowversion-list'), format='json')
        self.assertEqual(len(resp.data), 1)
        self.assertEqual(resp.data[0]['version'], 'active-version')

    def test_includes_deprecated_when_filtering(self):
        self.assertEqual(WorkflowVersion.objects.count(), 2)
        resp = self.client.get(reverse('v2-workflowversion-list') + "?workflow__state=D", format='json')
        self.assertEqual(len(resp.data), 1)
        self.assertEqual(resp.data[0]['version'], 'deprecated-version')
        self.assertEqual(resp.data[0]['id'], self.deprecated_version.id)

    def test_filters_on_workflow_state(self):
        self.assertEqual(WorkflowVersion.objects.count(), 2)
        resp = self.client.get(reverse('v2-workflowversion-list') + "?workflow__state=A", format='json')
        self.assertEqual(len(resp.data), 1)
        self.assertEqual(resp.data[0]['version'], 'active-version')
        self.assertEqual(resp.data[0]['id'], self.active_version.id)

    def test_can_get_deprecated_by_id(self):
        # A deprecated version is still retrievable via its detail route.
        detail_url = reverse('v2-workflowversion-detail', args=[self.deprecated_version.id])
        detail_response = self.client.get(detail_url, format='json')
        self.assertEqual(detail_response.data['version'], 'deprecated-version')
class JobsTestCase(APITestCase):
def setUp(self):
self.user_login = UserLogin(self.client)
workflow = Workflow.objects.create(name='RnaSeq')
cwl_url = "https://raw.githubusercontent.com/johnbradley/iMADS-worker/master/predict_service/predict-workflow-packed.cwl"
self.workflow_version = WorkflowVersion.objects.create(workflow=workflow,
version="v1",
url=cwl_url,
fields=[])
self.share_group = ShareGroup.objects.create(name='Results Checkers')
self.job_flavor = JobFlavor.objects.create(name='flavor1', cpus=32, memory='12Gi')
self.vm_job_settings = create_vm_job_settings(name='vm')
job_runtime_k8s = JobRuntimeK8s.objects.create()
job_runtime_k8s.steps = [
JobRuntimeStepK8s.objects.create(
step_type=JobRuntimeStepK8s.STAGE_DATA_STEP,
flavor=self.job_flavor,
image_name='myimage',
base_command=['download.py']
)
]
lando_connection = LandoConnection.objects.create(
cluster_type=LandoConnection.K8S_TYPE,
host='somehost', username='jpb67',
password='secret', queue_name='lando')
self.k8s_job_settings = JobSettings.objects.create(
name='k8s',
lando_connection=lando_connection,
job_runtime_k8s=job_runtime_k8s)
def test_jobs_list_shows_job_settings(self):
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-job-list') + '{}/'.format(job.id)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['job_settings'], self.vm_job_settings.id)
self.assertNotIn('vm_settings', response.data)
def test_admin_jobs_list_shows_vm_job_settings(self):
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['job_settings']['name'], 'vm')
self.assertEqual(response.data['job_settings']['job_runtime_k8s'], None)
job_runtime_openstack = response.data['job_settings']['job_runtime_openstack']
self.assertEqual(job_runtime_openstack['image_name'], 'someimage')
self.assertEqual(job_runtime_openstack['cwl_base_command'], ['cwltool'])
self.assertEqual(job_runtime_openstack['cwl_post_process_command'], ['cleanup.sh'])
self.assertEqual(job_runtime_openstack['cwl_pre_process_command'], ['prep.sh'])
self.assertEqual(job_runtime_openstack['cloud_settings']['name'], 'cloud')
self.assertEqual(job_runtime_openstack['cloud_settings']['vm_project']['name'], 'project1')
def test_admin_jobs_list_shows_k8s_job_settings(self):
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.k8s_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['job_settings']['name'], 'k8s')
self.assertEqual(response.data['job_settings']['job_runtime_openstack'], None)
job_runtime_k8s_steps = response.data['job_settings']['job_runtime_k8s']['steps']
self.assertEqual(len(job_runtime_k8s_steps), 1)
self.assertEqual(job_runtime_k8s_steps[0]['step_type'], JobRuntimeStepK8s.STAGE_DATA_STEP)
self.assertEqual(job_runtime_k8s_steps[0]['image_name'], 'myimage')
self.assertEqual(job_runtime_k8s_steps[0]['base_command'],['download.py'])
self.assertEqual(job_runtime_k8s_steps[0]['flavor']['cpus'], 32)
self.assertEqual(job_runtime_k8s_steps[0]['flavor']['memory'], '12Gi')
def testAdminSeeAllData(self):
normal_user = self.user_login.become_normal_user()
job = Job.objects.create(name='my job',
workflow_version=self.workflow_version,
job_order={},
user=normal_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
# normal user can't see admin endpoint
url = reverse('v2-admin_job-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
other_user = self.user_login.become_other_normal_user()
job = Job.objects.create(name='my job2',
workflow_version=self.workflow_version,
job_order={},
user=other_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
# admin user can see both via admin endpoint
admin_user = self.user_login.become_admin_user()
url = reverse('v2-admin_job-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(2, len(response.data))
self.assertIn(other_user.id, [item['user']['id'] for item in response.data])
self.assertIn(normal_user.id, [item['user']['id'] for item in response.data])
self.assertIn('my job', [item['name'] for item in response.data])
self.assertIn('my job2', [item['name'] for item in response.data])
self.assertEqual(['RnaSeq', 'RnaSeq'], [item['workflow_version']['name'] for item in response.data])
self.assertIn(self.share_group.id, [item['share_group'] for item in response.data])
self.assertEqual([None, None], [item['user'].get('cleanup_job_vm') for item in response.data])
def testAdminCanSeeDeletedJob(self):
url = reverse('v2-admin_job-list')
normal_user = self.user_login.become_normal_user()
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='my job',
state=Job.JOB_STATE_NEW,
workflow_version=self.workflow_version,
job_order={},
user=normal_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(1, len(response.data))
# Now mark as deleted
job.state = Job.JOB_STATE_DELETED
job.save()
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(1, len(response.data))
self.assertEqual(response.data[0]['state'], 'D')
def testAdminFilterJobsVmInstanceName(self):
admin_user = self.user_login.become_admin_user()
Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
vm_instance_name='vm_job_1',
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
Job.objects.create(name='somejob2',
workflow_version=self.workflow_version,
vm_instance_name='vm_job_2',
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '?vm_instance_name=vm_job_1'
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(1, len(response.data))
self.assertEqual('somejob', response.data[0]['name'])
def test_settings_effect_job_cleanup_vm(self):
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
job.cleanup_vm = True
job.save()
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(True, response.data['cleanup_vm'])
job.cleanup_vm = False
job.save()
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(False, response.data['cleanup_vm'])
def test_normal_user_trying_to_update_job(self):
"""
Only admin should change job state or job step.
Regular users can only change the state and step via the start, cancel and restart job endpoints.
"""
normal_user = self.user_login.become_normal_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=normal_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
response = self.client.put(url, format='json',
data={
'state': Job.JOB_STATE_FINISHED,
'step': Job.JOB_STEP_RUNNING,
})
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@patch('bespin_api_v2.api.JobMailer')
def testAdminUserUpdatesStateAndStep(self, MockJobMailer):
"""
Admin should be able to change job state and job step.
"""
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
response = self.client.put(url, format='json',
data={
'state': Job.JOB_STATE_RUNNING,
'step': Job.JOB_STEP_CREATE_VM,
})
self.assertEqual(response.status_code, status.HTTP_200_OK)
job = Job.objects.first()
self.assertEqual(Job.JOB_STATE_RUNNING, job.state)
self.assertEqual(Job.JOB_STEP_CREATE_VM, job.step)
@patch('bespin_api_v2.api.JobMailer')
def test_mails_when_job_state_changes(self, MockJobMailer):
mock_mail_current_state = Mock()
MockJobMailer.return_value.mail_current_state = mock_mail_current_state
"""
Admin should be able to change job state and job step.
"""
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
state=Job.JOB_STATE_AUTHORIZED,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
response = self.client.put(url, format='json',
data={
'state': Job.JOB_STATE_RUNNING,
'step': Job.JOB_STEP_CREATE_VM,
})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue(mock_mail_current_state.called)
@patch('bespin_api_v2.api.JobMailer')
def test_does_not_mail_when_job_state_stays(self, MockJobMailer):
mock_mail_current_state = Mock()
MockJobMailer.return_value.mail_current_state = mock_mail_current_state
"""
Admin should be able to change job state and job step.
"""
admin_user = self.user_login.become_admin_user()
job = Job.objects.create(name='somejob',
workflow_version=self.workflow_version,
job_order={},
user=admin_user,
share_group=self.share_group,
state=Job.JOB_STATE_RUNNING,
step=Job.JOB_STEP_CREATE_VM,
job_settings=self.vm_job_settings,
job_flavor=self.job_flavor,
)
url = reverse('v2-admin_job-list') + '{}/'.format(job.id)
response = self.client.put(url, format='json',
data={
'state': Job.JOB_STATE_RUNNING,
'step': Job.JOB_STEP_RUNNING,
})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(mock_mail_current_state.called)
class EmailMessageTestCase(APITestCase):
    """Tests for the admin email-message endpoints: access control, list, read, create, send."""

    def setUp(self):
        self.user_login = UserLogin(self.client)

    @staticmethod
    def _make_message(num):
        # Build an EmailMessage whose fields all embed the given number.
        return EmailMessage.objects.create(
            body='body{}'.format(num),
            subject='subject{}'.format(num),
            sender_email='sender{}@example.com'.format(num),
            to_email='recipient{}@university.edu'.format(num),
        )

    def test_admin_only_allow_admin_users(self):
        url = reverse('v2-admin_emailmessage-list')
        # Anonymous, then normal, then admin — only the admin gets through.
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        self.user_login.become_normal_user()
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.user_login.become_admin_user()
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_admin_list(self):
        for num in (1, 2, 3):
            self._make_message(num)
        self.user_login.become_admin_user()
        response = self.client.get(reverse('v2-admin_emailmessage-list'), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(3, len(response.data))
        bodies = [message['body'] for message in response.data]
        self.assertEqual(['body1', 'body2', 'body3'], bodies)

    def test_admin_read_single_message(self):
        message = self._make_message(1)
        self.user_login.become_admin_user()
        detail_url = reverse('v2-admin_emailmessage-detail', args=[message.id])
        response = self.client.get(detail_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual('body1', response.data['body'])
        self.assertEqual('subject1', response.data['subject'])
        self.assertEqual('sender1@example.com', response.data['sender_email'])
        self.assertEqual('recipient1@university.edu', response.data['to_email'])
        self.assertEqual('N', response.data['state'])

    def test_admin_create_message(self):
        message_dict = {
            'body': 'Email message body',
            'subject': 'Subject',
            'sender_email': 'fred@school.edu',
            'to_email': 'wilma@company.com'
        }
        self.user_login.become_admin_user()
        response = self.client.post(reverse('v2-admin_emailmessage-list'), format='json', data=message_dict)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        created = EmailMessage.objects.first()
        self.assertEqual('Subject', created.subject)
        self.assertEqual('Email message body', created.body)
        self.assertEqual('fred@school.edu', created.sender_email)
        self.assertEqual('wilma@company.com', created.to_email)

    @patch('data.mailer.DjangoEmailMessage')
    def test_admin_send_message(self, MockSender):
        message = self._make_message(1)
        send_url = reverse('v2-admin_emailmessage-detail', args=[message.id]) + 'send/'
        self.user_login.become_admin_user()
        response = self.client.post(send_url, format='json', data={})
        self.assertTrue(MockSender.called)
        self.assertTrue(MockSender.return_value.send.called)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual('S', response.data['state'])

    @patch('data.mailer.DjangoEmailMessage')
    def test_admin_send_message_with_error(self, MockSender):
        # Simulate the underlying mailer blowing up during send.
        MockSender.return_value.send.side_effect = Exception()
        message = self._make_message(1)
        send_url = reverse('v2-admin_emailmessage-detail', args=[message.id]) + 'send/'
        self.user_login.become_admin_user()
        response = self.client.post(send_url, format='json', data={})
        self.assertTrue(MockSender.called)
        self.assertTrue(MockSender.return_value.send.called)
        self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)
        message = EmailMessage.objects.get(id=message.id)
        self.assertEqual(message.state, 'E')
class EmailTemplateTestCase(APITestCase):
    """Tests for the admin email-template endpoints: access control, list, read, create."""

    def setUp(self):
        self.user_login = UserLogin(self.client)

    def test_admin_only_allow_admin_users(self):
        url = reverse('v2-admin_emailtemplate-list')
        # Anonymous, then normal, then admin — only the admin gets through.
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        self.user_login.become_normal_user()
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.user_login.become_admin_user()
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_admin_list(self):
        for num in (1, 2, 3):
            EmailTemplate.objects.create(
                name='template{}'.format(num),
                body_template='body_template{}'.format(num),
                subject_template='subject_template{}'.format(num),
            )
        self.user_login.become_admin_user()
        response = self.client.get(reverse('v2-admin_emailtemplate-list'), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(3, len(response.data))
        body_templates = [template['body_template'] for template in response.data]
        self.assertEqual(['body_template1', 'body_template2', 'body_template3'], body_templates)

    def test_admin_read_single_template(self):
        template = EmailTemplate.objects.create(
            name='template1',
            body_template='body1',
            subject_template='subject1',
        )
        self.user_login.become_admin_user()
        detail_url = reverse('v2-admin_emailtemplate-detail', args=[template.id])
        response = self.client.get(detail_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual('template1', response.data['name'])
        self.assertEqual('body1', response.data['body_template'])
        self.assertEqual('subject1', response.data['subject_template'])

    def test_admin_create_template(self):
        template_dict = {
            'name': 'error-template',
            'body_template': 'The following error occurred {{ error }}',
            'subject_template': 'Error for job {{ job.name }}',
        }
        self.user_login.become_admin_user()
        response = self.client.post(reverse('v2-admin_emailtemplate-list'), format='json', data=template_dict)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        created = EmailTemplate.objects.first()
        self.assertEqual('error-template', created.name)
class AdminLandoConnectionViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
    """Admin create/list/retrieve tests for LandoConnection endpoints."""
    BASE_NAME = 'v2-admin_landoconnection'
    MODEL_CLS = LandoConnection

    def setUp(self):
        self.user_login = UserLogin(self.client)

    def test_list_url(self):
        self.assertEqual(self.list_url(), '/api/v2/admin/lando-connections/')

    def test_object_url(self):
        self.assertEqual(self.object_url(3), '/api/v2/admin/lando-connections/3/')

    @staticmethod
    def _connection_fields():
        # Shared field values used both for direct creation and as POST payload.
        return {
            'cluster_type': LandoConnection.K8S_TYPE,
            'host': 'somehost',
            'username': 'user1',
            'password': 'secret',
            'queue_name': 'lando',
        }

    def create_model_object(self):
        return LandoConnection.objects.create(**self._connection_fields())

    def check_single_response(self, model_object, response_data):
        self.assertEqual(response_data['id'], model_object.id)
        self.assertEqual(response_data['cluster_type'], 'k8s')

    def build_post_data(self):
        return self._connection_fields()
class AdminJobStrategyViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
    """Admin CRUD tests for JobStrategy endpoints, plus delete and name filtering."""
    BASE_NAME = 'v2-admin_jobstrategy'
    MODEL_CLS = JobStrategy

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.job_flavor = JobFlavor.objects.create(name='large')
        self.lando_connection = LandoConnection.objects.create(
            cluster_type=LandoConnection.K8S_TYPE,
            host='somehost',
            username='user1',
            password='secret',
            queue_name='lando',
        )
        self.job_settings = JobSettings.objects.create(
            lando_connection=self.lando_connection,
            job_runtime_k8s=JobRuntimeK8s.objects.create(),
        )

    def test_list_url(self):
        self.assertEqual(self.list_url(), '/api/v2/admin/job-strategies/')

    def test_object_url(self):
        self.assertEqual(self.object_url(3), '/api/v2/admin/job-strategies/3/')

    def create_model_object(self):
        return JobStrategy.objects.create(
            name='mystrategy',
            job_settings=self.job_settings,
            job_flavor=self.job_flavor,
        )

    def check_single_response(self, model_object, response_data):
        self.assertEqual(response_data['id'], model_object.id)
        self.assertEqual(response_data['name'], 'mystrategy')

    def build_post_data(self):
        return {
            'name': 'mystrategy',
            'job_settings': self.job_settings.id,
            'job_flavor': self.job_flavor.id,
        }

    def test_delete_fails_with_admin_user(self):
        # Admin users are allowed to delete, overriding this test so it will not fail
        pass

    def test_delete_succeeds_with_admin_user(self):
        strategy = self.create_model_object()
        self.user_login.become_admin_user()
        response = self.client.delete(self.object_url(strategy.id), format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_list_filter_by_name(self):
        for strategy_name in ('default', 'better'):
            JobStrategy.objects.create(name=strategy_name,
                                       job_flavor=self.job_flavor,
                                       job_settings=self.job_settings)
        self.user_login.become_normal_user()
        url = reverse('v2-jobstrategies-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 2)
        self.assertEqual({item['name'] for item in response.data}, {'default', 'better'})
        response = self.client.get(url + "?name=better", format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual({item['name'] for item in response.data}, {'better'})
class AdminJobSettingsViewSetTestCase(APITestCase, AdminCreateListRetrieveMixin):
    """Admin create/list/retrieve tests for JobSettings endpoints."""
    BASE_NAME = 'v2-admin_jobsettings'
    MODEL_CLS = JobSettings

    def setUp(self):
        self.user_login = UserLogin(self.client)
        self.job_flavor = JobFlavor.objects.create(name='large')
        self.lando_connection = LandoConnection.objects.create(
            cluster_type=LandoConnection.K8S_TYPE,
            host='somehost',
            username='user1',
            password='secret',
            queue_name='lando',
        )
        self.runtime_k8s = JobRuntimeK8s.objects.create()

    def test_list_url(self):
        self.assertEqual(self.list_url(), '/api/v2/admin/job-settings/')

    def test_object_url(self):
        self.assertEqual(self.object_url(3), '/api/v2/admin/job-settings/3/')

    def create_model_object(self):
        return JobSettings.objects.create(
            name='mysettings',
            lando_connection=self.lando_connection,
            job_runtime_k8s=self.runtime_k8s,
        )

    def check_single_response(self, model_object, response_data):
        self.assertEqual(response_data['id'], model_object.id)
        self.assertEqual(response_data['name'], 'mysettings')

    def build_post_data(self):
        return {
            'name': 'mysettings',
            'lando_connection': self.lando_connection.id,
            'job_runtime_k8s': self.runtime_k8s.id,
        }
| 46.44093
| 132
| 0.630093
| 8,055
| 71,937
| 5.396276
| 0.055121
| 0.091449
| 0.082016
| 0.052707
| 0.825293
| 0.796351
| 0.749672
| 0.711183
| 0.671958
| 0.657717
| 0
| 0.014304
| 0.24976
| 71,937
| 1,548
| 133
| 46.47093
| 0.791088
| 0.010537
| 0
| 0.625561
| 0
| 0.001495
| 0.12481
| 0.03331
| 0
| 0
| 0
| 0
| 0.207025
| 1
| 0.098655
| false
| 0.005232
| 0.006726
| 0.007474
| 0.14275
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
73a1d2f4ffcf7af0327fbc1f6631bf883ed73f15
| 41
|
py
|
Python
|
jdaviz/configs/imviz/plugins/aper_phot_simple/__init__.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
jdaviz/configs/imviz/plugins/aper_phot_simple/__init__.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | 3
|
2021-05-06T13:03:37.000Z
|
2021-12-02T14:51:48.000Z
|
jdaviz/configs/imviz/plugins/aper_phot_simple/__init__.py
|
rosteen/jdaviz
|
e02c08d68ef71c5e40600785f46e65e5ae95e236
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-12-27T04:24:34.000Z
|
2021-12-27T04:24:34.000Z
|
from .aper_phot_simple import * # noqa
| 20.5
| 40
| 0.731707
| 6
| 41
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 41
| 1
| 41
| 41
| 0.848485
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
73ea4e79965a64bc102b770bbf73639264ee1381
| 50
|
py
|
Python
|
tests/scripts/t3/t3.py
|
ShixiangWang/loon
|
288cf3ce032fe153fb65549f7ed0d3045be6207a
|
[
"MIT"
] | 4
|
2019-10-02T18:32:06.000Z
|
2020-08-04T15:30:38.000Z
|
tests/scripts/t3/t3.py
|
jianguozhouzunyimedicaluniversity/loon
|
ac5d6ded168f0fb90eaac2adfb611bca7229728b
|
[
"MIT"
] | 22
|
2019-09-29T00:31:00.000Z
|
2021-08-03T08:33:55.000Z
|
tests/scripts/t3/t3.py
|
jianguozhouzunyimedicaluniversity/loon
|
ac5d6ded168f0fb90eaac2adfb611bca7229728b
|
[
"MIT"
] | 3
|
2020-04-26T13:06:23.000Z
|
2020-09-26T16:08:49.000Z
|
#!/usr/bin/env python
print('This is t3 script.')
| 16.666667
| 27
| 0.68
| 9
| 50
| 3.777778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.12
| 50
| 2
| 28
| 25
| 0.75
| 0.4
| 0
| 0
| 0
| 0
| 0.62069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fb415dfd0b9d219adc4392dfd58c4f93714f2a63
| 254
|
py
|
Python
|
pokelist/models.py
|
gerbot150/pokelist-backend
|
8228660447ed15c9ea5126e9ab61d1d57bd5adaf
|
[
"MIT"
] | null | null | null |
pokelist/models.py
|
gerbot150/pokelist-backend
|
8228660447ed15c9ea5126e9ab61d1d57bd5adaf
|
[
"MIT"
] | null | null | null |
pokelist/models.py
|
gerbot150/pokelist-backend
|
8228660447ed15c9ea5126e9ab61d1d57bd5adaf
|
[
"MIT"
] | null | null | null |
from django.db import models
class Pokemon(models.Model):
pokemon_id = models.IntegerField()
name = models.CharField(max_length=200)
location_area_encounters = models.CharField(max_length=200)
def __str__(self):
return self.name
| 28.222222
| 63
| 0.73622
| 33
| 254
| 5.393939
| 0.666667
| 0.168539
| 0.202247
| 0.269663
| 0.303371
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028571
| 0.173228
| 254
| 8
| 64
| 31.75
| 0.819048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fb6bc36cb785f1b0ed8fab973f6ab17705071c78
| 102
|
py
|
Python
|
insomniac/extra_features/session.py
|
chikko80/Insomniac
|
2d49a6d4e5a15eb63bddd9aace3cc872cf40b01a
|
[
"MIT"
] | null | null | null |
insomniac/extra_features/session.py
|
chikko80/Insomniac
|
2d49a6d4e5a15eb63bddd9aace3cc872cf40b01a
|
[
"MIT"
] | null | null | null |
insomniac/extra_features/session.py
|
chikko80/Insomniac
|
2d49a6d4e5a15eb63bddd9aace3cc872cf40b01a
|
[
"MIT"
] | null | null | null |
from insomniac import activation_controller
exec(activation_controller.get_extra_feature("session"))
| 25.5
| 56
| 0.872549
| 12
| 102
| 7.083333
| 0.833333
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 102
| 3
| 57
| 34
| 0.885417
| 0
| 0
| 0
| 0
| 0
| 0.068627
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fb775d7242c0bab3903b0b3796f0f0553f191745
| 317
|
py
|
Python
|
concat/tests/level0/test_execute.py
|
jmanuel1/concat
|
b8a982f0b07c4af4a8d30c8fab927a07a4068232
|
[
"MIT"
] | 5
|
2020-11-27T23:34:29.000Z
|
2022-03-08T16:37:19.000Z
|
concat/tests/level0/test_execute.py
|
jmanuel1/concat
|
b8a982f0b07c4af4a8d30c8fab927a07a4068232
|
[
"MIT"
] | 1
|
2020-06-03T22:43:36.000Z
|
2020-06-03T22:45:42.000Z
|
concat/tests/level0/test_execute.py
|
jmanuel1/concat
|
b8a982f0b07c4af4a8d30c8fab927a07a4068232
|
[
"MIT"
] | null | null | null |
import concat.level0.execute
import unittest
import ast
class TestExecute(unittest.TestCase):
def setUp(self) -> None:
pass
def test_execute_function(self) -> None:
module = ast.Module(body=[])
concat.level0.execute.execute('<test>', module, {})
# we passed if we get here
| 21.133333
| 59
| 0.649842
| 39
| 317
| 5.230769
| 0.589744
| 0.117647
| 0.186275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00823
| 0.233438
| 317
| 14
| 60
| 22.642857
| 0.831276
| 0.07571
| 0
| 0
| 0
| 0
| 0.020619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
837f889a8aa5371d5470ab3a972975d7e5fc79d4
| 8,318
|
py
|
Python
|
tests/test_sentinel.py
|
AznStevy/heart_rate_sentinel_server
|
e241ee705221be643a3c3773a2e5ed9c129c733f
|
[
"MIT"
] | null | null | null |
tests/test_sentinel.py
|
AznStevy/heart_rate_sentinel_server
|
e241ee705221be643a3c3773a2e5ed9c129c733f
|
[
"MIT"
] | 4
|
2018-11-13T20:44:50.000Z
|
2018-11-16T19:47:09.000Z
|
tests/test_sentinel.py
|
AznStevy/heart_rate_sentinel_server
|
e241ee705221be643a3c3773a2e5ed9c129c733f
|
[
"MIT"
] | null | null | null |
import json
import pytest
import datetime
from random import choice
from string import ascii_uppercase
from heart_rate_sentinel_server import get_app
def _new_patient_id():
return ''.join(choice(ascii_uppercase) for _ in range(10))
@pytest.fixture()
def flask_app():
app = get_app()
return app
@pytest.fixture()
def patient_1_info():
patient_1 = {
"patient_id": 1,
"attending_email": "random@duke.edu",
"user_age": 21
}
return patient_1
@pytest.fixture()
def patient_2_info():
patient_2 = {
"patient_id": 2,
"attending_email": "random@duke.edu",
"user_age": 21
}
return patient_2
@pytest.fixture()
def heart_rate_p1():
payload = {
"patient_id": 1,
"heart_rate": 80,
}
return payload
# tachychardic
@pytest.fixture()
def heart_rate_p2():
payload = {
"patient_id": 2,
"heart_rate": 120,
}
return payload
def test_post_new_patient(flask_app, patient_1_info):
client = flask_app.test_client()
resp = client.post('/api/new_patient', json=patient_1_info)
assert resp.status_code == 200
def test_post_new_patient_no_id(flask_app, patient_1_info):
client = flask_app.test_client()
patient = {
"attending_email": "random@duke.edu",
"user_age": 21
}
resp = client.post('/api/new_patient', json=patient)
assert resp.json["status_code"] == 400
def test_post_new_patient_existing_id(flask_app, patient_1_info):
p_id = _new_patient_id()
new_patient = patient_1_info
new_patient["patient_id"] = p_id
client = flask_app.test_client()
client.post('/api/new_patient', json=new_patient)
resp = client.post('/api/new_patient', json=new_patient)
assert resp.json["error_type"] == "ValueError"
def test_post_new_patient_no_email(flask_app):
client = flask_app.test_client()
patient = {
"patient_id": 1230,
"user_age": 21
}
resp = client.post('/api/new_patient', json=patient)
assert resp.json["error_type"] == "AttributeError"
def test_post_new_patient_bad_email(flask_app, patient_1_info):
p_id = _new_patient_id()
new_patient = patient_1_info
new_patient["patient_id"] = p_id
new_patient["attending_email"] = "randomduke.edu"
client = flask_app.test_client()
resp = client.post('/api/new_patient', json=new_patient)
assert resp.json["error_type"] == "ValueError"
def test_post_new_patient_no_age(flask_app):
client = flask_app.test_client()
patient = {
"patient_id": 1230,
"attending_email": "random@duke.edu",
}
resp = client.post('/api/new_patient', json=patient)
assert resp.json["error_type"] == "AttributeError"
def test_post_heart_rate(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
client.post('/api/new_patient', json=patient_1_info)
resp = client.post('/api/heart_rate', json=heart_rate_p1)
assert resp.status_code == 200
def test_post_heart_rate_no_id(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
client.post('/api/new_patient', json=patient_1_info)
payload = {
"heart_rate": 100
}
resp = client.post('/api/heart_rate', json=payload)
assert resp.json["error_type"] == "AttributeError"
def test_post_heart_rate_no_hr(flask_app, patient_1_info, heart_rate_p1):
p_id = _new_patient_id()
new_patient = patient_1_info
new_patient["patient_id"] = p_id
client = flask_app.test_client()
client.post('/api/new_patient', json=new_patient)
payload = {
"patient_id": p_id
}
resp = client.post('/api/heart_rate', json=payload)
assert resp.json["error_type"] == "AttributeError"
def test_get_interval_average(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
new_patient = patient_1_info
p_id = ''.join(choice(ascii_uppercase) for _ in range(10))
new_patient["patient_id"] = p_id
client.post('/api/new_patient', json=new_patient)
new_hr = heart_rate_p1
new_hr["patient_id"] = p_id
client.post('/api/heart_rate', json=new_hr)
payload = {
"patient_id": p_id,
"heart_rate_average_since": datetime.datetime.now(),
}
resp = client.post('/api/heart_rate/interval_average', json=payload)
assert resp.status_code == 200
def test_get_interval_average_no_id(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
new_patient = patient_1_info
p_id = ''.join(choice(ascii_uppercase) for _ in range(10))
new_patient["patient_id"] = p_id
client.post('/api/new_patient', json=new_patient)
new_hr = heart_rate_p1
new_hr["patient_id"] = p_id
client.post('/api/heart_rate', json=new_hr)
payload = {
"heart_rate_average_since": datetime.datetime.now(),
}
resp = client.post('/api/heart_rate/interval_average', json=payload)
assert resp.json["error_type"] == "AttributeError"
def test_get_interval_average_no_time(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
new_patient = patient_1_info
p_id = ''.join(choice(ascii_uppercase) for _ in range(10))
new_patient["patient_id"] = p_id
client.post('/api/new_patient', json=new_patient)
new_hr = heart_rate_p1
new_hr["patient_id"] = p_id
client.post('/api/heart_rate', json=new_hr)
payload = {
"patient_id": p_id
}
resp = client.post('/api/heart_rate/interval_average', json=payload)
assert resp.json["error_type"] == "AttributeError"
def test_get_status(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
new_patient = patient_1_info
p_id = ''.join(choice(ascii_uppercase) for _ in range(10))
new_patient["patient_id"] = p_id
client.post('/api/new_patient', json=new_patient)
new_hr = heart_rate_p1
new_hr["patient_id"] = p_id
client.post('/api/heart_rate', json=new_hr)
resp = client.get('/api/status/{}'.format(patient_1_info["patient_id"]))
assert resp.status_code == 200
def test_get_heart_rate(flask_app, patient_1_info, heart_rate_p1):
client = flask_app.test_client()
new_patient = patient_1_info
p_id = ''.join(choice(ascii_uppercase) for _ in range(10))
new_patient["patient_id"] = p_id
client.post('/api/new_patient', json=new_patient)
new_hr = heart_rate_p1
new_hr["patient_id"] = p_id
client.post('/api/heart_rate', json=new_hr)
resp = client.get("/api/heart_rate/{}".format(p_id))
assert resp.status_code == 200
def test_get_average(flask_app, patient_1_info, heart_rate_p1):
    """Fetching the heart-rate average for a registered patient returns HTTP 200."""
    client = flask_app.test_client()
    random_id = ''.join(choice(ascii_uppercase) for _ in range(10))
    patient = patient_1_info
    patient["patient_id"] = random_id
    heart_rate = heart_rate_p1
    heart_rate["patient_id"] = random_id
    client.post('/api/new_patient', json=patient)
    client.post('/api/heart_rate', json=heart_rate)
    response = client.get("/api/heart_rate/average/{}".format(random_id))
    assert response.status_code == 200
@pytest.mark.parametrize("age, hr, expect", [
    # Cases straddle the expected tachycardia cutoffs at several ages,
    # pairing a below-threshold and an above-threshold rate where possible.
    (1, 190, True),
    (3, 130, False),
    (3, 140, True),
    (5, 110, False),
    (5, 170, True),
    (11, 80, False),
    (15, 80, False),
    (15, 120, True),
    (17, 80, False),
    (17, 120, True),
])
def test__is_tachychardic(age, hr, expect):
    """The private age/heart-rate classifier matches the expected verdicts."""
    # Local import per this file's convention for the module under test.
    from heart_rate_sentinel_server import _is_tachychardic
    assert _is_tachychardic(age, hr) == expect
@pytest.mark.parametrize("email, expect", [
    # One valid address, one missing '@', one missing the domain dot.
    ("test@gmail.com", True),
    ("testgmail.com", False),
    ("test@gmailcom", False),
])
def test__is_valid_email(email, expect):
    """Basic email-shape validation accepts/rejects the sample addresses."""
    from heart_rate_sentinel_server import _is_valid_email
    assert _is_valid_email(email) == expect
@pytest.mark.parametrize("age, expect", [
    # Only a plain int passes; a numeric string and a float are rejected.
    (18, True),
    ("3", False),
    (2.5, False),
])
def test__is_valid_age(age, expect):
    """Age validation accepts integers and rejects other types."""
    from heart_rate_sentinel_server import _is_valid_age
    assert _is_valid_age(age) == expect
@pytest.mark.parametrize("heart_rate, expect", [
    # Positive int passes; float, negative int, and string are rejected.
    (123, True),
    (123.4, False),
    (-345, False),
    ("test", False),
])
def test__is_valid_heart_rate(heart_rate, expect):
    """Heart-rate validation accepts positive integers only."""
    from heart_rate_sentinel_server import _is_valid_heart_rate
    assert _is_valid_heart_rate(heart_rate) == expect
| 29.496454
| 80
| 0.686944
| 1,210
| 8,318
| 4.344628
| 0.080165
| 0.104622
| 0.069241
| 0.041088
| 0.834887
| 0.781434
| 0.766216
| 0.749096
| 0.698687
| 0.656839
| 0
| 0.025011
| 0.182856
| 8,318
| 281
| 81
| 29.601423
| 0.748418
| 0.001443
| 0
| 0.588496
| 0
| 0
| 0.162693
| 0.020472
| 0
| 0
| 0
| 0
| 0.084071
| 1
| 0.110619
| false
| 0
| 0.044248
| 0.004425
| 0.181416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
838d986e4c58e05d15e8b8a41e63b51318c126c5
| 56
|
py
|
Python
|
Examples/Python/Example_1.py
|
TotteKarlsson/dsl
|
3807cbe5f90a3cd495979eafa8cf5485367b634c
|
[
"BSD-2-Clause"
] | null | null | null |
Examples/Python/Example_1.py
|
TotteKarlsson/dsl
|
3807cbe5f90a3cd495979eafa8cf5485367b634c
|
[
"BSD-2-Clause"
] | null | null | null |
Examples/Python/Example_1.py
|
TotteKarlsson/dsl
|
3807cbe5f90a3cd495979eafa8cf5485367b634c
|
[
"BSD-2-Clause"
] | null | null | null |
import dsl
print ("API Version: " + dsl.getVersion())
| 11.2
| 42
| 0.660714
| 7
| 56
| 5.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 56
| 4
| 43
| 14
| 0.804348
| 0
| 0
| 0
| 0
| 0
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
839c3ae37e6ff5fadd7f9f1ae17ef5af4296033d
| 87
|
py
|
Python
|
Tareas/ejercicio1.py
|
pelahumi/calentamiento
|
a7d602bde16bec4952fd6ea5b346cd0e2feb5a2c
|
[
"Apache-2.0"
] | null | null | null |
Tareas/ejercicio1.py
|
pelahumi/calentamiento
|
a7d602bde16bec4952fd6ea5b346cd0e2feb5a2c
|
[
"Apache-2.0"
] | null | null | null |
Tareas/ejercicio1.py
|
pelahumi/calentamiento
|
a7d602bde16bec4952fd6ea5b346cd0e2feb5a2c
|
[
"Apache-2.0"
] | null | null | null |
print("Roses are #ff0000 Violets are #0000ff why my code´s working I haven´t a clue")
| 29
| 85
| 0.735632
| 19
| 87
| 3.473684
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0.172414
| 87
| 2
| 86
| 43.5
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
83d86b932b831dd3c324c2839e41edba99c9a082
| 60
|
py
|
Python
|
src/metrics/__init__.py
|
kzajac97/frequency-neural-networks
|
39ddea64d0654559dfe976e974fa369480f75caf
|
[
"MIT"
] | null | null | null |
src/metrics/__init__.py
|
kzajac97/frequency-neural-networks
|
39ddea64d0654559dfe976e974fa369480f75caf
|
[
"MIT"
] | null | null | null |
src/metrics/__init__.py
|
kzajac97/frequency-neural-networks
|
39ddea64d0654559dfe976e974fa369480f75caf
|
[
"MIT"
] | null | null | null |
from .regression import regression_report, regression_score
| 30
| 59
| 0.883333
| 7
| 60
| 7.285714
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 60
| 1
| 60
| 60
| 0.927273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
83e082d0dbcd405ec2a069c41cf3e7b3448b254b
| 1,269
|
py
|
Python
|
config/PixelConfig.py
|
MOrgacki/MMORPG-Bot
|
3ac418913044f35f2f872cf1d071fbba20ef6603
|
[
"MIT"
] | null | null | null |
config/PixelConfig.py
|
MOrgacki/MMORPG-Bot
|
3ac418913044f35f2f872cf1d071fbba20ef6603
|
[
"MIT"
] | 1
|
2021-10-12T18:59:12.000Z
|
2021-10-12T18:59:12.000Z
|
config/PixelConfig.py
|
MOrgacki/MMORPG-Bot
|
3ac418913044f35f2f872cf1d071fbba20ef6603
|
[
"MIT"
] | null | null | null |
from config.Config import Config
class PixelConfig(Config):
    """Class contains bunch of getters to specific PIXEL location.

    Each property forwards to ``Config.get_property`` with a fixed key;
    the value type depends on the underlying config file
    (presumably screen coordinates/colors -- confirm against Config).
    """

    @property
    def get_attack(self):
        return self.get_property('attack')

    @property
    def get_chase(self):
        return self.get_property('chase')

    @property
    def get_battle_list(self):
        return self.get_property('battle_list')

    @property
    def get_monster_red(self):
        return self.get_property('monster_red')

    @property
    def get_between(self):
        return self.get_property('between')

    @property
    def get_high_hp(self):
        return self.get_property('high_hp')

    @property
    def get_low_hp(self):
        return self.get_property('low_hp')

    @property
    def get_mana_bar(self):
        return self.get_property('mana_bar')

    @property
    def get_loot_boundary(self):
        return self.get_property('loot_boundary')

    @property
    def get_loot_color(self):
        return self.get_property('loot_color')

    @property
    def get_battle_list_two_monsters(self):
        return self.get_property('battle_list_two_monsters')

    @property
    def get_battle_list_three_monsters(self):
        return self.get_property('battle_list_three_monsters')
| 22.660714
| 68
| 0.677699
| 163
| 1,269
| 4.969325
| 0.220859
| 0.162963
| 0.207407
| 0.251852
| 0.530864
| 0.287654
| 0.149383
| 0.106173
| 0
| 0
| 0
| 0
| 0.229314
| 1,269
| 55
| 69
| 23.072727
| 0.828221
| 0.045705
| 0
| 0.315789
| 0
| 0
| 0.111388
| 0.041563
| 0
| 0
| 0
| 0
| 0
| 1
| 0.315789
| false
| 0
| 0.026316
| 0.315789
| 0.684211
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
83f3c2f7489c065c41d2862a8ac36c396984f97b
| 10,782
|
py
|
Python
|
test/test_lsh_cassandra.py
|
researcher2/datasketch
|
c56c633c2b9eeef3a0706ee83bdc9d2b38d0eb47
|
[
"MIT"
] | null | null | null |
test/test_lsh_cassandra.py
|
researcher2/datasketch
|
c56c633c2b9eeef3a0706ee83bdc9d2b38d0eb47
|
[
"MIT"
] | null | null | null |
test/test_lsh_cassandra.py
|
researcher2/datasketch
|
c56c633c2b9eeef3a0706ee83bdc9d2b38d0eb47
|
[
"MIT"
] | null | null | null |
import unittest
import os
import numpy as np
import pickle
from multiprocessing import Pool, TimeoutError
import time
from datasketch.lsh import MinHashLSH
from datasketch.minhash import MinHash
from datasketch.weighted_minhash import WeightedMinHashGenerator
# Storage settings shared by every test below: a local Cassandra node with a
# throwaway keyspace; drop_keyspace/drop_tables keep runs isolated.
STORAGE_CONFIG_CASSANDRA = {
    'basename': b'test',
    'type': 'cassandra',
    'cassandra': {
        'seeds': ['127.0.0.1'],
        'keyspace': 'lsh_test',
        'replication': {
            'class': 'SimpleStrategy',
            'replication_factor': '1'
        },
        'drop_keyspace': True,
        'drop_tables': True,
    }
}
# Cassandra-backed tests only run when explicitly enabled via the environment.
DO_TEST_CASSANDRA = os.environ.get("DO_TEST_CASSANDRA") == "true"
def _multiprocess_test(lsh_pickled):
lsh = pickle.loads(lsh_pickled)
return True
class TestMinHashLSHCassandra(unittest.TestCase):
    """MinHashLSH integration tests against a live Cassandra backend.

    Every test is skipped unless DO_TEST_CASSANDRA is enabled (see module
    top), since the storage config points at a Cassandra node on 127.0.0.1.

    Fixes applied: ``assertEquals`` (deprecated alias, removed in Python
    3.12) replaced with ``assertEqual``; the multiprocess test's skip
    message corrected (it was copy-pasted from get_counts); the worker
    Pool is now closed and joined instead of leaking processes.
    """

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__init")
    def test_cassandra__init(self):
        """Shifting the precision/recall weights moves the b/r parameters."""
        lsh = MinHashLSH(threshold=0.8, storage_config=STORAGE_CONFIG_CASSANDRA)
        self.assertTrue(lsh.is_empty())
        b1, r1 = lsh.b, lsh.r
        lsh = MinHashLSH(threshold=0.8, weights=(0.2, 0.8))
        b2, r2 = lsh.b, lsh.r
        self.assertTrue(b1 < b2)
        self.assertTrue(r1 > r2)

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__H")
    def test_cassandra__H(self):
        '''
        Check _H output consistent bytes length given
        the same concatenated hash value size
        '''
        # The loop variable only repeats the check; the LSH itself is
        # rebuilt with the same num_perm each time.
        for l in range(2, 128 + 1, 16):
            lsh = MinHashLSH(num_perm=128, storage_config=STORAGE_CONFIG_CASSANDRA)
            m = MinHash()
            m.update("abcdefg".encode("utf8"))
            m.update("1234567".encode("utf8"))
            lsh.insert("m", m)
            sizes = [len(H) for ht in lsh.hashtables for H in ht]
            self.assertTrue(all(sizes[0] == s for s in sizes))

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__insert")
    def test_cassandra__insert(self):
        """Inserted keys must appear in every hashtable and the key index."""
        lsh = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        m1 = MinHash(16)
        m1.update("a".encode("utf8"))
        m2 = MinHash(16)
        m2.update("b".encode("utf8"))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        for t in lsh.hashtables:
            self.assertTrue(len(t) >= 1)
            items = []
            for H in t:
                items.extend(t[H])
            self.assertTrue("a" in items)
            self.assertTrue("b" in items)
        self.assertTrue("a" in lsh)
        self.assertTrue("b" in lsh)
        for i, H in enumerate(lsh.keys["a"]):
            self.assertTrue("a" in lsh.hashtables[i][H])

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__query")
    def test_cassandra__query(self):
        """Each sketch retrieves at least itself; mismatched num_perm raises."""
        lsh = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        m1 = MinHash(16)
        m1.update("a".encode("utf8"))
        m2 = MinHash(16)
        m2.update("b".encode("utf8"))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        result = lsh.query(m1)
        self.assertTrue("a" in result)
        result = lsh.query(m2)
        self.assertTrue("b" in result)
        m3 = MinHash(18)
        self.assertRaises(ValueError, lsh.query, m3)

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__remove")
    def test_cassandra__remove(self):
        """remove() purges the key everywhere; removing unknown keys raises."""
        lsh = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        m1 = MinHash(16)
        m1.update("a".encode("utf8"))
        m2 = MinHash(16)
        m2.update("b".encode("utf8"))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        lsh.remove("a")
        self.assertTrue("a" not in lsh.keys)
        for table in lsh.hashtables:
            for H in table:
                self.assertGreater(len(table[H]), 0)
                self.assertTrue("a" not in table[H])
        self.assertRaises(ValueError, lsh.remove, "c")

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__get_subset_counts")
    def test_cassandra__get_subset_counts(self):
        """Cassandra storage and in-memory storage agree on subset counts."""
        m1 = MinHash(16)
        m1.update("a".encode("utf8"))
        m2 = MinHash(16)
        m2.update("b".encode("utf8"))
        lsh_c = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        lsh_c.insert("a", m1)
        lsh_c.insert("b", m2)
        lsh_m = MinHashLSH(threshold=0.5, num_perm=16)
        lsh_m.insert("a", m1)
        lsh_m.insert("b", m2)
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(lsh_c.get_subset_counts("a"), lsh_m.get_subset_counts("a"))
        self.assertEqual(lsh_c.get_subset_counts("b"), lsh_m.get_subset_counts("b"))

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__insertion_session")
    def test_cassandra__insertion_session(self):
        """Batch inserts via insertion_session() behave like direct inserts."""
        lsh = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        m1 = MinHash(16)
        m1.update("a".encode("utf8"))
        m2 = MinHash(16)
        m2.update("b".encode("utf8"))
        data = [("a", m1), ("b", m2)]
        with lsh.insertion_session() as session:
            for key, minhash in data:
                session.insert(key, minhash)
        for t in lsh.hashtables:
            self.assertTrue(len(t) >= 1)
            items = []
            for H in t:
                items.extend(t[H])
            self.assertTrue("a" in items)
            self.assertTrue("b" in items)
        self.assertTrue("a" in lsh)
        self.assertTrue("b" in lsh)
        for i, H in enumerate(lsh.keys["a"]):
            self.assertTrue("a" in lsh.hashtables[i][H])

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__get_counts")
    def test_cassandra__get_counts(self):
        """get_counts() returns one table per band, each summing to the inserts."""
        lsh = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        m1 = MinHash(16)
        m1.update("a".encode("utf8"))
        m2 = MinHash(16)
        m2.update("b".encode("utf8"))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        counts = lsh.get_counts()
        self.assertEqual(len(counts), lsh.b)
        for table in counts:
            self.assertEqual(sum(table.values()), 2)

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__multiprocess")
    def test_cassandra__multiprocess(self):
        """A pickled LSH must be usable from worker processes."""
        lsh = MinHashLSH(threshold=0.5, num_perm=16, storage_config=STORAGE_CONFIG_CASSANDRA)
        lsh_pickled = pickle.dumps(lsh)
        pool = Pool(processes=4)
        async_results = [pool.apply_async(_multiprocess_test, (lsh_pickled,))
                         for i in range(4)]
        completion_status = [False for _ in async_results]
        countdown = 4
        wait_time = 30
        start_time = time.time()
        # Poll until all four workers finish; fail if it takes > wait_time.
        while countdown > 0:
            elapsed_time = time.time() - start_time
            self.assertTrue(elapsed_time < wait_time)
            for i, async_result in enumerate(async_results):
                if completion_status[i]:
                    continue
                if async_result.ready():
                    completion_status[i] = True
                    countdown -= 1
        # Release the worker processes (previously leaked).
        pool.close()
        pool.join()
class TestWeightedMinHashLSHCassandra(unittest.TestCase):
    """Cassandra-backed LSH tests driven by weighted MinHash sketches.

    Skipped unless DO_TEST_CASSANDRA is enabled, like the unweighted suite.
    """

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__init")
    def test_cassandra__init(self):
        # Shifting the precision/recall weights should move b up and r down.
        lsh = MinHashLSH(threshold=0.8, storage_config=STORAGE_CONFIG_CASSANDRA)
        self.assertTrue(lsh.is_empty())
        b1, r1 = lsh.b, lsh.r
        lsh = MinHashLSH(threshold=0.8, weights=(0.2,0.8), storage_config=STORAGE_CONFIG_CASSANDRA)
        b2, r2 = lsh.b, lsh.r
        self.assertTrue(b1 < b2)
        self.assertTrue(r1 > r2)

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__H")
    def test_cassandra__H(self):
        '''
        Check _H output consistent bytes length given
        the same concatenated hash value size
        '''
        mg = WeightedMinHashGenerator(100, sample_size=128)
        for l in range(2, mg.sample_size+1, 16):
            m = mg.minhash(np.random.randint(1, 99999999, 100))
            lsh = MinHashLSH(num_perm=128, storage_config=STORAGE_CONFIG_CASSANDRA)
            lsh.insert("m", m)
            sizes = [len(H) for ht in lsh.hashtables for H in ht]
            self.assertTrue(all(sizes[0] == s for s in sizes))

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__insert")
    def test_cassandra__insert(self):
        # Insert two sketches, then verify both keys land in every hashtable.
        lsh = MinHashLSH(threshold=0.5, num_perm=4, storage_config=STORAGE_CONFIG_CASSANDRA)
        mg = WeightedMinHashGenerator(10, 4)
        m1 = mg.minhash(np.random.uniform(1, 10, 10))
        m2 = mg.minhash(np.random.uniform(1, 10, 10))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        for t in lsh.hashtables:
            self.assertTrue(len(t) >= 1)
            items = []
            for H in t:
                items.extend(t[H])
            self.assertTrue("a" in items)
            self.assertTrue("b" in items)
        self.assertTrue("a" in lsh)
        self.assertTrue("b" in lsh)
        for i, H in enumerate(lsh.keys["a"]):
            self.assertTrue("a" in lsh.hashtables[i][H])
        # A sketch with a mismatched sample size must be rejected.
        mg = WeightedMinHashGenerator(10, 5)
        m3 = mg.minhash(np.random.uniform(1, 10, 10))
        self.assertRaises(ValueError, lsh.insert, "c", m3)

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__query")
    def test_cassandra__query(self):
        lsh = MinHashLSH(threshold=0.5, num_perm=4, storage_config=STORAGE_CONFIG_CASSANDRA)
        mg = WeightedMinHashGenerator(10, 4)
        m1 = mg.minhash(np.random.uniform(1, 10, 10))
        m2 = mg.minhash(np.random.uniform(1, 10, 10))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        # Each sketch should at least retrieve itself.
        result = lsh.query(m1)
        self.assertTrue("a" in result)
        result = lsh.query(m2)
        self.assertTrue("b" in result)
        # Querying with a mismatched sample size must raise.
        mg = WeightedMinHashGenerator(10, 5)
        m3 = mg.minhash(np.random.uniform(1, 10, 10))
        self.assertRaises(ValueError, lsh.query, m3)

    @unittest.skipIf(not DO_TEST_CASSANDRA, "Skipping test_cassandra__remove")
    def test_cassandra__remove(self):
        lsh = MinHashLSH(threshold=0.5, num_perm=4, storage_config=STORAGE_CONFIG_CASSANDRA)
        mg = WeightedMinHashGenerator(10, 4)
        m1 = mg.minhash(np.random.uniform(1, 10, 10))
        m2 = mg.minhash(np.random.uniform(1, 10, 10))
        lsh.insert("a", m1)
        lsh.insert("b", m2)
        lsh.remove("a")
        # "a" must be gone from the key index and every hashtable bucket.
        self.assertTrue("a" not in lsh.keys)
        for table in lsh.hashtables:
            for H in table:
                self.assertGreater(len(table[H]), 0)
                self.assertTrue("a" not in table[H])
        # Removing an unknown key must raise.
        self.assertRaises(ValueError, lsh.remove, "c")
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.main()
| 38.370107
| 99
| 0.613615
| 1,402
| 10,782
| 4.542083
| 0.112696
| 0.089824
| 0.055276
| 0.061244
| 0.775754
| 0.758794
| 0.758794
| 0.737594
| 0.737594
| 0.729271
| 0
| 0.036702
| 0.262104
| 10,782
| 280
| 100
| 38.507143
| 0.7637
| 0.015489
| 0
| 0.6625
| 0
| 0
| 0.071361
| 0.023503
| 0
| 0
| 0
| 0
| 0.191667
| 1
| 0.0625
| false
| 0
| 0.0375
| 0
| 0.1125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f766e0e63f77f6d33944033364b887b207981671
| 64
|
py
|
Python
|
outlookdisablespamfilter/__init__.py
|
jan-janssen/outlook-disable-spam-filter
|
264f5fb650408abbfa5736f33ae098aefc0e7fac
|
[
"BSD-3-Clause"
] | null | null | null |
outlookdisablespamfilter/__init__.py
|
jan-janssen/outlook-disable-spam-filter
|
264f5fb650408abbfa5736f33ae098aefc0e7fac
|
[
"BSD-3-Clause"
] | null | null | null |
outlookdisablespamfilter/__init__.py
|
jan-janssen/outlook-disable-spam-filter
|
264f5fb650408abbfa5736f33ae098aefc0e7fac
|
[
"BSD-3-Clause"
] | null | null | null |
from outlookdisablespamfilter.shared import transfer_spam_emails
| 64
| 64
| 0.9375
| 7
| 64
| 8.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 64
| 1
| 64
| 64
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f76cf5fdc8b65714f0db11d40e2a60b62cc19363
| 6,798
|
py
|
Python
|
tests/test_template_fetcher.py
|
oazmon/sceptre-template-fetcher
|
ff40fea4dcdb7b785b90b70426758475a8d09634
|
[
"Apache-2.0"
] | null | null | null |
tests/test_template_fetcher.py
|
oazmon/sceptre-template-fetcher
|
ff40fea4dcdb7b785b90b70426758475a8d09634
|
[
"Apache-2.0"
] | null | null | null |
tests/test_template_fetcher.py
|
oazmon/sceptre-template-fetcher
|
ff40fea4dcdb7b785b90b70426758475a8d09634
|
[
"Apache-2.0"
] | null | null | null |
from mock import patch
import pytest
import yaml
# When switch to new version change EnvironmentPathNotFoundError to:
# from sceptre.exceptions import InvalidSceptreDirectoryError
from sceptre.exceptions import EnvironmentPathNotFoundError
from sceptre_template_fetcher.template_fetcher import TemplateFetcher
class TestTemplateFetcher___init__(object):
    """Constructor tests: directory validation and FetcherMap wiring."""

    @patch("sceptre_template_fetcher.template_fetcher.FetcherMap")
    @patch("sceptre_template_fetcher.template_fetcher.path.isdir")
    def test_correctly_initialised_with_defaults(
        self, mock_isdir, mock_fetcher_map
    ):
        # With no shared_template_dir given, 'shared-templates' is used.
        mock_isdir.return_value = True
        self.template_fetcher = TemplateFetcher(
            sceptre_dir='fake-sceptre-dir'
        )
        mock_fetcher_map.assert_called_once_with(
            sceptre_dir='fake-sceptre-dir',
            shared_template_dir='fake-sceptre-dir/shared-templates'
        )
        assert self.template_fetcher.sceptre_dir == "fake-sceptre-dir"
        assert self.template_fetcher.shared_template_dir == \
            "fake-sceptre-dir/shared-templates"

    @patch("sceptre_template_fetcher.template_fetcher.FetcherMap")
    @patch("sceptre_template_fetcher.template_fetcher.path.isdir")
    def test_correctly_initialised_with_values(
        self, mock_isdir, mock_fetcher_map
    ):
        # An explicit shared_template_dir is resolved under sceptre_dir.
        mock_isdir.return_value = True
        self.template_fetcher = TemplateFetcher(
            sceptre_dir='fake-sceptre-dir',
            shared_template_dir='fake-shared-dir'
        )
        mock_fetcher_map.assert_called_once_with(
            sceptre_dir='fake-sceptre-dir',
            shared_template_dir='fake-sceptre-dir/fake-shared-dir'
        )
        assert self.template_fetcher.sceptre_dir == "fake-sceptre-dir"
        assert self.template_fetcher.shared_template_dir == \
            "fake-sceptre-dir/fake-shared-dir"

    @patch("sceptre_template_fetcher.template_fetcher.FetcherMap")
    @patch("sceptre_template_fetcher.template_fetcher.path.isdir")
    def test__invalid_sceptre_dir(
        self, mock_isdir, mock_fetcher_map
    ):
        # A non-existent sceptre dir aborts construction before FetcherMap.
        mock_isdir.return_value = False
        with pytest.raises(EnvironmentPathNotFoundError):
            self.template_fetcher = TemplateFetcher(
                sceptre_dir='fake-sceptre-dir',
                shared_template_dir='fake-sceptre-dir/shared-templates'
            )
        mock_fetcher_map.assert_not_called()

    @patch("sceptre_template_fetcher.template_fetcher.FetcherMap")
    @patch("sceptre_template_fetcher.template_fetcher.os.makedirs")
    @patch("sceptre_template_fetcher.template_fetcher.path.isdir")
    def test__missing_shared_template_dir(
        self, mock_isdir, mock_makedirs, mock_fetcher_map
    ):
        # Only the sceptre dir exists; the shared dir is created with 0o750.
        def mock_isdir_side_effect(path):
            return True if path == 'fake-sceptre-dir' else False
        mock_isdir.side_effect = mock_isdir_side_effect
        self.template_fetcher = TemplateFetcher(
            sceptre_dir='fake-sceptre-dir',
            shared_template_dir='fake-shared-dir'
        )
        mock_makedirs.assert_called_once_with(
            'fake-sceptre-dir/fake-shared-dir',
            0o750
        )
        mock_fetcher_map.assert_called_once_with(
            sceptre_dir='fake-sceptre-dir',
            shared_template_dir='fake-sceptre-dir/fake-shared-dir'
        )
class TestTemplateFetcher_fetch(object):
    """fetch() tests: import-file loading, validation, and dispatch."""

    @patch("sceptre_template_fetcher.template_fetcher.FetcherMap")
    @patch("sceptre_template_fetcher.template_fetcher.path.isdir")
    def setup_method(self, test_method, mock_isdir, mock_fetcher_map):
        # Fresh fetcher per test, keeping the FetcherMap mock for assertions.
        mock_isdir.return_value = True
        self.mock_fetcher_map = mock_fetcher_map
        self.template_fetcher = TemplateFetcher(
            sceptre_dir='fake-sceptre-dir'
        )

    @patch("sceptre_template_fetcher.template_fetcher.open")
    def test_default_argument(self, mock_open):
        # A None filename falls back to config/import.yaml.
        mock_open.return_value.__enter__.return_value.\
            read.return_value = yaml.dump({})
        self.template_fetcher.fetch(None)
        mock_open.assert_called_once_with(
            'fake-sceptre-dir/config/import.yaml',
            'r'
        )
        self.mock_fetcher_map.fetch.assert_not_called()

    def test_missing_import_file(self):
        # A nonexistent import file propagates the underlying IOError.
        with pytest.raises(IOError):
            self.template_fetcher.fetch('No Such File')

    @patch("sceptre_template_fetcher.template_fetcher.open")
    def test_bad_yaml_file(self, mock_open):
        # Malformed YAML propagates the parser error.
        mock_open.return_value.__enter__.return_value.\
            read.return_value = "%bad yaml':"
        with pytest.raises(yaml.parser.ParserError):
            self.template_fetcher.fetch('fake-import.yaml')
        mock_open.assert_called_once_with(
            'fake-sceptre-dir/config/fake-import.yaml',
            'r'
        )

    @patch("sceptre_template_fetcher.template_fetcher.open")
    def test_missing_import_list(self, mock_open):
        # No 'imports' key at all: nothing is fetched.
        mock_open.return_value.__enter__.return_value.\
            read.return_value = yaml.dump({})
        self.template_fetcher.fetch('fake-import.yaml')
        self.mock_fetcher_map.fetch.assert_not_called()
        mock_open.assert_called_once_with(
            'fake-sceptre-dir/config/fake-import.yaml',
            'r'
        )

    @patch("sceptre_template_fetcher.template_fetcher.open")
    def test_bad_import_stanza(self, mock_open):
        # 'imports' must be a list of directives, not a bare string.
        mock_open.return_value.__enter__.return_value.\
            read.return_value = "'imports': 'bad-stanza'"
        with pytest.raises(TypeError):
            self.template_fetcher.fetch('fake-import.yaml')
        mock_open.assert_called_once_with(
            'fake-sceptre-dir/config/fake-import.yaml',
            'r'
        )

    @patch("sceptre_template_fetcher.template_fetcher.open")
    def test_empty_import_list(self, mock_open):
        # An explicitly empty list is valid and fetches nothing.
        mock_open.return_value.__enter__.return_value.\
            read.return_value = yaml.dump({'imports': []})
        self.template_fetcher.fetch('fake-import.yaml')
        self.mock_fetcher_map.fetch.assert_not_called()
        mock_open.assert_called_once_with(
            'fake-sceptre-dir/config/fake-import.yaml',
            'r'
        )

    @patch("sceptre_template_fetcher.template_fetcher.open")
    def test_one_directive_import_list(self, mock_open):
        # A single directive is handed to the FetcherMap unchanged.
        directive = {
            'provider': 'git',
            'url': "https://git/repo/path/to/dir/template.yaml"
        }
        mock_open.return_value.__enter__.return_value.\
            read.return_value = yaml.dump({'imports': [directive]})
        self.template_fetcher.fetch('fake-import.yaml')
        mock_open.assert_called_once_with(
            'fake-sceptre-dir/config/fake-import.yaml',
            'r'
        )
        self.mock_fetcher_map.return_value.\
            fetch.assert_called_once_with(directive)
| 39.754386
| 71
| 0.684319
| 792
| 6,798
| 5.498737
| 0.111111
| 0.179104
| 0.077153
| 0.123995
| 0.779104
| 0.760276
| 0.75729
| 0.742824
| 0.730654
| 0.702181
| 0
| 0.000753
| 0.218447
| 6,798
| 170
| 72
| 39.988235
| 0.818935
| 0.018535
| 0
| 0.57047
| 0
| 0
| 0.257798
| 0.196611
| 0
| 0
| 0
| 0
| 0.127517
| 1
| 0.087248
| false
| 0
| 0.161074
| 0.006711
| 0.268456
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f76da8cf5400cbdee6d5577c738c25bec19fa7d6
| 45
|
py
|
Python
|
tests/__init__.py
|
saeedesmaili/twitter-blocker
|
0a58bcd0a483c54ef1a274804bb80fa132839834
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
saeedesmaili/twitter-blocker
|
0a58bcd0a483c54ef1a274804bb80fa132839834
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
saeedesmaili/twitter-blocker
|
0a58bcd0a483c54ef1a274804bb80fa132839834
|
[
"MIT"
] | null | null | null |
"""Unit test package for twitter_blocker."""
| 22.5
| 44
| 0.733333
| 6
| 45
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 1
| 45
| 45
| 0.8
| 0.844444
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e3f303c59954e7bec5cf6fa62b4f49925de56d80
| 141
|
py
|
Python
|
Ekeopara_Praise/Phase 2/LIST/Day44 Tasks/Task4.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 6
|
2020-05-23T19:53:25.000Z
|
2021-05-08T20:21:30.000Z
|
Ekeopara_Praise/Phase 2/LIST/Day44 Tasks/Task4.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 8
|
2020-05-14T18:53:12.000Z
|
2020-07-03T00:06:20.000Z
|
Ekeopara_Praise/Phase 2/LIST/Day44 Tasks/Task4.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 39
|
2020-05-10T20:55:02.000Z
|
2020-09-12T17:40:59.000Z
|
'''4. Write a Python program to concatenate elements of a list. '''
# Join the digit strings with a dash, then with no separator at all.
num = ['1', '2', '3', '4', '5']
for separator in ('-', ''):
    print(separator.join(num))
| 28.2
| 67
| 0.567376
| 23
| 141
| 3.478261
| 0.73913
| 0.225
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.148936
| 141
| 5
| 68
| 28.2
| 0.616667
| 0.425532
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
541d7b4e725c73aa7223417d3b326c524923b4c4
| 39
|
py
|
Python
|
talking_assist/__init__.py
|
shubhamjainjnsb/talking_assist
|
42acb4d5d41c4edee15fcb8fe81c365ab65ed76f
|
[
"MIT"
] | null | null | null |
talking_assist/__init__.py
|
shubhamjainjnsb/talking_assist
|
42acb4d5d41c4edee15fcb8fe81c365ab65ed76f
|
[
"MIT"
] | null | null | null |
talking_assist/__init__.py
|
shubhamjainjnsb/talking_assist
|
42acb4d5d41c4edee15fcb8fe81c365ab65ed76f
|
[
"MIT"
] | null | null | null |
#The file to initialise the other files
| 39
| 39
| 0.820513
| 7
| 39
| 4.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 39
| 1
| 39
| 39
| 0.969697
| 0.974359
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
580ad5a07c09c1b65a33d8cb9e7cf5166c76c6ab
| 55
|
py
|
Python
|
code/utils/__init__.py
|
yatharthahuja/unsupervised-homography-estimation
|
6b5c7b1ad56418783351874a75521e94c2c84ce0
|
[
"MIT"
] | null | null | null |
code/utils/__init__.py
|
yatharthahuja/unsupervised-homography-estimation
|
6b5c7b1ad56418783351874a75521e94c2c84ce0
|
[
"MIT"
] | null | null | null |
code/utils/__init__.py
|
yatharthahuja/unsupervised-homography-estimation
|
6b5c7b1ad56418783351874a75521e94c2c84ce0
|
[
"MIT"
] | null | null | null |
from .utils import *
from .RANSAC_homography import *
| 18.333333
| 32
| 0.763636
| 7
| 55
| 5.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 55
| 2
| 33
| 27.5
| 0.891304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
583bd45e040c331c7cb24ec42ba85048e87e6404
| 223
|
py
|
Python
|
tests/builtins/test_callable.py
|
akubera/batavia
|
b56f2a3f54242f81aea3a5a048356a03f0d49494
|
[
"BSD-3-Clause"
] | 1,256
|
2015-08-09T07:44:02.000Z
|
2019-03-27T05:41:24.000Z
|
tests/builtins/test_callable.py
|
akubera/batavia
|
b56f2a3f54242f81aea3a5a048356a03f0d49494
|
[
"BSD-3-Clause"
] | 406
|
2015-08-12T03:40:29.000Z
|
2019-02-25T19:26:02.000Z
|
tests/builtins/test_callable.py
|
akubera/batavia
|
b56f2a3f54242f81aea3a5a048356a03f0d49494
|
[
"BSD-3-Clause"
] | 589
|
2015-08-10T03:27:31.000Z
|
2019-03-10T20:58:07.000Z
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class CallableTests(TranspileTestCase):
    # Placeholder for callable()-specific transpile tests; the generic
    # builtin-function suite below provides the actual coverage.
    pass
class BuiltinCallableFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    # Name consumed by BuiltinFunctionTestCase -- presumably selects which
    # builtin the shared harness exercises; confirm against the base class.
    function = "callable"
| 22.3
| 79
| 0.820628
| 15
| 223
| 12.2
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121076
| 223
| 9
| 80
| 24.777778
| 0.933673
| 0
| 0
| 0
| 0
| 0
| 0.035874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.8
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
58779bc0f374bf2023edd7f40daaf9147c8b4b2e
| 2,401
|
py
|
Python
|
readConfig.py
|
DongDong-123/zgg_SmartSystem
|
9bf30a53b31972e104264af8bc248bae378f78df
|
[
"Apache-2.0"
] | null | null | null |
readConfig.py
|
DongDong-123/zgg_SmartSystem
|
9bf30a53b31972e104264af8bc248bae378f78df
|
[
"Apache-2.0"
] | null | null | null |
readConfig.py
|
DongDong-123/zgg_SmartSystem
|
9bf30a53b31972e104264af8bc248bae378f78df
|
[
"Apache-2.0"
] | null | null | null |
import ast
import configparser
import os
# Resolve config.ini relative to this file so lookups work from any CWD.
curPath = os.path.dirname(os.path.realpath(__file__))
cfgPath = os.path.join(curPath, "config.ini")
class ReadConfig:
    """Read-only accessors over the sections of the shared config.ini.

    Every getter returns the raw configured string, except get_clue_type()
    which parses and concatenates list literals.
    """

    def __init__(self):
        # Parse the INI file that sits next to this module, once per instance.
        self.cfg = configparser.ConfigParser()
        self.cfg.read(cfgPath, encoding='utf-8')

    def get_user(self):
        return self.cfg.get("account", "USER")

    def get_password(self):
        return self.cfg.get("account", "PASSWORD")

    def save_report(self):
        return self.cfg.get("path", "REPORT")

    def save_screen(self):
        return self.cfg.get("path", "SCREEN_SHOOT")

    def get_root_url(self):
        return self.cfg.get("URL", "URL")

    def get_user_url(self):
        return self.cfg.get("URL", "USER_URL")

    def redis_host(self):
        return self.cfg.get("REDIS", "HOST")

    def redis_password(self):
        return self.cfg.get("REDIS", "PASSWORD")

    def get_trademake_type(self):
        return self.cfg.get("TRADEMARK", "trademark_international")

    def get_clue_type(self):
        """Concatenate all configured clue-type lists into one list.

        Each CLUE option holds a Python list literal.  ast.literal_eval
        replaces the previous eval() calls, which would have executed
        arbitrary code found in the config file.
        """
        clue_keys = ("clue_1_1", "clue_1_2", "clue_1_3", "clue_1_4",
                     "clue_2", "clue_5", "clue_5_1")
        all_clue_type = []
        for key in clue_keys:
            all_clue_type += ast.literal_eval(self.cfg.get("CLUE", key))
        return all_clue_type

    def get_trademark_url(self):
        # NOTE(review): option key spelled 'TRADEMAEK' in the config -- verify.
        return self.cfg.get("URL", "TRADEMAEK")

    def login_page(self):
        return self.cfg.get("URL", "LOGIN_PAGE")

    def get_order_list(self):
        return self.cfg.get("URL", "ORDER_LIST")
class SendEmailConfig:
    """Accessors for the [EMAIL] section of the shared config.ini."""

    def __init__(self):
        # Same INI file as ReadConfig, parsed independently.
        self.cfg = configparser.ConfigParser()
        self.cfg.read(cfgPath, encoding='utf-8')

    def get_smtpserver(self):
        return self.cfg.get("EMAIL", "SMTPSERVER")

    def get_user(self):
        return self.cfg.get("EMAIL", "USER")

    def get_password(self):
        return self.cfg.get("EMAIL", "PASSWORD")

    def get_sender(self):
        return self.cfg.get("EMAIL", "SENDER")

    def get_receiver(self):
        # ast.literal_eval parses the configured list of addresses without
        # eval()'s arbitrary-code-execution risk.
        return ast.literal_eval(self.cfg.get("EMAIL", "RECEIVER"))
if __name__ == "__main__":
    # Smoke test: print the Redis connection settings read from config.ini.
    reader = ReadConfig()
    print(reader.redis_host())
    print(reader.redis_password())
| 28.247059
| 124
| 0.630987
| 349
| 2,401
| 4.080229
| 0.17765
| 0.13764
| 0.168539
| 0.191011
| 0.523876
| 0.509831
| 0.255618
| 0.200843
| 0.162921
| 0.109551
| 0
| 0.020138
| 0.214077
| 2,401
| 85
| 125
| 28.247059
| 0.734499
| 0
| 0
| 0.166667
| 0
| 0
| 0.137386
| 0.009575
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.116667
| 0.033333
| 0.283333
| 0.7
| 0.033333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
5894f7eed1f1c5e893aa0317c813d0e4bcc600bf
| 208
|
py
|
Python
|
app/defaults/AB_Test/get_reward.py
|
bartfrenk/streamingbandit
|
4237a05b439c2c12912e813f0b76ccf8af382aef
|
[
"MIT"
] | 64
|
2017-05-21T06:08:57.000Z
|
2022-01-25T14:44:54.000Z
|
app/defaults/AB_Test/get_reward.py
|
bartfrenk/streamingbandit
|
4237a05b439c2c12912e813f0b76ccf8af382aef
|
[
"MIT"
] | 76
|
2017-05-04T10:30:59.000Z
|
2020-05-07T06:43:03.000Z
|
app/defaults/AB_Test/get_reward.py
|
bartfrenk/streamingbandit
|
4237a05b439c2c12912e813f0b76ccf8af382aef
|
[
"MIT"
] | 12
|
2017-05-04T13:10:23.000Z
|
2020-02-22T17:12:49.000Z
|
# Generate random rewards for each treatment
# NOTE(review): this snippet references `self` and `np` at top level; it is
# presumably executed inside a streamingbandit code context that supplies
# them — confirm against the hosting framework.
if self.action["treatment"] == "1":
    # Treatment 1: Bernoulli reward with success probability 0.5.
    self.reward["value"] = np.random.binomial(1,0.5)
else: #Treatment = 2
    # Treatment 2: Bernoulli reward with success probability 0.3.
    self.reward["value"] = np.random.binomial(1,0.3)
| 34.666667
| 52
| 0.677885
| 32
| 208
| 4.40625
| 0.59375
| 0.141844
| 0.212766
| 0.241135
| 0.468085
| 0.468085
| 0.468085
| 0.468085
| 0
| 0
| 0
| 0.044693
| 0.139423
| 208
| 5
| 53
| 41.6
| 0.743017
| 0.264423
| 0
| 0
| 1
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
58a3864a67a2dcba7dc7a6529a33b0cf60ad1e67
| 207
|
py
|
Python
|
spikesorters/__init__.py
|
manimoh/spikesorters
|
b5ea81a316f6caeacf5be587561804ef99807d76
|
[
"MIT"
] | 22
|
2019-06-27T08:49:20.000Z
|
2022-03-21T00:49:09.000Z
|
spikesorters/__init__.py
|
manimoh/spikesorters
|
b5ea81a316f6caeacf5be587561804ef99807d76
|
[
"MIT"
] | 171
|
2019-06-27T06:56:25.000Z
|
2021-07-23T13:24:59.000Z
|
spikesorters/__init__.py
|
manimoh/spikesorters
|
b5ea81a316f6caeacf5be587561804ef99807d76
|
[
"MIT"
] | 26
|
2019-06-26T17:41:39.000Z
|
2022-03-21T00:49:12.000Z
|
from .sorterlist import *
from .version import version as __version__
from .basesorter import BaseSorter
from .launcher import run_sorters, collect_sorting_outputs, iter_output_folders, iter_sorting_output
| 34.5
| 100
| 0.855072
| 27
| 207
| 6.148148
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10628
| 207
| 5
| 101
| 41.4
| 0.897297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5435b1293210bd8b9d43b774e38e8248597cf4b0
| 12,700
|
py
|
Python
|
model/baseline.py
|
tianqi-wang1996/Airsim_imitation-reinforcement-learning
|
d2b355309573d4cbe3e2e75512760d97fb261c82
|
[
"MIT"
] | 1
|
2020-09-17T14:13:15.000Z
|
2020-09-17T14:13:15.000Z
|
model/baseline.py
|
familywei/Airsim_imitation-reinforcement-learning
|
d2b355309573d4cbe3e2e75512760d97fb261c82
|
[
"MIT"
] | null | null | null |
model/baseline.py
|
familywei/Airsim_imitation-reinforcement-learning
|
d2b355309573d4cbe3e2e75512760d97fb261c82
|
[
"MIT"
] | 6
|
2020-11-27T09:56:11.000Z
|
2021-09-26T02:57:27.000Z
|
import torch
import torch.nn as nn
import numpy as np
# Minimal residual building block for ResNet-34.
# The 'downsample' flag controls whether this block halves the spatial resolution (stride 2).
class BottleNeck(nn.Module):
    """Basic residual block for ResNet-34: two 3x3 convolutions plus a skip.

    When ``downsample`` is True both the first convolution and the 1x1
    projection on the skip path use stride 2, halving spatial resolution.
    NOTE(review): with ``downsample=False`` the skip is the identity, so the
    block assumes ``in_channels == out_channels`` in that case.
    """

    def __init__(self, in_channels, out_channels, downsample=False):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.downsample = downsample
        self.stride = 2 if downsample else 1
        self.relu = nn.ReLU()
        # 1x1 projection used on the skip path when downsampling.
        self.conv_main = nn.Conv2d(in_channels=self.in_channels,
                                   out_channels=self.out_channels,
                                   kernel_size=1,
                                   stride=self.stride,
                                   padding=0,
                                   dilation=1,
                                   bias=False)
        self.batch_norm_main = nn.BatchNorm2d(self.out_channels)
        # Residual path: two 3x3 convolutions, each followed by batch norm.
        self.conv1 = nn.Conv2d(in_channels=self.in_channels,
                               out_channels=self.out_channels,
                               kernel_size=3,
                               stride=self.stride,
                               padding=1,
                               dilation=1,
                               bias=False)
        self.batch_norm1 = nn.BatchNorm2d(self.out_channels)
        self.conv2 = nn.Conv2d(in_channels=self.out_channels,
                               out_channels=self.out_channels,
                               kernel_size=3,
                               stride=1,
                               padding=1,
                               dilation=1,
                               bias=False)
        self.batch_norm2 = nn.BatchNorm2d(self.out_channels)

    def forward(self, x):
        # Skip path: identity, or 1x1 projection when resolution changes.
        if self.downsample:
            shortcut = self.batch_norm_main(self.conv_main(x))
        else:
            shortcut = x
        # Residual path.
        residual = self.relu(self.batch_norm1(self.conv1(x)))
        residual = self.batch_norm2(self.conv2(residual))
        return self.relu(residual + shortcut)
class ResNet34(nn.Module):
    """ResNet-34 backbone producing a 3-dim control output (acc, brake, steering).

    ``forward`` concatenates the 512-dim pooled CNN features with an extra
    input ``x2`` before the fully connected head (fc1 takes 513 = 512 + 1
    features, so ``x2`` carries one feature per sample).
    """

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU()
        # Saturating activations for the control outputs.
        self.hard_tanh_1 = torch.nn.Hardtanh(-1, 1)  # steering in [-1, 1]
        self.hard_tanh_2 = torch.nn.Hardtanh(0, 1)   # acc / brake in [0, 1]
        # Four residual stages (3, 4, 6, 3 blocks).
        self.b21 = BottleNeck(64, 64)
        self.b22 = BottleNeck(64, 64)
        self.b23 = BottleNeck(64, 64)
        self.b31 = BottleNeck(64, 128, downsample=True)
        self.b32 = BottleNeck(128, 128)
        self.b33 = BottleNeck(128, 128)
        self.b34 = BottleNeck(128, 128)
        self.b41 = BottleNeck(128, 256, downsample=True)
        self.b42 = BottleNeck(256, 256)
        self.b43 = BottleNeck(256, 256)
        self.b44 = BottleNeck(256, 256)
        self.b45 = BottleNeck(256, 256)
        self.b46 = BottleNeck(256, 256)
        self.b51 = BottleNeck(256, 512, downsample=True)
        self.b52 = BottleNeck(512, 512)
        self.b53 = BottleNeck(512, 512)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        # Fully connected head over pooled features + extra input.
        self.fc1 = nn.Linear(513, 250)
        self.fc1_bn = nn.BatchNorm1d(250)
        self.fc2 = nn.Linear(250, 100)
        self.fc2_bn = nn.BatchNorm1d(100)
        self.fc3 = nn.Linear(100, 3)

    def forward(self, x, x2):
        x = self.relu(self.bn1(self.conv1(x)))
        # Run all residual blocks in order.
        for block in (self.b21, self.b22, self.b23,
                      self.b31, self.b32, self.b33, self.b34,
                      self.b41, self.b42, self.b43, self.b44, self.b45, self.b46,
                      self.b51, self.b52, self.b53):
            x = block(x)
        x = self.avg_pool(x)
        x = x.view(x.size(0), -1)
        x = torch.cat((x, x2), dim=1)
        x = self.relu(self.fc1_bn(self.fc1(x)))
        x = self.relu(self.fc2_bn(self.fc2(x)))
        x = self.fc3(x)
        # Clamp each control channel to its valid range.
        acc = self.hard_tanh_2(x[::, 0]).unsqueeze(1)
        brake = self.hard_tanh_2(x[::, 1]).unsqueeze(1)
        steering = self.hard_tanh_1(x[::, 2]).unsqueeze(1)
        return torch.cat((acc, brake, steering), dim=1)
class Critic(nn.Module):
    """ResNet-34-based critic mapping (image, extra feature, action) to a scalar.

    fc1 takes 513 = 512 + 1 features, so ``x2`` carries one feature per
    sample; fc3_critic takes 103 = 100 + 3, so the action ``a`` carries
    three features per sample.
    """

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU()
        # Four residual stages (3, 4, 6, 3 blocks).
        self.b21 = BottleNeck(64, 64)
        self.b22 = BottleNeck(64, 64)
        self.b23 = BottleNeck(64, 64)
        self.b31 = BottleNeck(64, 128, downsample=True)
        self.b32 = BottleNeck(128, 128)
        self.b33 = BottleNeck(128, 128)
        self.b34 = BottleNeck(128, 128)
        self.b41 = BottleNeck(128, 256, downsample=True)
        self.b42 = BottleNeck(256, 256)
        self.b43 = BottleNeck(256, 256)
        self.b44 = BottleNeck(256, 256)
        self.b45 = BottleNeck(256, 256)
        self.b46 = BottleNeck(256, 256)
        self.b51 = BottleNeck(256, 512, downsample=True)
        self.b52 = BottleNeck(512, 512)
        self.b53 = BottleNeck(512, 512)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Linear(513, 250)
        self.fc1_bn = nn.BatchNorm1d(250)
        self.fc2 = nn.Linear(250, 100)
        self.fc2_bn = nn.BatchNorm1d(100)
        # Final layer sees the 100-dim feature concatenated with the action.
        self.fc3_critic = nn.Linear(103, 1)

    def forward(self, x, x2, a):
        x = self.relu(self.bn1(self.conv1(x)))
        for block in (self.b21, self.b22, self.b23,
                      self.b31, self.b32, self.b33, self.b34,
                      self.b41, self.b42, self.b43, self.b44, self.b45, self.b46,
                      self.b51, self.b52, self.b53):
            x = block(x)
        x = self.avg_pool(x)
        x = x.view(x.size(0), -1)
        x = torch.cat((x, x2), dim=1)
        x = self.relu(self.fc1_bn(self.fc1(x)))
        x = self.relu(self.fc2_bn(self.fc2(x)))
        x = torch.cat((x, a), dim=1)
        x = self.fc3_critic(x)
        # NOTE(review): the final ReLU clamps the value estimate to >= 0,
        # which is unusual for a critic; preserved exactly as the original.
        return self.relu(x)
# Minimal residual (bottleneck) building block for ResNet-50.
# The skip path is projected/downsampled whenever either flag 'first_block' or 'downsample' is True.
class BottleNeck_50(nn.Module):
    """Bottleneck residual block for ResNet-50: 1x1 reduce -> 3x3 -> 1x1 expand.

    The skip path goes through a 1x1 projection when either ``first_block``
    (channel count changes) or ``downsample`` (stride 2) is set; otherwise
    it is the identity.  Only ``downsample`` affects the stride.
    """

    def __init__(self, in_channels, out_channels, first_block=False, downsample=False):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.downsample = downsample
        self.first_block = first_block
        self.stride = 2 if downsample else 1
        self.relu = nn.ReLU()
        # 1x1 projection for the skip path.
        self.conv_main = nn.Conv2d(in_channels=self.in_channels,
                                   out_channels=self.out_channels,
                                   kernel_size=1,
                                   stride=self.stride,
                                   padding=0,
                                   dilation=1,
                                   bias=False)
        self.batch_norm_main = nn.BatchNorm2d(self.out_channels)
        # Residual path: bottleneck with a 4x channel reduction in the middle.
        self.conv1 = nn.Conv2d(in_channels=self.in_channels,
                               out_channels=self.out_channels//4,
                               kernel_size=1,
                               stride=self.stride,
                               padding=0,
                               dilation=1,
                               bias=False)
        self.batch_norm1 = nn.BatchNorm2d(self.out_channels//4)
        self.conv2 = nn.Conv2d(in_channels=self.out_channels//4,
                               out_channels=self.out_channels//4,
                               kernel_size=3,
                               stride=1,
                               padding=1,
                               dilation=1,
                               bias=False)
        self.batch_norm2 = nn.BatchNorm2d(self.out_channels//4)
        self.conv3 = nn.Conv2d(in_channels=self.out_channels//4,
                               out_channels=self.out_channels,
                               kernel_size=1,
                               stride=1,
                               padding=0,
                               dilation=1,
                               bias=False)
        self.batch_norm3 = nn.BatchNorm2d(self.out_channels)

    def forward(self, x):
        # Skip path: both flags select the same 1x1 projection, so the two
        # original branches collapse into a single condition.
        if self.downsample or self.first_block:
            shortcut = self.batch_norm_main(self.conv_main(x))
        else:
            shortcut = x
        # Residual path.
        residual = self.relu(self.batch_norm1(self.conv1(x)))
        residual = self.relu(self.batch_norm2(self.conv2(residual)))
        residual = self.batch_norm3(self.conv3(residual))
        return self.relu(residual + shortcut)
class ResNet50(nn.Module):
    """ResNet-50 backbone producing a 3-dim output.

    ``forward`` concatenates the 2048-dim pooled CNN features with an extra
    input ``x2`` before the head (fc1 takes 2050 = 2048 + 2 features, so
    ``x2`` carries two features per sample).
    """

    def __init__(self):
        super().__init__()
        # NOTE(review): the original author discussed using a stride-1 3x3
        # stem (CIFAR-style, per the CBAM reference code); the stride-2 7x7
        # ImageNet-style stem below is what is actually used here.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU()
        # Four bottleneck stages (3, 4, 6, 3 blocks).
        self.b21 = BottleNeck_50(64, 256, first_block=True)
        self.b22 = BottleNeck_50(256, 256)
        self.b23 = BottleNeck_50(256, 256)
        self.b31 = BottleNeck_50(256, 512, downsample=True)
        self.b32 = BottleNeck_50(512, 512)
        self.b33 = BottleNeck_50(512, 512)
        self.b34 = BottleNeck_50(512, 512)
        self.b41 = BottleNeck_50(512, 1024, downsample=True)
        self.b42 = BottleNeck_50(1024, 1024)
        self.b43 = BottleNeck_50(1024, 1024)
        self.b44 = BottleNeck_50(1024, 1024)
        self.b45 = BottleNeck_50(1024, 1024)
        self.b46 = BottleNeck_50(1024, 1024)
        self.b51 = BottleNeck_50(1024, 2048, downsample=True)
        self.b52 = BottleNeck_50(2048, 2048)
        self.b53 = BottleNeck_50(2048, 2048)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Linear(2050, 250)
        self.fc2 = nn.Linear(250, 100)
        self.fc3 = nn.Linear(100, 3)

    def forward(self, x, x2):
        x = self.relu(self.bn1(self.conv1(x)))
        for block in (self.b21, self.b22, self.b23,
                      self.b31, self.b32, self.b33, self.b34,
                      self.b41, self.b42, self.b43, self.b44, self.b45, self.b46,
                      self.b51, self.b52, self.b53):
            x = block(x)
        x = self.avg_pool(x)
        x = x.view(x.size(0), -1)
        x = torch.cat((x, x2), dim=1)
        # NOTE(review): unlike ResNet34 above, this head applies no
        # activation between the fc layers — preserved as in the original.
        x = self.fc1(x)
        x = self.fc2(x)
        return self.fc3(x)
| 32.731959
| 124
| 0.507795
| 1,628
| 12,700
| 3.853194
| 0.111179
| 0.072533
| 0.078431
| 0.043998
| 0.7934
| 0.728519
| 0.717201
| 0.7011
| 0.684043
| 0.661725
| 0
| 0.104768
| 0.380709
| 12,700
| 388
| 125
| 32.731959
| 0.692816
| 0.102047
| 0
| 0.833898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033898
| false
| 0
| 0.010169
| 0
| 0.077966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
545b50e6909764e7f35d20d488b9b531c17f658a
| 1,832
|
py
|
Python
|
src/scrapers/base_scraper.py
|
at1as/IMDB-Scrape
|
38c8c92f0f73fb8d9c86369036841ea7d9697520
|
[
"MIT"
] | 2
|
2016-02-23T18:42:23.000Z
|
2017-06-22T22:34:21.000Z
|
src/scrapers/base_scraper.py
|
at1as/IMDB-Scrape
|
38c8c92f0f73fb8d9c86369036841ea7d9697520
|
[
"MIT"
] | 1
|
2019-01-18T03:05:38.000Z
|
2019-01-18T03:05:38.000Z
|
src/scrapers/base_scraper.py
|
at1as/Media-Database
|
38c8c92f0f73fb8d9c86369036841ea7d9697520
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class BaseScraper(object, metaclass=ABCMeta):
    """Abstract interface every media-site scraper must implement.

    Fixes: the original used the Python-2 ``__metaclass__ = ABCMeta``
    attribute, which is ignored by Python 3 — the class was therefore not
    actually abstract.  Declaring ``metaclass=ABCMeta`` restores the
    intended behaviour: instantiation fails until every @abstractmethod is
    overridden.  Also fixes the ``eries`` -> ``series`` parameter typo in
    get_series_details.
    """

    def __init__(self):
        pass

    @abstractmethod
    def construct_search_url(self, title):
        """Build the search URL for the given title."""
        raise NotImplementedError

    @abstractmethod
    def get_title(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_alternative_title(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_description(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_director(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_rating(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_genres(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_votes(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_running_time(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_content_rating(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_stars(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_languages(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_image_url(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_movie_year(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_awards(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_series_year(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_creator(self, xml_doc):
        raise NotImplementedError

    @abstractmethod
    def get_movie_details(self, movie, mediatype, movie_url):
        raise NotImplementedError

    @abstractmethod
    def get_series_details(self, series, mediatype, series_url):
        # Parameter was misspelled 'eries' in the original.
        raise NotImplementedError
| 21.552941
| 61
| 0.75
| 197
| 1,832
| 6.700508
| 0.213198
| 0.244697
| 0.518182
| 0.559091
| 0.760606
| 0.727273
| 0.684848
| 0.684848
| 0.357576
| 0
| 0
| 0
| 0.191594
| 1,832
| 84
| 62
| 21.809524
| 0.89129
| 0
| 0
| 0.612903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.322581
| false
| 0.016129
| 0.016129
| 0
| 0.370968
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
546c8f3d389cc9633709a828627d31c31985719e
| 19
|
py
|
Python
|
vex/_version.py
|
jobevers/vex
|
b7680c40897b8cbe6aae55ec9812b4fb11738192
|
[
"MIT"
] | 1
|
2018-03-08T10:32:15.000Z
|
2018-03-08T10:32:15.000Z
|
vex/_version.py
|
jobevers/vex
|
b7680c40897b8cbe6aae55ec9812b4fb11738192
|
[
"MIT"
] | null | null | null |
vex/_version.py
|
jobevers/vex
|
b7680c40897b8cbe6aae55ec9812b4fb11738192
|
[
"MIT"
] | 1
|
2020-12-18T09:05:34.000Z
|
2020-12-18T09:05:34.000Z
|
# Package version string, read by the packaging/metadata machinery.
VERSION = "0.0.18"
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
54a04eb3fb4f7da6ba692b62ecd20602c238ff44
| 56
|
py
|
Python
|
6 kyu/Count the divisible numbers.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
6 kyu/Count the divisible numbers.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
6 kyu/Count the divisible numbers.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def divisible_count(x, y, k):
    """Return how many integers in the inclusive range [x, y] are divisible by k."""
    # Smallest multiple of k strictly greater than y, then count how many
    # steps of k fit between it and x (exclusive of that upper multiple).
    first_multiple_above = k * (y // k + 1)
    return (first_multiple_above - x) // k
| 28
| 28
| 0.589286
| 13
| 56
| 2.461538
| 0.615385
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020408
| 0.125
| 56
| 2
| 28
| 28
| 0.632653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b73d65cd205e0320beb43bee65d3d364f80aed64
| 82
|
py
|
Python
|
EIDEGraphics/EIDEUser.py
|
Vicente-Francisco/EIDEGraphics
|
8e61bf64f4644a2e80df00946271f8cba4b5e65e
|
[
"Unlicense"
] | 2
|
2022-02-09T08:06:13.000Z
|
2022-03-18T07:30:19.000Z
|
EIDEGraphics/EIDEUser.py
|
Vicente-Francisco/EIDEGraphics
|
8e61bf64f4644a2e80df00946271f8cba4b5e65e
|
[
"Unlicense"
] | null | null | null |
EIDEGraphics/EIDEUser.py
|
Vicente-Francisco/EIDEGraphics
|
8e61bf64f4644a2e80df00946271f8cba4b5e65e
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Test script. Emulates user script.
"""
import EIDE
| 9.111111
| 34
| 0.585366
| 10
| 82
| 4.8
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.207317
| 82
| 8
| 35
| 10.25
| 0.723077
| 0.695122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3f9c490770ac6be555913e9a07f362225aad6b86
| 34,214
|
py
|
Python
|
release/stubs.min/System/__init___parts/DateTime.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 182
|
2017-06-27T02:26:15.000Z
|
2022-03-30T18:53:43.000Z
|
release/stubs.min/System/__init___parts/DateTime.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 28
|
2017-06-27T13:38:23.000Z
|
2022-03-15T11:19:44.000Z
|
release/stubs.min/System/__init___parts/DateTime.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 67
|
2017-06-28T09:43:59.000Z
|
2022-03-20T21:17:10.000Z
|
class DateTime(object,IComparable,IFormattable,IConvertible,ISerializable,IComparable[DateTime],IEquatable[DateTime]):
"""
Represents an instant in time,typically expressed as a date and time of day.
DateTime(ticks: Int64)
DateTime(ticks: Int64,kind: DateTimeKind)
DateTime(year: int,month: int,day: int)
DateTime(year: int,month: int,day: int,calendar: Calendar)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int,kind: DateTimeKind)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int,calendar: Calendar)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int,kind: DateTimeKind)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int,calendar: Calendar)
DateTime(year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int,calendar: Calendar,kind: DateTimeKind)
"""
def Add(self,value):
"""
Add(self: DateTime,value: TimeSpan) -> DateTime
Returns a new System.DateTime that adds the value of the specified System.TimeSpan to the value
of this instance.
value: A positive or negative time interval.
Returns: An object whose value is the sum of the date and time represented by this instance and the time
interval represented by value.
"""
pass
def AddDays(self,value):
"""
AddDays(self: DateTime,value: float) -> DateTime
Returns a new System.DateTime that adds the specified number of days to the value of this
instance.
value: A number of whole and fractional days. The value parameter can be negative or positive.
Returns: An object whose value is the sum of the date and time represented by this instance and the
number of days represented by value.
"""
pass
def AddHours(self,value):
"""
AddHours(self: DateTime,value: float) -> DateTime
Returns a new System.DateTime that adds the specified number of hours to the value of this
instance.
value: A number of whole and fractional hours. The value parameter can be negative or positive.
Returns: An object whose value is the sum of the date and time represented by this instance and the
number of hours represented by value.
"""
pass
def AddMilliseconds(self,value):
"""
AddMilliseconds(self: DateTime,value: float) -> DateTime
Returns a new System.DateTime that adds the specified number of milliseconds to the value of
this instance.
value: A number of whole and fractional milliseconds. The value parameter can be negative or positive.
Note that this value is rounded to the nearest integer.
Returns: An object whose value is the sum of the date and time represented by this instance and the
number of milliseconds represented by value.
"""
pass
def AddMinutes(self,value):
"""
AddMinutes(self: DateTime,value: float) -> DateTime
Returns a new System.DateTime that adds the specified number of minutes to the value of this
instance.
value: A number of whole and fractional minutes. The value parameter can be negative or positive.
Returns: An object whose value is the sum of the date and time represented by this instance and the
number of minutes represented by value.
"""
pass
def AddMonths(self,months):
"""
AddMonths(self: DateTime,months: int) -> DateTime
Returns a new System.DateTime that adds the specified number of months to the value of this
instance.
months: A number of months. The months parameter can be negative or positive.
Returns: An object whose value is the sum of the date and time represented by this instance and months.
"""
pass
def AddSeconds(self,value):
"""
AddSeconds(self: DateTime,value: float) -> DateTime
Returns a new System.DateTime that adds the specified number of seconds to the value of this
instance.
value: A number of whole and fractional seconds. The value parameter can be negative or positive.
Returns: An object whose value is the sum of the date and time represented by this instance and the
number of seconds represented by value.
"""
pass
def AddTicks(self,value):
"""
AddTicks(self: DateTime,value: Int64) -> DateTime
Returns a new System.DateTime that adds the specified number of ticks to the value of this
instance.
value: A number of 100-nanosecond ticks. The value parameter can be positive or negative.
Returns: An object whose value is the sum of the date and time represented by this instance and the time
represented by value.
"""
pass
def AddYears(self,value):
"""
AddYears(self: DateTime,value: int) -> DateTime
Returns a new System.DateTime that adds the specified number of years to the value of this
instance.
value: A number of years. The value parameter can be negative or positive.
Returns: An object whose value is the sum of the date and time represented by this instance and the
number of years represented by value.
"""
pass
@staticmethod
def Compare(t1,t2):
"""
Compare(t1: DateTime,t2: DateTime) -> int
Compares two instances of System.DateTime and returns an integer that indicates whether the
first instance is earlier than,the same as,or later than the second instance.
t1: The first object to compare.
t2: The second object to compare.
Returns: A signed number indicating the relative values of t1 and t2.Value Type Condition Less than zero
t1 is earlier than t2. Zero t1 is the same as t2. Greater than zero t1 is later than t2.
"""
pass
def CompareTo(self,value):
"""
CompareTo(self: DateTime,value: DateTime) -> int
Compares the value of this instance to a specified System.DateTime value and returns an integer
that indicates whether this instance is earlier than,the same as,or later than the specified
System.DateTime value.
value: The object to compare to the current instance.
Returns: A signed number indicating the relative values of this instance and the value parameter.Value
Description Less than zero This instance is earlier than value. Zero This instance is the same
as value. Greater than zero This instance is later than value.
CompareTo(self: DateTime,value: object) -> int
Compares the value of this instance to a specified object that contains a specified
System.DateTime value,and returns an integer that indicates whether this instance is earlier
than,the same as,or later than the specified System.DateTime value.
value: A boxed object to compare,or null.
Returns: A signed number indicating the relative values of this instance and value.Value Description Less
than zero This instance is earlier than value. Zero This instance is the same as value. Greater
than zero This instance is later than value,or value is null.
"""
pass
@staticmethod
def DaysInMonth(year,month):
"""
DaysInMonth(year: int,month: int) -> int
Returns the number of days in the specified month and year.
year: The year.
month: The month (a number ranging from 1 to 12).
Returns: The number of days in month for the specified year.For example,if month equals 2 for February,
the return value is 28 or 29 depending upon whether year is a leap year.
"""
pass
def Equals(self,*__args):
"""
Equals(t1: DateTime,t2: DateTime) -> bool
Returns a value indicating whether two System.DateTime instances have the same date and time
value.
t1: The first object to compare.
t2: The second object to compare.
Returns: true if the two values are equal; otherwise,false.
Equals(self: DateTime,value: DateTime) -> bool
Returns a value indicating whether the value of this instance is equal to the value of the
specified System.DateTime instance.
value: The object to compare to this instance.
Returns: true if the value parameter equals the value of this instance; otherwise,false.
Equals(self: DateTime,value: object) -> bool
Returns a value indicating whether this instance is equal to a specified object.
value: The object to compare to this instance.
Returns: true if value is an instance of System.DateTime and equals the value of this instance;
otherwise,false.
"""
pass
@staticmethod
def FromBinary(dateData):
"""
FromBinary(dateData: Int64) -> DateTime
Deserializes a 64-bit binary value and recreates an original serialized System.DateTime object.
dateData: A 64-bit signed integer that encodes the System.DateTime.Kind property in a 2-bit field and the
System.DateTime.Ticks property in a 62-bit field.
Returns: An object that is equivalent to the System.DateTime object that was serialized by the
System.DateTime.ToBinary method.
"""
pass
@staticmethod
def FromFileTime(fileTime):
"""
FromFileTime(fileTime: Int64) -> DateTime
Converts the specified Windows file time to an equivalent local time.
fileTime: A Windows file time expressed in ticks.
Returns: An object that represents the local time equivalent of the date and time represented by the
fileTime parameter.
"""
pass
@staticmethod
def FromFileTimeUtc(fileTime):
"""
FromFileTimeUtc(fileTime: Int64) -> DateTime
Converts the specified Windows file time to an equivalent UTC time.
fileTime: A Windows file time expressed in ticks.
Returns: An object that represents the UTC time equivalent of the date and time represented by the
fileTime parameter.
"""
pass
@staticmethod
def FromOADate(d):
"""
FromOADate(d: float) -> DateTime
Returns a System.DateTime equivalent to the specified OLE Automation Date.
d: An OLE Automation Date value.
Returns: An object that represents the same date and time as d.
"""
pass
def GetDateTimeFormats(self,*__args):
"""
GetDateTimeFormats(self: DateTime,format: Char) -> Array[str]
Converts the value of this instance to all the string representations supported by the specified
standard date and time format specifier.
format: A standard date and time format string (see Remarks).
Returns: A string array where each element is the representation of the value of this instance formatted
with the format standard date and time format specifier.
GetDateTimeFormats(self: DateTime,format: Char,provider: IFormatProvider) -> Array[str]
Converts the value of this instance to all the string representations supported by the specified
standard date and time format specifier and culture-specific formatting information.
format: A date and time format string (see Remarks).
provider: An object that supplies culture-specific formatting information about this instance.
Returns: A string array where each element is the representation of the value of this instance formatted
with one of the standard date and time format specifiers.
GetDateTimeFormats(self: DateTime) -> Array[str]
Converts the value of this instance to all the string representations supported by the standard
date and time format specifiers.
Returns: A string array where each element is the representation of the value of this instance formatted
with one of the standard date and time format specifiers.
GetDateTimeFormats(self: DateTime,provider: IFormatProvider) -> Array[str]
Converts the value of this instance to all the string representations supported by the standard
date and time format specifiers and the specified culture-specific formatting information.
provider: An object that supplies culture-specific formatting information about this instance.
Returns: A string array where each element is the representation of the value of this instance formatted
with one of the standard date and time format specifiers.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: DateTime) -> int
Returns the hash code for this instance.
Returns: A 32-bit signed integer hash code.
"""
pass
def GetTypeCode(self):
"""
GetTypeCode(self: DateTime) -> TypeCode
Returns the System.TypeCode for value type System.DateTime.
Returns: The enumerated constant,System.TypeCode.DateTime.
"""
pass
def IsDaylightSavingTime(self):
"""
IsDaylightSavingTime(self: DateTime) -> bool
Indicates whether this instance of System.DateTime is within the daylight saving time range for
the current time zone.
Returns: true if System.DateTime.Kind is System.DateTimeKind.Local or System.DateTimeKind.Unspecified and
the value of this instance of System.DateTime is within the daylight saving time range for the
current time zone. false if System.DateTime.Kind is System.DateTimeKind.Utc.
"""
pass
@staticmethod
def IsLeapYear(year):
"""
IsLeapYear(year: int) -> bool
Returns an indication whether the specified year is a leap year.
year: A 4-digit year.
Returns: true if year is a leap year; otherwise,false.
"""
pass
@staticmethod
def Parse(s,provider=None,styles=None):
"""
Parse(s: str,provider: IFormatProvider,styles: DateTimeStyles) -> DateTime
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified culture-specific format information and formatting style.
s: A string containing a date and time to convert.
provider: An object that supplies culture-specific formatting information about s.
styles: A bitwise combination of the enumeration values that indicates the style elements that can be
present in s for the parse operation to succeed and that defines how to interpret the parsed
date in relation to the current time zone or the current date. A typical value to specify is
System.Globalization.DateTimeStyles.None.
Returns: An object that is equivalent to the date and time contained in s,as specified by provider and
styles.
Parse(s: str,provider: IFormatProvider) -> DateTime
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified culture-specific format information.
s: A string containing a date and time to convert.
provider: An object that supplies culture-specific format information about s.
Returns: An object that is equivalent to the date and time contained in s as specified by provider.
Parse(s: str) -> DateTime
Converts the specified string representation of a date and time to its System.DateTime
equivalent.
s: A string containing a date and time to convert.
Returns: An object that is equivalent to the date and time contained in s.
"""
pass
@staticmethod
def ParseExact(s,*__args):
"""
ParseExact(s: str,formats: Array[str],provider: IFormatProvider,style: DateTimeStyles) -> DateTime
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified array of formats,culture-specific format information,and style.
The format of the string representation must match at least one of the specified formats exactly
or an exception is thrown.
s: A string containing one or more dates and times to convert.
formats: An array of allowable formats of s.
provider: An object that supplies culture-specific format information about s.
style: A bitwise combination of enumeration values that indicates the permitted format of s. A typical
value to specify is System.Globalization.DateTimeStyles.None.
Returns: An object that is equivalent to the date and time contained in s,as specified by formats,
provider,and style.
ParseExact(s: str,format: str,provider: IFormatProvider,style: DateTimeStyles) -> DateTime
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified format,culture-specific format information,and style. The
format of the string representation must match the specified format exactly or an exception is
thrown.
s: A string containing a date and time to convert.
format: A format specifier that defines the required format of s.
provider: An object that supplies culture-specific formatting information about s.
style: A bitwise combination of the enumeration values that provides additional information about s,
about style elements that may be present in s,or about the conversion from s to a
System.DateTime value. A typical value to specify is System.Globalization.DateTimeStyles.None.
Returns: An object that is equivalent to the date and time contained in s,as specified by format,
provider,and style.
ParseExact(s: str,format: str,provider: IFormatProvider) -> DateTime
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified format and culture-specific format information. The format of the
string representation must match the specified format exactly.
s: A string that contains a date and time to convert.
format: A format specifier that defines the required format of s.
provider: An object that supplies culture-specific format information about s.
Returns: An object that is equivalent to the date and time contained in s,as specified by format and
provider.
"""
pass
@staticmethod
def SpecifyKind(value,kind):
"""
SpecifyKind(value: DateTime,kind: DateTimeKind) -> DateTime
Creates a new System.DateTime object that has the same number of ticks as the specified
System.DateTime,but is designated as either local time,Coordinated Universal Time (UTC),or
neither,as indicated by the specified System.DateTimeKind value.
value: A date and time.
kind: One of the enumeration values that indicates whether the new object represents local time,UTC,
or neither.
Returns: A new object that has the same number of ticks as the object represented by the value parameter
and the System.DateTimeKind value specified by the kind parameter.
"""
pass
def Subtract(self,value):
"""
Subtract(self: DateTime,value: TimeSpan) -> DateTime
Subtracts the specified duration from this instance.
value: The time interval to subtract.
Returns: An object that is equal to the date and time represented by this instance minus the time
interval represented by value.
Subtract(self: DateTime,value: DateTime) -> TimeSpan
Subtracts the specified date and time from this instance.
value: The date and time value to subtract.
Returns: A time interval that is equal to the date and time represented by this instance minus the date
and time represented by value.
"""
pass
def ToBinary(self):
"""
ToBinary(self: DateTime) -> Int64
Serializes the current System.DateTime object to a 64-bit binary value that subsequently can be
used to recreate the System.DateTime object.
Returns: A 64-bit signed integer that encodes the System.DateTime.Kind and System.DateTime.Ticks
properties.
"""
pass
def ToFileTime(self):
"""
ToFileTime(self: DateTime) -> Int64
Converts the value of the current System.DateTime object to a Windows file time.
Returns: The value of the current System.DateTime object expressed as a Windows file time.
"""
pass
def ToFileTimeUtc(self):
"""
ToFileTimeUtc(self: DateTime) -> Int64
Converts the value of the current System.DateTime object to a Windows file time.
Returns: The value of the current System.DateTime object expressed as a Windows file time.
"""
pass
def ToLocalTime(self):
"""
ToLocalTime(self: DateTime) -> DateTime
Converts the value of the current System.DateTime object to local time.
Returns: An object whose System.DateTime.Kind property is System.DateTimeKind.Local,and whose value is
the local time equivalent to the value of the current System.DateTime object,or
System.DateTime.MaxValue if the converted value is too large to be represented by a
System.DateTime object,or System.DateTime.MinValue if the converted value is too small to be
represented as a System.DateTime object.
"""
pass
def ToLongDateString(self):
"""
ToLongDateString(self: DateTime) -> str
Converts the value of the current System.DateTime object to its equivalent long date string
representation.
Returns: A string that contains the long date string representation of the current System.DateTime object.
"""
pass
def ToLongTimeString(self):
"""
ToLongTimeString(self: DateTime) -> str
Converts the value of the current System.DateTime object to its equivalent long time string
representation.
Returns: A string that contains the long time string representation of the current System.DateTime object.
"""
pass
def ToOADate(self):
"""
ToOADate(self: DateTime) -> float
Converts the value of this instance to the equivalent OLE Automation date.
Returns: A double-precision floating-point number that contains an OLE Automation date equivalent to the
value of this instance.
"""
pass
def ToShortDateString(self):
"""
ToShortDateString(self: DateTime) -> str
Converts the value of the current System.DateTime object to its equivalent short date string
representation.
Returns: A string that contains the short date string representation of the current System.DateTime
object.
"""
pass
def ToShortTimeString(self):
"""
ToShortTimeString(self: DateTime) -> str
Converts the value of the current System.DateTime object to its equivalent short time string
representation.
Returns: A string that contains the short time string representation of the current System.DateTime
object.
"""
pass
def ToString(self,*__args):
"""
ToString(self: DateTime,provider: IFormatProvider) -> str
Converts the value of the current System.DateTime object to its equivalent string representation
using the specified culture-specific format information.
provider: An object that supplies culture-specific formatting information.
Returns: A string representation of value of the current System.DateTime object as specified by provider.
ToString(self: DateTime,format: str,provider: IFormatProvider) -> str
Converts the value of the current System.DateTime object to its equivalent string representation
using the specified format and culture-specific format information.
format: A standard or custom date and time format string.
provider: An object that supplies culture-specific formatting information.
Returns: A string representation of value of the current System.DateTime object as specified by format
and provider.
ToString(self: DateTime) -> str
Converts the value of the current System.DateTime object to its equivalent string representation.
Returns: A string representation of the value of the current System.DateTime object.
ToString(self: DateTime,format: str) -> str
Converts the value of the current System.DateTime object to its equivalent string representation
using the specified format.
format: A standard or custom date and time format string (see Remarks).
Returns: A string representation of value of the current System.DateTime object as specified by format.
"""
pass
def ToUniversalTime(self):
"""
ToUniversalTime(self: DateTime) -> DateTime
Converts the value of the current System.DateTime object to Coordinated Universal Time (UTC).
Returns: An object whose System.DateTime.Kind property is System.DateTimeKind.Utc,and whose value is the
UTC equivalent to the value of the current System.DateTime object,or System.DateTime.MaxValue
if the converted value is too large to be represented by a System.DateTime object,or
System.DateTime.MinValue if the converted value is too small to be represented by a
System.DateTime object.
"""
pass
@staticmethod
def TryParse(s,*__args):
"""
TryParse(s: str,provider: IFormatProvider,styles: DateTimeStyles) -> (bool,DateTime)
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified culture-specific format information and formatting style,and
returns a value that indicates whether the conversion succeeded.
s: A string containing a date and time to convert.
provider: An object that supplies culture-specific formatting information about s.
styles: A bitwise combination of enumeration values that defines how to interpret the parsed date in
relation to the current time zone or the current date. A typical value to specify is
System.Globalization.DateTimeStyles.None.
Returns: true if the s parameter was converted successfully; otherwise,false.
TryParse(s: str) -> (bool,DateTime)
Converts the specified string representation of a date and time to its System.DateTime
equivalent and returns a value that indicates whether the conversion succeeded.
s: A string containing a date and time to convert.
Returns: true if the s parameter was converted successfully; otherwise,false.
"""
pass
@staticmethod
def TryParseExact(s,*__args):
"""
TryParseExact(s: str,formats: Array[str],provider: IFormatProvider,style: DateTimeStyles) -> (bool,DateTime)
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified array of formats,culture-specific format information,and style.
The format of the string representation must match at least one of the specified formats
exactly. The method returns a value that indicates whether the conversion succeeded.
s: A string containing one or more dates and times to convert.
formats: An array of allowable formats of s.
provider: An object that supplies culture-specific format information about s.
style: A bitwise combination of enumeration values that indicates the permitted format of s. A typical
value to specify is System.Globalization.DateTimeStyles.None.
Returns: true if the s parameter was converted successfully; otherwise,false.
TryParseExact(s: str,format: str,provider: IFormatProvider,style: DateTimeStyles) -> (bool,DateTime)
Converts the specified string representation of a date and time to its System.DateTime
equivalent using the specified format,culture-specific format information,and style. The
format of the string representation must match the specified format exactly. The method returns
a value that indicates whether the conversion succeeded.
s: A string containing a date and time to convert.
format: The required format of s.
provider: An object that supplies culture-specific formatting information about s.
style: A bitwise combination of one or more enumeration values that indicate the permitted format of s.
Returns: true if s was converted successfully; otherwise,false.
"""
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __cmp__(self,*args):
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
@staticmethod
def __new__(self,*__args):
"""
__new__(cls: type,ticks: Int64)
__new__(cls: type,ticks: Int64,kind: DateTimeKind)
__new__(cls: type,year: int,month: int,day: int)
__new__(cls: type,year: int,month: int,day: int,calendar: Calendar)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int,kind: DateTimeKind)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int,calendar: Calendar)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int,kind: DateTimeKind)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int,calendar: Calendar)
__new__(cls: type,year: int,month: int,day: int,hour: int,minute: int,second: int,millisecond: int,calendar: Calendar,kind: DateTimeKind)
__new__[DateTime]() -> DateTime
"""
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
def __rsub__(self,*args):
"""
__rsub__(d1: DateTime,d2: DateTime) -> TimeSpan
Subtracts a specified date and time from another specified date and time and returns a time
interval.
d1: The date and time value to subtract from (the minuend).
d2: The date and time value to subtract (the subtrahend).
Returns: The time interval between d1 and d2; that is,d1 minus d2.
"""
pass
def __str__(self,*args):
pass
def __sub__(self,*args):
""" x.__sub__(y) <==> x-yx.__sub__(y) <==> x-y """
pass
Date=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the date component of this instance.
Get: Date(self: DateTime) -> DateTime
"""
Day=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the day of the month represented by this instance.
Get: Day(self: DateTime) -> int
"""
DayOfWeek=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the day of the week represented by this instance.
Get: DayOfWeek(self: DateTime) -> DayOfWeek
"""
DayOfYear=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the day of the year represented by this instance.
Get: DayOfYear(self: DateTime) -> int
"""
Hour=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the hour component of the date represented by this instance.
Get: Hour(self: DateTime) -> int
"""
Kind=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether the time represented by this instance is based on local time,Coordinated Universal Time (UTC),or neither.
Get: Kind(self: DateTime) -> DateTimeKind
"""
Millisecond=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the milliseconds component of the date represented by this instance.
Get: Millisecond(self: DateTime) -> int
"""
Minute=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the minute component of the date represented by this instance.
Get: Minute(self: DateTime) -> int
"""
Month=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the month component of the date represented by this instance.
Get: Month(self: DateTime) -> int
"""
Second=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the seconds component of the date represented by this instance.
Get: Second(self: DateTime) -> int
"""
Ticks=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of ticks that represent the date and time of this instance.
Get: Ticks(self: DateTime) -> Int64
"""
TimeOfDay=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the time of day for this instance.
Get: TimeOfDay(self: DateTime) -> TimeSpan
"""
Year=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the year component of the date represented by this instance.
Get: Year(self: DateTime) -> int
"""
MaxValue=None
MinValue=None
Now=None
Today=None
UtcNow=None
| 25.046852
| 215
| 0.697697
| 4,608
| 34,214
| 5.138021
| 0.076389
| 0.046714
| 0.028341
| 0.014783
| 0.765205
| 0.718196
| 0.690615
| 0.672622
| 0.654587
| 0.623374
| 0
| 0.002875
| 0.237505
| 34,214
| 1,365
| 216
| 25.065201
| 0.904669
| 0.772023
| 0
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.387324
| false
| 0.387324
| 0
| 0
| 0.521127
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
3fb301e022862b3369bedfe09d206e1a8487687f
| 204
|
py
|
Python
|
visdets/utils.py
|
eqs/visdets
|
b88f474a2eb6207c3338185eeeaa77658ba2d43a
|
[
"MIT"
] | null | null | null |
visdets/utils.py
|
eqs/visdets
|
b88f474a2eb6207c3338185eeeaa77658ba2d43a
|
[
"MIT"
] | 4
|
2020-03-31T00:56:55.000Z
|
2020-04-28T01:18:15.000Z
|
visdets/utils.py
|
eqs/visdets
|
b88f474a2eb6207c3338185eeeaa77658ba2d43a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
import torch
__all__ = ['tensor2cvmat']
def tensor2cvmat(image: torch.Tensor) -> np.ndarray:
return np.uint8(image.permute(1, 2, 0).numpy() * 255).copy()
| 20.4
| 64
| 0.661765
| 29
| 204
| 4.517241
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 0.156863
| 204
| 9
| 65
| 22.666667
| 0.703488
| 0.102941
| 0
| 0
| 0
| 0
| 0.066298
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
3fc758fb1ffea7b007e0276f1a95b17d80766134
| 183
|
py
|
Python
|
Inheritance/Exercises/03. players_and_monsters/project/soul_master.py
|
geodimitrov/PythonOOP_SoftUni
|
f1c6718c878b618b3ab3f174cd4d187bd178940b
|
[
"MIT"
] | 1
|
2021-06-30T11:53:44.000Z
|
2021-06-30T11:53:44.000Z
|
Inheritance/Exercises/03. players_and_monsters/project/soul_master.py
|
geodimitrov/PythonOOP_SoftUni
|
f1c6718c878b618b3ab3f174cd4d187bd178940b
|
[
"MIT"
] | null | null | null |
Inheritance/Exercises/03. players_and_monsters/project/soul_master.py
|
geodimitrov/PythonOOP_SoftUni
|
f1c6718c878b618b3ab3f174cd4d187bd178940b
|
[
"MIT"
] | null | null | null |
from project.dark_wizard import DarkWizard
class SoulMaster(DarkWizard):
def __repr__(self):
return f"{self.username} of type {SoulMaster.__name__} has level {self.level}"
| 45.75
| 86
| 0.754098
| 24
| 183
| 5.375
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147541
| 183
| 4
| 86
| 45.75
| 0.826923
| 0
| 0
| 0
| 0
| 0
| 0.369565
| 0.11413
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
3fd96d75249273dcd4302e3253f4e0c8ce4c6f05
| 102
|
py
|
Python
|
tests/frontend/pages/base.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | null | null | null |
tests/frontend/pages/base.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | null | null | null |
tests/frontend/pages/base.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | null | null | null |
class Base:
base_url = "https://localhost"
def __init__(self, *args, **kwargs):
pass
| 17
| 40
| 0.588235
| 12
| 102
| 4.583333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.264706
| 102
| 5
| 41
| 20.4
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
3fe2c2a773a6c9641c676325a91876e4ecac8171
| 135
|
py
|
Python
|
novelsave/services/source/__init__.py
|
mHaisham/novelsave
|
011b6c5d705591783aee64662bc88b207bdc7205
|
[
"Apache-2.0"
] | 15
|
2020-11-05T10:05:01.000Z
|
2021-06-28T14:43:56.000Z
|
novelsave/services/source/__init__.py
|
mHaisham/novelsave
|
011b6c5d705591783aee64662bc88b207bdc7205
|
[
"Apache-2.0"
] | 21
|
2020-11-01T04:36:56.000Z
|
2021-08-16T09:36:48.000Z
|
novelsave/services/source/__init__.py
|
mHaisham/novelsave
|
011b6c5d705591783aee64662bc88b207bdc7205
|
[
"Apache-2.0"
] | null | null | null |
from .source_gateway import SourceGateway
from .meta_source_gateway import MetaSourceGateway
from .source_service import SourceService
| 33.75
| 50
| 0.888889
| 16
| 135
| 7.25
| 0.5625
| 0.172414
| 0.327586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 135
| 3
| 51
| 45
| 0.943089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3ff65619d3ad1dc072f4b9f3b5749231abd4c257
| 6,548
|
py
|
Python
|
pricing/vanilla/integration.py
|
Simbold/PyFinQ
|
c8be876ba7977fac1578ef788f51c2c9e7cb7d4c
|
[
"CC0-1.0"
] | 2
|
2020-08-23T19:03:49.000Z
|
2022-01-01T04:54:03.000Z
|
pricing/vanilla/integration.py
|
Simbold/PyFinQ
|
c8be876ba7977fac1578ef788f51c2c9e7cb7d4c
|
[
"CC0-1.0"
] | null | null | null |
pricing/vanilla/integration.py
|
Simbold/PyFinQ
|
c8be876ba7977fac1578ef788f51c2c9e7cb7d4c
|
[
"CC0-1.0"
] | null | null | null |
import numpy as np
from scipy import stats, integrate
from integration_functions import laplace_transform_vanilla, heston_characteristic_function, \
black_scholes_characteristic_function
def closed_form_bs_eu(spot, strike, r, sigma, mt, option_type="call", t=0):
# spot: underlying spot price
# strike: strike price
# r: rik free interest rate
# sigma: volatility of the underlying
# mt: time to maturity in years
# option_type: Type of the option either call or put
# t: time at which to evaluate the option
# returns the European vanilla option value via the Black-Scholes closed form formula
# ------------------------------------------------------------------------------------------------------------------
d1 = (np.log(spot / strike) + (r + 0.5 * (sigma ** 2)) * (mt - t)) / sigma * np.sqrt(mt - t)
d2 = d1 - sigma * np.sqrt(mt - t)
if option_type == "call":
v_t = spot * stats.norm.cdf(d1) - strike * np.exp(-r * (mt - t)) * stats.norm.cdf(d2)
elif option_type == "put":
v_t = strike * np.exp(-r * (mt - t)) * stats.norm.cdf(-d2) - spot * stats.norm.cdf(-d1)
else:
print("ERROR: option_type must be 'call' or 'put'")
return
return v_t
def laplace_heston_eu(spot, strike, r, sigma_tilde, mt, nu0, kappa, lamb, option_type="call", t=0):
# spot: underlying spot price
# strike: strike price
# r: rik free interest rate
# sigma_tilde, nu0, kappa, lambda: heston volatility dynamic parameters
# mt: time to maturity in years
# option_type: Type of the option either call or put
# t: time at which to evaluate the option
# returns list of two values
# v_t: the European vanilla option value in the heston model via laplace transform
# abserr: estimate of the absolute error of the numerical integration scheme
# ------------------------------------------------------------------------------------------------------------------
if option_type == "call":
R = 2
elif option_type == "put":
R = -1
else:
print("ERROR, option_type must be either 'call', or 'put'")
return
def heston_call_integrant(u):
y = (np.exp(-r*mt)/np.pi)*np.real(laplace_transform_vanilla(u*1j+R, strike)*heston_characteristic_function(u-1j*R, np.log(spot), nu0, r, sigma_tilde, mt, t, kappa, lamb))
return y
[v0_heston, abserr] = integrate.quad(heston_call_integrant, 0, 100)[0:2]
return [v0_heston, abserr]
def laplace_transform_vanilla_0(z):
# laplace transform for vanilla option with strike=exp(kappa)
# for for pricing via fast fourier transform
return 1/((z-1)*z)
def fast_fourier_bs_eu(spot, strikes, r, sigma, mt, option_type="call", n=10000, m=400, t=0):
# spot: underlying spot price
# strikes: strike prices can be a list of multiple strikes
# r: rik free interest rate
# sigma: volatility
# mt: time to maturity in years
# option_type: Type of the option either call or put
# n, m: delta=m/n is the mesh size of the integral approximation via the midpoint rule, m should be large and m/n should be small
# t: time at which to evaluate the option
# returns a list of three values:
# 1. option values corresponding to the inputed strikes
# 2. options values without interpolation
# 3. strikes corresponding to the prices without interpolation
# ------------------------------------------------------------------------------------------------------------------
if option_type == "call":
R = 1.1
elif option_type == "put":
R = -0.1
else:
print("ERROR, option_type must be either 'call', or 'put'")
return
def g(u):
return laplace_transform_vanilla_0(R+1j*u)*black_scholes_characteristic_function(u-1j*R, np.log(spot), r, sigma, mt, t)
delta = m/n
kappa1 = np.log(np.min(strikes))
x = np.zeros(n, dtype=np.complex128)
for i in range(1, n):
x[i-1] = g((i - 0.5)*delta) * delta * np.exp(-1j*(i-1)*delta*kappa1)
# perform DFT using the efficient FFT algorithm
x_hat = np.fft.fft(x)
# compute vector kappa
kappa_m = kappa1 + (np.arange(1, n+1) - 1) * 2 * np.pi / m
strikes_fft = np.exp(kappa_m)
# finally compute the option prices
vt_fft = (np.exp(-r*(mt-t) + (1-R)*kappa_m))/np.pi * np.real(x_hat*np.exp(-1j * delta * kappa_m / 2))
# interpolate strike values
vt_fft_interpolated = np.interp(strikes, strikes_fft, vt_fft)
return [vt_fft_interpolated, vt_fft, strikes_fft]
def fast_fourier_heston_eu(spot, strikes, r, sigma_tilde, mt, nu0, kappa, lamb, option_type="call", n=10000, m=400, t=0):
# spot: underlying spot price
# strikes: strike prices can be a list of multiple strikes
# r: rik free interest rate
# sigma_tilde, nu0, kappa, lambda: heston volatility dynamic parameters
# mt: time to maturity in years
# option_type: Type of the option either call or put
# n, m: delta=m/n is the mesh size of the integral approximation via the midpoint rule, m should be large and m/n should be small
# t: time at which to evaluate the option
# returns a list of three values:
# 1. option values corresponding to the inputed strikes
# 2. options values without interpolation
# 3. strikes corresponding to the prices without interpolation
# ------------------------------------------------------------------------------------------------------------------
if option_type == "call":
R = 1.1
elif option_type == "put":
R = -0.1
else:
print("ERROR, option_type must be either 'call', or 'put'")
return
def g(u):
return laplace_transform_vanilla_0(R+1j*u)*heston_characteristic_function(u-1j*R, np.log(spot), nu0, r, sigma_tilde, mt, t, kappa, lamb)
delta = m/n
kappa1 = np.log(np.min(strikes))
x = np.zeros(n, dtype=np.complex128)
for i in range(1, n):
x[i-1] = g((i - 0.5)*delta) * delta * np.exp(-1j*(i-1)*delta*kappa1)
# perform DFT using the efficient FFT algorithm
x_hat = np.fft.fft(x)
# compute vector kappa
kappa_m = kappa1 + (np.arange(1, n+1) - 1) * 2 * np.pi / m
strikes_fft = np.exp(kappa_m)
# finally compute the option prices
vt_fft = (np.exp(-r*(mt-t) + (1-R)*kappa_m))/np.pi * np.real(x_hat*np.exp(-1j * delta * kappa_m / 2))
# interpolate strike values
vt_fft_interpolated = np.interp(strikes, strikes_fft, vt_fft)
return [vt_fft_interpolated, vt_fft, strikes_fft]
| 41.18239
| 179
| 0.609499
| 970
| 6,548
| 4.006186
| 0.164948
| 0.051467
| 0.028821
| 0.02702
| 0.799022
| 0.745754
| 0.741637
| 0.72877
| 0.719763
| 0.719763
| 0
| 0.021268
| 0.217318
| 6,548
| 158
| 180
| 41.443038
| 0.736976
| 0.421961
| 0
| 0.652778
| 0
| 0
| 0.063322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.041667
| 0.041667
| 0.305556
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b74a30acfc3eb4685511d05881195e41bb9eb82b
| 1,647
|
py
|
Python
|
x/client/readers.py
|
qwasa-net/pyllyllyll
|
401c7339953cfc9d5c0f79784fe1a1bf90049e9d
|
[
"MIT"
] | null | null | null |
x/client/readers.py
|
qwasa-net/pyllyllyll
|
401c7339953cfc9d5c0f79784fe1a1bf90049e9d
|
[
"MIT"
] | null | null | null |
x/client/readers.py
|
qwasa-net/pyllyllyll
|
401c7339953cfc9d5c0f79784fe1a1bf90049e9d
|
[
"MIT"
] | null | null | null |
"""Server writers."""
import asyncio
import logging
import socket
from x.client.content import ContentProcessor
def reader(client: socket.socket, reader_id: int) -> tuple:
"""Handle a connection -- read all data, close connection, call processor."""
logging.info("reader %s connected", reader_id)
# read all data till the end
data = b""
while True:
chunk = client.recv(1024)
if not chunk:
break
data += chunk
# close client (shutdown shared socket for all processes)
client.shutdown(socket.SHUT_RDWR)
client.close()
logging.info("reader %s read %s bytes, connection closed", reader_id, len(data))
# process data
rsp = ContentProcessor.put(data)
logging.info("reader %s data processed %s", reader_id, rsp)
return (repr(rsp), reader_id)
async def reader_async(client: socket.socket, reader_id: int) -> tuple:
    """Handle a connection -- read all data, close connection, call processor. (ASYNC version).

    Same contract as ``reader`` but uses the running event loop's
    ``sock_recv`` and the async processor entry point.

    Args:
        client: connected socket to drain.
        reader_id: identifier used only for log correlation.

    Returns:
        Tuple of (repr of the processor response, reader_id).
    """
    logging.info("reader %s connected", reader_id)
    # ``get_running_loop`` is the correct call inside a coroutine;
    # ``get_event_loop`` is deprecated in this context since Python 3.10.
    loop = asyncio.get_running_loop()
    # Accumulate chunks and join once -- avoids quadratic ``bytes +=``.
    chunks = []
    while True:
        chunk = await loop.sock_recv(client, 1024)
        if not chunk:
            break
        chunks.append(chunk)
    data = b"".join(chunks)
    # close client (shutdown shared socket for all processes)
    client.shutdown(socket.SHUT_RDWR)
    client.close()
    logging.info("reader %s read %s bytes, connection closed", reader_id, len(data))
    # process data
    rsp = await ContentProcessor.put_async(data)
    logging.info("reader %s data processed %s", reader_id, rsp)
    return (repr(rsp), reader_id)
| 26.564516
| 98
| 0.661809
| 218
| 1,647
| 4.922018
| 0.284404
| 0.074557
| 0.095061
| 0.100652
| 0.780988
| 0.780988
| 0.780988
| 0.71575
| 0.71575
| 0.71575
| 0
| 0.006314
| 0.230723
| 1,647
| 61
| 99
| 27
| 0.840568
| 0.170006
| 0
| 0.666667
| 0
| 0
| 0.140016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.121212
| 0
| 0.212121
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b758eeaaf1d8c2805f824065cea806825e2e3fe1
| 134
|
py
|
Python
|
maskrcnn_benchmark/_custom_ops.py
|
furkankirac/maskrcnn-benchmark
|
a348dc36600e577c3ba569320f3a6a8e15986f72
|
[
"MIT"
] | 3
|
2020-06-10T04:37:01.000Z
|
2021-12-20T07:45:48.000Z
|
maskrcnn_benchmark/_custom_ops.py
|
furkankirac/maskrcnn-benchmark
|
a348dc36600e577c3ba569320f3a6a8e15986f72
|
[
"MIT"
] | 1
|
2020-06-17T09:05:31.000Z
|
2021-09-13T09:16:36.000Z
|
maskrcnn_benchmark/_custom_ops.py
|
furkankirac/maskrcnn-benchmark
|
a348dc36600e577c3ba569320f3a6a8e15986f72
|
[
"MIT"
] | 1
|
2020-07-06T05:47:12.000Z
|
2020-07-06T05:47:12.000Z
|
# Load the project's compiled custom-ops shared library so that its
# operators register with ``torch.ops`` at import time.
import os
import torch
# The .so is expected at <this package dir>/lib/libmaskrcnn_benchmark_customops.so;
# presumably produced by the project's build step -- confirm before relocating.
torch.ops.load_library(os.path.join(os.path.dirname(__file__), 'lib', 'libmaskrcnn_benchmark_customops.so'))
| 22.333333
| 108
| 0.791045
| 20
| 134
| 4.95
| 0.75
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067164
| 134
| 5
| 109
| 26.8
| 0.792
| 0
| 0
| 0
| 0
| 0
| 0.276119
| 0.253731
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7c2182fc0b51f13b9bf5936c453f750c7f76a7c
| 353
|
py
|
Python
|
instagram_api/response/shared_follower.py
|
Yuego/instagram_api
|
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
|
[
"MIT"
] | 13
|
2019-08-07T21:24:34.000Z
|
2020-12-12T12:23:50.000Z
|
instagram_api/response/shared_follower.py
|
Yuego/instagram_api
|
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
|
[
"MIT"
] | null | null | null |
instagram_api/response/shared_follower.py
|
Yuego/instagram_api
|
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
|
[
"MIT"
] | null | null | null |
from .mapper import ApiResponse, ApiResponseInterface
from .mapper.types import Timestamp, AnyType
from .model import SharedFollower
# Only the concrete response class is exported; the interface class is
# internal plumbing for the mapper.
__all__ = ['SharedFollowersResponse']
class SharedFollowersResponseInterface(ApiResponseInterface):
    """Field declarations for a shared-followers API response.

    NOTE(review): ``users: [SharedFollower]`` is a list literal, not a
    standard type annotation; it appears to be the mapper package's own
    list-of-model convention and is presumably introspected at runtime --
    confirm with the mapper before normalizing it.
    """
    users: [SharedFollower]
class SharedFollowersResponse(ApiResponse, SharedFollowersResponseInterface):
    """Concrete response combining ApiResponse behavior with the field interface."""
    pass
| 25.214286
| 77
| 0.832861
| 27
| 353
| 10.740741
| 0.592593
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107649
| 353
| 13
| 78
| 27.153846
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0.065156
| 0.065156
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0.375
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
b7e83fb09357912c50608378e768f6d3cc52e8d3
| 133
|
py
|
Python
|
src/badd/__init__.py
|
wksmirnowa/badd
|
82bc0c2d70b2334f230ed178bf3c037940b9eb37
|
[
"MIT"
] | 1
|
2021-05-20T12:32:06.000Z
|
2021-05-20T12:32:06.000Z
|
src/badd/__init__.py
|
wksmirnowa/badd
|
82bc0c2d70b2334f230ed178bf3c037940b9eb37
|
[
"MIT"
] | null | null | null |
src/badd/__init__.py
|
wksmirnowa/badd
|
82bc0c2d70b2334f230ed178bf3c037940b9eb37
|
[
"MIT"
] | null | null | null |
from .ad import AdDetector
from .toxic import ToxicDetector
from .obscene import ObsceneDetector
from .file_loader import FileLoader
| 26.6
| 36
| 0.849624
| 17
| 133
| 6.588235
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 133
| 4
| 37
| 33.25
| 0.957265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7f3569412a072d13463d53f33da84724b1eeae7
| 306
|
py
|
Python
|
Darlington/phase2/STRINGS/day 30 solution/qtn8.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 6
|
2020-05-23T19:53:25.000Z
|
2021-05-08T20:21:30.000Z
|
Darlington/phase2/STRINGS/day 30 solution/qtn8.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 8
|
2020-05-14T18:53:12.000Z
|
2020-07-03T00:06:20.000Z
|
Darlington/phase2/STRINGS/day 30 solution/qtn8.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 39
|
2020-05-10T20:55:02.000Z
|
2020-09-12T17:40:59.000Z
|
# Function to get a string made of the first three characters of a specified
# string. If the length of the string is less than 3, the original string is
# returned unchanged.
def first_three(str):
    """Return the first three characters of *str* (the whole string if shorter).

    Python slicing already clamps to the string length, so ``str[:3]``
    returns the original string whenever ``len(str) <= 3`` -- the previous
    ``if len(str) > 3`` conditional was redundant.

    NOTE(review): the parameter name shadows the builtin ``str``; kept
    unchanged so keyword callers are not broken.
    """
    return str[:3]

print(first_three('ipy'))
print(first_three('python'))
print(first_three('py'))
| 38.25
| 84
| 0.748366
| 55
| 306
| 4.090909
| 0.545455
| 0.222222
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011538
| 0.150327
| 306
| 8
| 85
| 38.25
| 0.853846
| 0.519608
| 0
| 0
| 0
| 0
| 0.075342
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0.2
| 0.4
| 0.6
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 5
|
4d2b73f670bad5a67e072f219f59727120f890db
| 1,336
|
py
|
Python
|
test.py
|
Taimoor55/TrustPrime
|
362320b7f31b94a9dac8055bd4d459c75bae16af
|
[
"MIT"
] | null | null | null |
test.py
|
Taimoor55/TrustPrime
|
362320b7f31b94a9dac8055bd4d459c75bae16af
|
[
"MIT"
] | null | null | null |
test.py
|
Taimoor55/TrustPrime
|
362320b7f31b94a9dac8055bd4d459c75bae16af
|
[
"MIT"
] | null | null | null |
# Scratch/experiment script (notebook residue): everything except the two
# imports and the dlib install is commented-out exploration code.
# from osgeo import ogr
# wkt1 = "POLYGON ((1208064.271243039 624154.6783778917, 1208064.271243039 601260.9785661874, 1231345.9998651114 601260.9785661874, 1231345.9998651114 624154.6783778917, 1208064.271243039 624154.6783778917))"
# wkt2 = "POLYGON ((1199915.6662253144 633079.3410163528, 1199915.6662253144 614453.958118695, 1219317.1067437078 614453.958118695, 1219317.1067437078 633079.3410163528, 1199915.6662253144 633079.3410163528)))"
# poly1 = ogr.CreateGeometryFromWkt(wkt1)
# poly2 = ogr.CreateGeometryFromWkt(wkt2)
# intersection = poly1.Intersection(poly2)
# print(intersection.ExportToWkt())
# import shapely
from shapely.geometry import Polygon
import numpy as np
# [280,100],[280,200],[405,200],[405,100]
#check collision
# pts = np.array([[280,100],[280,200],[405,200],[405,100]], np.int32)
# p1=Polygon(pts)
# p2=Polygon([[280,100],[280,200],[405,200],[405,150]])
# p3=p1.intersection(p2)
# print(p3) # result: POLYGON ((0.5 0.5, 1 1, 1 0, 0.5 0.5))
# print(p3.area) # result: 0.25
# get axis
# import numpy as np
# import matplotlib.pyplot as plt
# import mpldatacursor
# import cv2
# data = cv2.imread("../images/illegal.jpg")
# fig, ax = plt.subplots()
# ax.imshow(data, interpolation='none')
# mpldatacursor.datacursor(hover=True, bbox=dict(alpha=1, fc='w'))
# plt.show()
# BUGFIX: "!pip install dlib" is IPython/notebook magic syntax and is a
# SyntaxError in a plain .py module. Install via pip in a subprocess instead
# (argument list, shell=False, against the current interpreter).
import subprocess
import sys
subprocess.check_call([sys.executable, "-m", "pip", "install", "dlib"])
import dlib
| 31.809524
| 210
| 0.732036
| 183
| 1,336
| 5.344262
| 0.453552
| 0.03681
| 0.027607
| 0.03681
| 0.070552
| 0.070552
| 0.070552
| 0.04908
| 0
| 0
| 0
| 0.362416
| 0.107784
| 1,336
| 42
| 211
| 31.809524
| 0.458054
| 0.886228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.75
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4d7223b365c36d9857eed3af00eb45c4b85adb49
| 15,486
|
py
|
Python
|
sample-input/benchmarks/c5g7/c5g7.py
|
luisarose/OpenMOC
|
0617c22c0e0aee32c6c972000f8b399f6c53a51f
|
[
"MIT"
] | null | null | null |
sample-input/benchmarks/c5g7/c5g7.py
|
luisarose/OpenMOC
|
0617c22c0e0aee32c6c972000f8b399f6c53a51f
|
[
"MIT"
] | null | null | null |
sample-input/benchmarks/c5g7/c5g7.py
|
luisarose/OpenMOC
|
0617c22c0e0aee32c6c972000f8b399f6c53a51f
|
[
"MIT"
] | null | null | null |
from openmoc import *
import openmoc.log as log
import openmoc.plotter as plotter
import openmoc.materialize as materialize
from openmoc.options import Options
###############################################################################
####################### Main Simulation Parameters ########################
###############################################################################
# Pull runtime parameters (thread count, ray tracing resolution, convergence
# criteria) from the command line via OpenMOC's Options helper.
options = Options()
num_threads = options.getNumThreads()
track_spacing = options.getTrackSpacing()
num_azim = options.getNumAzimAngles()
tolerance = options.getTolerance()
max_iters = options.getMaxIterations()
log.set_log_level('NORMAL')
log.py_printf('TITLE', 'Simulating the OECD\'s C5G7 Benchmark Problem...')
###############################################################################
########################### Creating Materials ############################
###############################################################################
log.py_printf('NORMAL', 'Importing materials data from HDF5...')
# Material cross sections come from the benchmark's HDF5 file; look up each
# material's id for use when building cells below.
materials = materialize.materialize('../../c5g7-materials.h5')
uo2_id = materials['UO2'].getId()
mox43_id = materials['MOX-4.3%'].getId()
mox7_id = materials['MOX-7%'].getId()
mox87_id = materials['MOX-8.7%'].getId()
guide_tube_id = materials['Guide Tube'].getId()
fiss_id = materials['Fission Chamber'].getId()
water_id = materials['Water'].getId()
###############################################################################
########################### Creating Surfaces #############################
###############################################################################
log.py_printf('NORMAL', 'Creating surfaces...')
# Four bounding planes for the core plus three concentric circles shared by
# every pin cell (fuel radius and two thin annular rings of moderator).
circles = []
planes = []
planes.append(XPlane(x=-32.13))
planes.append(XPlane(x=32.13))
planes.append(YPlane(y=-32.13))
planes.append(YPlane(y=32.13))
circles.append(Circle(x=0., y=0., radius=0.54))
circles.append(Circle(x=0., y=0., radius=0.58))
circles.append(Circle(x=0., y=0., radius=0.62))
# Reflective on two adjacent sides, vacuum on the other two (quarter-core
# symmetry of the C5G7 benchmark).
planes[0].setBoundaryType(REFLECTIVE)
planes[1].setBoundaryType(VACUUM)
planes[2].setBoundaryType(VACUUM)
planes[3].setBoundaryType(REFLECTIVE)
###############################################################################
############################# Creating Cells ##############################
###############################################################################
log.py_printf('NORMAL', 'Creating cells...')
cells = []
# Each pin type below follows the same pattern: one fuel cell inside the
# innermost circle, then three annular water cells between the circles;
# addSurface(-1/+1, s) places the cell on the negative/positive side of s.
# UO2 pin cells
cells.append(CellBasic(universe=1, material=uo2_id, rings=3, sectors=8))
cells.append(CellBasic(universe=1, material=water_id, sectors=8))
cells.append(CellBasic(universe=1, material=water_id, sectors=8))
cells.append(CellBasic(universe=1, material=water_id, sectors=8))
cells[0].addSurface(-1, circles[0])
cells[1].addSurface(+1, circles[0])
cells[1].addSurface(-1, circles[1])
cells[2].addSurface(+1, circles[1])
cells[2].addSurface(-1, circles[2])
cells[3].addSurface(+1, circles[2])
# 4.3% MOX pin cells
cells.append(CellBasic(universe=2, material=mox43_id, rings=3, sectors=8))
cells.append(CellBasic(universe=2, material=water_id, sectors=8))
cells.append(CellBasic(universe=2, material=water_id, sectors=8))
cells.append(CellBasic(universe=2, material=water_id, sectors=8))
cells[4].addSurface(-1, circles[0])
cells[5].addSurface(+1, circles[0])
cells[5].addSurface(-1, circles[1])
cells[6].addSurface(+1, circles[1])
cells[6].addSurface(-1, circles[2])
cells[7].addSurface(+1, circles[2])
# 7% MOX pin cells
cells.append(CellBasic(universe=3, material=mox7_id, rings=3, sectors=8))
cells.append(CellBasic(universe=3, material=water_id, sectors=8))
cells.append(CellBasic(universe=3, material=water_id, sectors=8))
cells.append(CellBasic(universe=3, material=water_id, sectors=8))
cells[8].addSurface(-1, circles[0])
cells[9].addSurface(+1, circles[0])
cells[9].addSurface(-1, circles[1])
cells[10].addSurface(+1, circles[1])
cells[10].addSurface(-1, circles[2])
cells[11].addSurface(+1, circles[2])
# 8.7% MOX pin cells
cells.append(CellBasic(universe=4, material=mox87_id, rings=3, sectors=8))
cells.append(CellBasic(universe=4, material=water_id, sectors=8))
cells.append(CellBasic(universe=4, material=water_id, sectors=8))
cells.append(CellBasic(universe=4, material=water_id, sectors=8))
cells[12].addSurface(-1, circles[0])
cells[13].addSurface(+1, circles[0])
cells[13].addSurface(-1, circles[1])
cells[14].addSurface(+1, circles[1])
cells[14].addSurface(-1, circles[2])
cells[15].addSurface(+1, circles[2])
# Fission chamber pin cells
cells.append(CellBasic(universe=5, material=fiss_id, rings=3, sectors=8))
cells.append(CellBasic(universe=5, material=water_id, sectors=8))
cells.append(CellBasic(universe=5, material=water_id, sectors=8))
cells.append(CellBasic(universe=5, material=water_id, sectors=8))
cells[16].addSurface(-1, circles[0])
cells[17].addSurface(+1, circles[0])
cells[17].addSurface(-1, circles[1])
cells[18].addSurface(+1, circles[1])
cells[18].addSurface(-1, circles[2])
cells[19].addSurface(+1, circles[2])
# Guide tube pin cells
cells.append(CellBasic(universe=6, material=guide_tube_id, rings=3, sectors=8))
cells.append(CellBasic(universe=6, material=water_id, sectors=8))
cells.append(CellBasic(universe=6, material=water_id, sectors=8))
cells.append(CellBasic(universe=6, material=water_id, sectors=8))
cells[20].addSurface(-1, circles[0])
cells[21].addSurface(+1, circles[0])
cells[21].addSurface(-1, circles[1])
cells[22].addSurface(+1, circles[1])
cells[22].addSurface(-1, circles[2])
cells[23].addSurface(+1, circles[2])
# Moderator cell
cells.append(CellBasic(universe=7, material=water_id))
# CellFill cells embed one universe (a lattice defined below) inside another.
# Top left, bottom right lattice
cells.append(CellFill(universe=10, universe_fill=20))
# Top right, bottom left lattice
cells.append(CellFill(universe=11, universe_fill=21))
# Moderator lattice - semi-finely spaced
cells.append(CellFill(universe=12, universe_fill=23))
# Moderator lattice - bottom of geometry
cells.append(CellFill(universe=13, universe_fill=24))
# Moderator lattice - bottom corner of geometry
cells.append(CellFill(universe=14, universe_fill=25))
# Moderator lattice right side of geometry
cells.append(CellFill(universe=15, universe_fill=26))
# Full geometry
cells.append(CellFill(universe=0, universe_fill=30))
# Bound the full-geometry cell by the four outer planes created above.
cells[-1].addSurface(+1, planes[0])
cells[-1].addSurface(-1, planes[1])
cells[-1].addSurface(+1, planes[2])
cells[-1].addSurface(-1, planes[3])
###############################################################################
########################### Creating Lattices #############################
###############################################################################
log.py_printf('NORMAL', 'Creating lattices...')
# Each setLatticeCells table is a row-major grid of universe ids referencing
# the pin-cell universes defined above (1-7) or the moderator fills (12).
lattices = []
# Top left, bottom right 17 x 17 assemblies
lattices.append(Lattice(id=20, width_x=1.26, width_y=1.26))
lattices[-1].setLatticeCells(
[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1, 1, 1, 1],
[1, 1, 1, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 6, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 6, 1, 1, 6, 1, 1, 5, 1, 1, 6, 1, 1, 6, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 6, 1, 1, 1],
[1, 1, 1, 1, 1, 6, 1, 1, 6, 1, 1, 6, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
# Top right, bottom left 17 x 17 assemblies
lattices.append(Lattice(id=21, width_x=1.26, width_y=1.26))
lattices[-1].setLatticeCells(
[[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2],
[2, 3, 3, 3, 3, 6, 3, 3, 6, 3, 3, 6, 3, 3, 3, 3, 2],
[2, 3, 3, 6, 3, 4, 4, 4, 4, 4, 4, 4, 3, 6, 3, 3, 2],
[2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 2],
[2, 3, 6, 4, 4, 6, 4, 4, 6, 4, 4, 6, 4, 4, 6, 3, 2],
[2, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2],
[2, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2],
[2, 3, 6, 4, 4, 6, 4, 4, 5, 4, 4, 6, 4, 4, 6, 3, 2],
[2, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2],
[2, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 2],
[2, 3, 6, 4, 4, 6, 4, 4, 6, 4, 4, 6, 4, 4, 6, 3, 2],
[2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 2],
[2, 3, 3, 6, 3, 4, 4, 4, 4, 4, 4, 4, 3, 6, 3, 3, 2],
[2, 3, 3, 3, 3, 6, 3, 3, 6, 3, 3, 6, 3, 3, 3, 3, 2],
[2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]])
# Sliced up water cells - semi finely spaced
lattices.append(Lattice(id=23, width_x=0.126, width_y=0.126))
lattices[-1].setLatticeCells(
[[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7]])
# Sliced up water cells - right side of geometry
lattices.append(Lattice(id=26, width_x=1.26, width_y=1.26))
lattices[-1].setLatticeCells(
[[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7]])
# Sliced up water cells for bottom corner of geometry
lattices.append(Lattice(id=25, width_x=1.26, width_y=1.26))
lattices[-1].setLatticeCells(
[[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7]])
# Sliced up water cells for bottom of geometry
lattices.append(Lattice(id=24, width_x=1.26, width_y=1.26))
lattices[-1].setLatticeCells(
[[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7],
[7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7]])
# 4 x 4 core to represent two bundles and water
lattices.append(Lattice(id=30, width_x=21.42, width_y=21.42))
lattices[-1].setLatticeCells([[10, 11, 15],
[11, 10, 15],
[13, 13, 14]])
###############################################################################
########################## Creating the Geometry ##########################
###############################################################################
log.py_printf('NORMAL', 'Creating geometry...')
# Register everything built above with the Geometry, then discretize it into
# flat source regions for the MOC solve.
geometry = Geometry()
for material in materials.values(): geometry.addMaterial(material)
for cell in cells: geometry.addCell(cell)
for lattice in lattices: geometry.addLattice(lattice)
geometry.initializeFlatSourceRegions()
###############################################################################
######################## Creating the TrackGenerator ######################
###############################################################################
log.py_printf('NORMAL', 'Initializing the track generator...')
# Ray-trace characteristic tracks across the geometry at the requested
# azimuthal angle count and spacing.
track_generator = TrackGenerator(geometry, num_azim, track_spacing)
track_generator.generateTracks()
###############################################################################
########################### Running a Simulation ##########################
###############################################################################
# Converge the fission source on the CPU solver using the CLI-provided
# tolerance, thread count and iteration cap.
solver = CPUSolver(geometry, track_generator)
solver.setSourceConvergenceThreshold(tolerance)
solver.setNumThreads(num_threads)
solver.convergeSource(max_iters)
solver.printTimerReport()
###############################################################################
############################ Generating Plots #############################
###############################################################################
log.py_printf('NORMAL', 'Plotting data...')
# Optional visualizations -- left disabled by the original author.
#plotter.plot_tracks(track_generator)
#plotter.plot_materials(geometry, gridsize=500)
#plotter.plot_cells(geometry, gridsize=500)
#plotter.plot_flat_source_regions(geometry, gridsize=500)
#plotter.plot_fluxes(geometry, solver, energy_groups=[1,2,3,4,5,6,7])
log.py_printf('TITLE', 'Finished')
| 42.543956
| 79
| 0.505618
| 2,693
| 15,486
| 2.875603
| 0.064241
| 0.240702
| 0.338585
| 0.421488
| 0.681173
| 0.626679
| 0.578642
| 0.573605
| 0.541581
| 0.430527
| 0
| 0.191685
| 0.178355
| 15,486
| 363
| 80
| 42.661157
| 0.416929
| 0.075229
| 0
| 0.456067
| 0
| 0.451883
| 0.026556
| 0.001856
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025105
| 0
| 0.025105
| 0.041841
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d91378c76042a2cd24840b57c647d78f1bc718a
| 21
|
py
|
Python
|
hello_world.py
|
heethanjan/webpage-jobseeker
|
e16e4d9db36247409ad433cd5298c8201b6f205f
|
[
"MIT"
] | null | null | null |
hello_world.py
|
heethanjan/webpage-jobseeker
|
e16e4d9db36247409ad433cd5298c8201b6f205f
|
[
"MIT"
] | 8
|
2019-12-05T00:00:16.000Z
|
2022-02-10T09:38:48.000Z
|
hello_world.py
|
kamal2222ahmed/profiles-rest-api
|
509245d41a2687b8d0db9c6f555c560eb9b59d74
|
[
"MIT"
] | null | null | null |
print("hello_world")
| 10.5
| 20
| 0.761905
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 21
| 1
| 21
| 21
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4da9e0f30cebab380dcb9439dd94fc8d3e734779
| 103
|
py
|
Python
|
162.py
|
RafaelHuang87/Leet-Code-Practice
|
7754dcee38ffda18a5759113ef06d7becf4fe728
|
[
"MIT"
] | null | null | null |
162.py
|
RafaelHuang87/Leet-Code-Practice
|
7754dcee38ffda18a5759113ef06d7becf4fe728
|
[
"MIT"
] | null | null | null |
162.py
|
RafaelHuang87/Leet-Code-Practice
|
7754dcee38ffda18a5759113ef06d7becf4fe728
|
[
"MIT"
] | null | null | null |
class Solution:
def findPeakElement(self, nums: [int]) -> int:
return nums.index(max(nums))
| 34.333333
| 50
| 0.650485
| 13
| 103
| 5.153846
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203884
| 103
| 3
| 51
| 34.333333
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4db54393516a1ff2c7212a3872e284550cd14714
| 184
|
py
|
Python
|
auth_manager/apps.py
|
monstrenyatko/butler-api
|
842cf16212ba9fdb6943b5ecd488bf0ca57acf84
|
[
"MIT"
] | null | null | null |
auth_manager/apps.py
|
monstrenyatko/butler-api
|
842cf16212ba9fdb6943b5ecd488bf0ca57acf84
|
[
"MIT"
] | null | null | null |
auth_manager/apps.py
|
monstrenyatko/butler-api
|
842cf16212ba9fdb6943b5ecd488bf0ca57acf84
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class AuthManagerConfig(AppConfig):
    """Django application configuration for the ``auth_manager`` app."""

    # App label and human-readable name shown in the Django admin.
    name = 'auth_manager'
    verbose_name = 'auth manager'

    def ready(self):
        # Import at ready() time so the receivers defined in ``signals`` get
        # registered once the app registry is fully loaded (importing at
        # module level would run before models are ready).
        from . import signals
| 18.4
| 35
| 0.695652
| 21
| 184
| 6
| 0.714286
| 0.126984
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228261
| 184
| 9
| 36
| 20.444444
| 0.887324
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4db73f3fc65a5de12bb2401fd3d72a8deddf3030
| 181
|
py
|
Python
|
miner/coinslib/Command.py
|
JesseEmond/csgains
|
c7ea6d48522a8e46a2a32cdfd0f50307cafa61ca
|
[
"MIT"
] | 11
|
2017-03-28T03:28:30.000Z
|
2019-06-02T05:25:02.000Z
|
miner/coinslib/Command.py
|
JesseEmond/csgains
|
c7ea6d48522a8e46a2a32cdfd0f50307cafa61ca
|
[
"MIT"
] | 2
|
2017-06-21T20:17:10.000Z
|
2017-06-21T20:17:52.000Z
|
miner/coinslib/Command.py
|
JesseEmond/csgains
|
c7ea6d48522a8e46a2a32cdfd0f50307cafa61ca
|
[
"MIT"
] | null | null | null |
import json
class Command:
    """A named command with an arguments mapping, serializable to JSON."""

    def __init__(self, name, args=None):
        """Store the command *name* and its *args* mapping.

        Fixed: the original signature used the mutable default ``args={}``,
        which shares one dict across every instance created without explicit
        args; ``None`` sentinel gives each instance a fresh dict.
        """
        self.name = name
        self.args = {} if args is None else args

    def to_json(self):
        """Serialize the instance attributes as a JSON object string."""
        return json.dumps(self.__dict__)
| 20.111111
| 40
| 0.607735
| 24
| 181
| 4.208333
| 0.541667
| 0.158416
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.281768
| 181
| 9
| 40
| 20.111111
| 0.776923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4dd7451bbb14e4ade7345d77739c24fa7c07e7a6
| 126
|
py
|
Python
|
rretry/lib.py
|
afdaniele/run-and-retry
|
fdfd0e77160fa2b751b80d93326288915bcaf1ef
|
[
"MIT"
] | null | null | null |
rretry/lib.py
|
afdaniele/run-and-retry
|
fdfd0e77160fa2b751b80d93326288915bcaf1ef
|
[
"MIT"
] | null | null | null |
rretry/lib.py
|
afdaniele/run-and-retry
|
fdfd0e77160fa2b751b80d93326288915bcaf1ef
|
[
"MIT"
] | null | null | null |
class AttemptResults(list):
    """A list of per-attempt results, pre-filled with 'ND' placeholders."""

    def __init__(self, tries):
        """Initialize with *tries* 'ND' entries.

        Fixed: the original ``return``ed the result of ``extend`` from
        ``__init__`` (works only because ``extend`` returns None); initialize
        the ``list`` base class directly instead, which is both correct and
        idiomatic.
        """
        super().__init__(['ND'] * tries)
| 25.2
| 65
| 0.674603
| 14
| 126
| 5.785714
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18254
| 126
| 4
| 66
| 31.5
| 0.786408
| 0
| 0
| 0
| 0
| 0
| 0.016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4de8c3319ee16db03b84f478040f7f39f9eddbfa
| 121
|
py
|
Python
|
lexer/__init__.py
|
brian-joseph-petersen/oply
|
b30212492c7657903a88d1026b17beda0a1b9ce3
|
[
"MIT"
] | null | null | null |
lexer/__init__.py
|
brian-joseph-petersen/oply
|
b30212492c7657903a88d1026b17beda0a1b9ce3
|
[
"MIT"
] | null | null | null |
lexer/__init__.py
|
brian-joseph-petersen/oply
|
b30212492c7657903a88d1026b17beda0a1b9ce3
|
[
"MIT"
] | null | null | null |
t_ignore = " \t\r"
t_NUM = r"\d+"
from .Identifier import t_ID
from .LineFeed import t_NEWLINE
from .Error import t_error
| 24.2
| 31
| 0.743802
| 23
| 121
| 3.695652
| 0.521739
| 0.247059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14876
| 121
| 5
| 32
| 24.2
| 0.825243
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12a5a39dab3287221d130005860fefca3aa70eba
| 93
|
py
|
Python
|
Hello.py
|
ElliotEriksson/Hello_World
|
152ca186dfaf2f9304d6d019921f3caab99c3f76
|
[
"MIT"
] | null | null | null |
Hello.py
|
ElliotEriksson/Hello_World
|
152ca186dfaf2f9304d6d019921f3caab99c3f76
|
[
"MIT"
] | null | null | null |
Hello.py
|
ElliotEriksson/Hello_World
|
152ca186dfaf2f9304d6d019921f3caab99c3f76
|
[
"MIT"
] | null | null | null |
# Hello World
# My first python git repo
# Print a greeting only when executed as a script, not when imported.
if __name__ == "__main__":
    message = "Hello World"
    print(message)
| 18.6
| 26
| 0.677419
| 13
| 93
| 4.230769
| 0.846154
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204301
| 93
| 5
| 27
| 18.6
| 0.743243
| 0.387097
| 0
| 0
| 0
| 0
| 0.345455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
12a638cad8be2e5703ed253a2972e9d999042c05
| 106
|
py
|
Python
|
utils/__init__.py
|
omiderfanmanesh/dengue-infections-prediction
|
6b4e4aa4af6f6e2cc581fd7828634bbfdc446340
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
omiderfanmanesh/dengue-infections-prediction
|
6b4e4aa4af6f6e2cc581fd7828634bbfdc446340
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
omiderfanmanesh/dengue-infections-prediction
|
6b4e4aa4af6f6e2cc581fd7828634bbfdc446340
|
[
"Apache-2.0"
] | 1
|
2021-06-05T10:05:44.000Z
|
2021-06-05T10:05:44.000Z
|
# Copyright (c) 2021, Omid Erfanmanesh, All rights reserved.
from utils.runtime_mode import RuntimeMode
| 26.5
| 61
| 0.792453
| 14
| 106
| 5.928571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043956
| 0.141509
| 106
| 3
| 62
| 35.333333
| 0.868132
| 0.54717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12b55229e87c0ece73e45f7b2a0e15e3ba4f81be
| 112
|
py
|
Python
|
browser launch.py
|
Ian-Ratliff/Overwatch-league-viewer
|
9934aa0c960d0e4b7b04f0ebbf7a4d794cade2da
|
[
"MIT"
] | null | null | null |
browser launch.py
|
Ian-Ratliff/Overwatch-league-viewer
|
9934aa0c960d0e4b7b04f0ebbf7a4d794cade2da
|
[
"MIT"
] | 3
|
2020-07-17T18:03:49.000Z
|
2021-05-10T01:13:40.000Z
|
browser launch.py
|
Ian-Ratliff/Overwatch-league-viewer
|
9934aa0c960d0e4b7b04f0ebbf7a4d794cade2da
|
[
"MIT"
] | null | null | null |
import webbrowser
import selenium
webbrowser.open("https://twitch.tv/overwatchleague",new=1, autoraise=False)
| 18.666667
| 75
| 0.803571
| 14
| 112
| 6.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.071429
| 112
| 5
| 76
| 22.4
| 0.855769
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12cdf94ad804c013e5bd3980384a30beb95485d8
| 67
|
py
|
Python
|
shogun/dispatch/__init__.py
|
menpo/shogun
|
013baff88cb495e8f4314826865ec7e332859636
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2020-11-26T21:04:20.000Z
|
2020-11-26T21:04:20.000Z
|
shogun/dispatch/__init__.py
|
menpo/shogun
|
013baff88cb495e8f4314826865ec7e332859636
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
shogun/dispatch/__init__.py
|
menpo/shogun
|
013baff88cb495e8f4314826865ec7e332859636
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
import shogun.dispatch.concrete
from .registry import TypeRegistry
| 22.333333
| 34
| 0.865672
| 8
| 67
| 7.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 67
| 2
| 35
| 33.5
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12db2b68e8891b908b2bbc1f16301e73d0c20bef
| 270
|
py
|
Python
|
urlencode.py
|
66095021/handy_code
|
60654326378e60c4bc4f7fd9ab386bb64653abb9
|
[
"MIT"
] | null | null | null |
urlencode.py
|
66095021/handy_code
|
60654326378e60c4bc4f7fd9ab386bb64653abb9
|
[
"MIT"
] | null | null | null |
urlencode.py
|
66095021/handy_code
|
60654326378e60c4bc4f7fd9ab386bb64653abb9
|
[
"MIT"
] | null | null | null |
#!/bin/env python
# -*- coding:utf-8 -*-
import urllib
# use %20 as space
x = urllib.quote("test 我们")
# use + as space
y = urllib.quote_plus("test 我们")
print urllib.unquote(x)
print urllib.unquote(y)
print urllib.unquote_plus(x)
print urllib.unquote_plus(y)
print x, y
| 19.285714
| 32
| 0.703704
| 47
| 270
| 3.978723
| 0.425532
| 0.235294
| 0.385027
| 0.203209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012931
| 0.140741
| 270
| 13
| 33
| 20.769231
| 0.793103
| 0.255556
| 0
| 0
| 0
| 0
| 0.071066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.125
| null | null | 0.625
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
420ea2098e3026a781ea8b66e1f483bb7494e1bc
| 39
|
py
|
Python
|
deeds/__init__.py
|
wiktorowski211/deeds-registration
|
4acec209b9d0ee322d3e71a2be5c3b8bf2050dfb
|
[
"MIT"
] | null | null | null |
deeds/__init__.py
|
wiktorowski211/deeds-registration
|
4acec209b9d0ee322d3e71a2be5c3b8bf2050dfb
|
[
"MIT"
] | null | null | null |
deeds/__init__.py
|
wiktorowski211/deeds-registration
|
4acec209b9d0ee322d3e71a2be5c3b8bf2050dfb
|
[
"MIT"
] | null | null | null |
from .registration import registration
| 19.5
| 38
| 0.871795
| 4
| 39
| 8.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
421f25d2bf7375b29ba5f3857af175e8051d2a8d
| 182
|
py
|
Python
|
install.py
|
B3ND1X/py-air-script
|
d6756cc2b5ec2a7e7950b13b09c78c776488fd6e
|
[
"Apache-2.0"
] | 2
|
2021-11-19T10:40:07.000Z
|
2022-02-28T16:39:49.000Z
|
install.py
|
B3ND1X/py-air-script
|
d6756cc2b5ec2a7e7950b13b09c78c776488fd6e
|
[
"Apache-2.0"
] | null | null | null |
install.py
|
B3ND1X/py-air-script
|
d6756cc2b5ec2a7e7950b13b09c78c776488fd6e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
import os
os.system("sudo apt-get update")
os.system("sudo apt-get install aircrack-ng")
os.system("sudo apt install ethtool")
os.system("sudo apt install rfkill")
| 26
| 45
| 0.747253
| 31
| 182
| 4.387097
| 0.483871
| 0.235294
| 0.352941
| 0.441176
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098901
| 182
| 7
| 46
| 26
| 0.829268
| 0.087912
| 0
| 0
| 0
| 0
| 0.590361
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
424e1597ed935d855fe93e7585e97a3754868005
| 20
|
py
|
Python
|
catboost/python-package/catboost/version.py
|
dzaytsev91/catboost
|
c23174433aa20101ca10fa7dec729d86dfda5393
|
[
"Apache-2.0"
] | null | null | null |
catboost/python-package/catboost/version.py
|
dzaytsev91/catboost
|
c23174433aa20101ca10fa7dec729d86dfda5393
|
[
"Apache-2.0"
] | null | null | null |
catboost/python-package/catboost/version.py
|
dzaytsev91/catboost
|
c23174433aa20101ca10fa7dec729d86dfda5393
|
[
"Apache-2.0"
] | null | null | null |
VERSION = '0.9.1.1'
| 10
| 19
| 0.55
| 5
| 20
| 2.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0.15
| 20
| 1
| 20
| 20
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
428b66cb86f706323c2c8b39e9274d45e431f3e0
| 332
|
py
|
Python
|
example/example/core/views.py
|
nicokant/django-htmx
|
4e5b9875404d34dc2e9386b5de8410f354980605
|
[
"MIT"
] | null | null | null |
example/example/core/views.py
|
nicokant/django-htmx
|
4e5b9875404d34dc2e9386b5de8410f354980605
|
[
"MIT"
] | null | null | null |
example/example/core/views.py
|
nicokant/django-htmx
|
4e5b9875404d34dc2e9386b5de8410f354980605
|
[
"MIT"
] | null | null | null |
import time
from django.shortcuts import render
from django.views.decorators.http import require_http_methods
def index(request):
return render(request, "index.html")
@require_http_methods(["DELETE", "POST", "PUT"])
def attribute_test(request):
return render(request, "attribute_test.html", {"timestamp": time.time()})
| 23.714286
| 77
| 0.753012
| 43
| 332
| 5.674419
| 0.511628
| 0.081967
| 0.147541
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114458
| 332
| 13
| 78
| 25.538462
| 0.829932
| 0
| 0
| 0
| 0
| 0
| 0.153614
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.375
| 0.25
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.