hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
423f200f8346a97be5eb36c1bf5e07b4b200cdb8
| 120
|
py
|
Python
|
jigsaw/datasets/__init__.py
|
alexvishnevskiy/jigsaw
|
7fc2c4cd3700a54e9c5cbc02870bf4057b0a9fe3
|
[
"MIT"
] | null | null | null |
jigsaw/datasets/__init__.py
|
alexvishnevskiy/jigsaw
|
7fc2c4cd3700a54e9c5cbc02870bf4057b0a9fe3
|
[
"MIT"
] | null | null | null |
jigsaw/datasets/__init__.py
|
alexvishnevskiy/jigsaw
|
7fc2c4cd3700a54e9c5cbc02870bf4057b0a9fe3
|
[
"MIT"
] | null | null | null |
from .dataloaders import get_paired_loader, get_regression_loader
from .datasets import PairedDataset, RegressionDataset
| 60
| 65
| 0.891667
| 14
| 120
| 7.357143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 120
| 2
| 66
| 60
| 0.927928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
426678e9b7ed85af96d3b5d6b6d33ec70c60e2dd
| 3,525
|
py
|
Python
|
tests/response/unmarshal_response_test.py
|
Timothyyung/bravado-core
|
baaef12d2908f803deed4e220e6bdaec4ee6e029
|
[
"BSD-3-Clause"
] | null | null | null |
tests/response/unmarshal_response_test.py
|
Timothyyung/bravado-core
|
baaef12d2908f803deed4e220e6bdaec4ee6e029
|
[
"BSD-3-Clause"
] | null | null | null |
tests/response/unmarshal_response_test.py
|
Timothyyung/bravado-core
|
baaef12d2908f803deed4e220e6bdaec4ee6e029
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import msgpack
import pytest
from mock import Mock
from mock import patch
from bravado_core.content_type import APP_JSON
from bravado_core.content_type import APP_MSGPACK
from bravado_core.response import IncomingResponse
from bravado_core.response import unmarshal_response
@pytest.fixture
def response_spec():
return {
'description': "Day of the week",
'schema': {
'type': 'string',
}
}
def test_no_content(empty_swagger_spec):
response_spec = {
'description': "I don't have a 'schema' key so I return nothing",
}
response = Mock(spec=IncomingResponse, status_code=200)
with patch('bravado_core.response.get_response_spec') as m:
m.return_value = response_spec
op = Mock(swagger_spec=empty_swagger_spec)
result = unmarshal_response(response, op)
assert result is None
def test_json_content(empty_swagger_spec, response_spec):
response = Mock(
spec=IncomingResponse,
status_code=200,
headers={'content-type': APP_JSON},
json=Mock(return_value='Monday'))
with patch('bravado_core.response.get_response_spec') as m:
m.return_value = response_spec
op = Mock(swagger_spec=empty_swagger_spec)
assert 'Monday' == unmarshal_response(response, op)
def test_msgpack_content(empty_swagger_spec, response_spec):
message = 'Monday'
response = Mock(
spec=IncomingResponse,
status_code=200,
headers={'content-type': APP_MSGPACK},
raw_bytes=msgpack.dumps(message, use_bin_type=True))
with patch(
'bravado_core.response.get_response_spec',
return_value=response_spec,
):
op = Mock(swagger_spec=empty_swagger_spec)
assert message == unmarshal_response(response, op)
def test_text_content(empty_swagger_spec, response_spec):
response = Mock(
spec=IncomingResponse,
status_code=200,
headers={'content-type': 'text/plain'},
text='Monday')
with patch('bravado_core.response.get_response_spec') as m:
m.return_value = response_spec
op = Mock(swagger_spec=empty_swagger_spec)
assert 'Monday' == unmarshal_response(response, op)
def test_skips_validation(empty_swagger_spec, response_spec):
empty_swagger_spec.config['validate_responses'] = False
response = Mock(
spec=IncomingResponse,
status_code=200,
headers={'content-type': APP_JSON},
json=Mock(return_value='Monday'))
with patch('bravado_core.response.validate_schema_object') as val_schem:
with patch('bravado_core.response.get_response_spec') as get_resp:
get_resp.return_value = response_spec
op = Mock(swagger_spec=empty_swagger_spec)
unmarshal_response(response, op)
assert val_schem.call_count == 0
def test_performs_validation(empty_swagger_spec, response_spec):
empty_swagger_spec.config['validate_responses'] = True
response = Mock(
spec=IncomingResponse,
status_code=200,
headers={'content-type': APP_JSON},
json=Mock(return_value='Monday'))
with patch('bravado_core.response.validate_schema_object') as val_schem:
with patch('bravado_core.response.get_response_spec') as get_resp:
get_resp.return_value = response_spec
op = Mock(swagger_spec=empty_swagger_spec)
unmarshal_response(response, op)
assert val_schem.call_count == 1
| 32.638889
| 76
| 0.690496
| 435
| 3,525
| 5.291954
| 0.183908
| 0.095569
| 0.097307
| 0.069505
| 0.827976
| 0.788445
| 0.743267
| 0.69331
| 0.674631
| 0.674631
| 0
| 0.007609
| 0.217021
| 3,525
| 107
| 77
| 32.943925
| 0.826449
| 0.005957
| 0
| 0.505882
| 0
| 0
| 0.162764
| 0.091947
| 0
| 0
| 0
| 0
| 0.070588
| 1
| 0.082353
| false
| 0
| 0.094118
| 0.011765
| 0.188235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
42899ec66e011c19f37eef2e3d850d11a91ee85f
| 291
|
py
|
Python
|
setup.py
|
andres-jordan/PyARoME
|
9cf60eb0afcf5026c0ee3f8b1ae3b5c1bc6ebb89
|
[
"MIT"
] | null | null | null |
setup.py
|
andres-jordan/PyARoME
|
9cf60eb0afcf5026c0ee3f8b1ae3b5c1bc6ebb89
|
[
"MIT"
] | null | null | null |
setup.py
|
andres-jordan/PyARoME
|
9cf60eb0afcf5026c0ee3f8b1ae3b5c1bc6ebb89
|
[
"MIT"
] | null | null | null |
from distutils.core import setup, Extension
import numpy.distutils.misc_util
setup(
ext_modules=[Extension("pyarome", ["pyarome.c"],library_dirs=['/usr/local/lib/'], libraries=['arome'])],
include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs() + ['/usr/local/include/'],
)
| 36.375
| 108
| 0.738832
| 38
| 291
| 5.447368
| 0.578947
| 0.135266
| 0.173913
| 0.21256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 291
| 7
| 109
| 41.571429
| 0.775281
| 0
| 0
| 0
| 0
| 0
| 0.189003
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
42add9c908e1cb3037bebbce218123e48eab6123
| 12
|
py
|
Python
|
python/testData/testRunner/env/createConfigurationTest/tests_package/test_tools.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/testRunner/env/createConfigurationTest/tests_package/test_tools.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/testRunner/env/createConfigurationTest/tests_package/test_tools.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
ANSWER = 42
| 12
| 12
| 0.666667
| 2
| 12
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 0.25
| 12
| 1
| 12
| 12
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
35fce163d8479bd36084f5d7f023414627fcd092
| 34
|
py
|
Python
|
wx.py
|
XinnWang/MultiplayerTest
|
1c8c0dab99f4c399e0784de8a37c5f891ce9dc27
|
[
"MIT"
] | null | null | null |
wx.py
|
XinnWang/MultiplayerTest
|
1c8c0dab99f4c399e0784de8a37c5f891ce9dc27
|
[
"MIT"
] | null | null | null |
wx.py
|
XinnWang/MultiplayerTest
|
1c8c0dab99f4c399e0784de8a37c5f891ce9dc27
|
[
"MIT"
] | null | null | null |
wx = 1
xw = -1
print(wx)
print(xw)
| 8.5
| 9
| 0.588235
| 8
| 34
| 2.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0.205882
| 34
| 4
| 10
| 8.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
c410e2cb43fff2c5e9aeb6a296903930e03b5e8e
| 176
|
py
|
Python
|
business_rules/__init__.py
|
hpersonsz/business-rules
|
c6e1fac6e726436ed6cd613765ed98be72e46112
|
[
"MIT"
] | 10
|
2020-03-19T18:19:55.000Z
|
2020-07-13T16:18:28.000Z
|
business_rules/__init__.py
|
hpersonsz/business-rules
|
c6e1fac6e726436ed6cd613765ed98be72e46112
|
[
"MIT"
] | 1
|
2020-12-15T23:46:17.000Z
|
2020-12-15T23:46:17.000Z
|
business_rules/__init__.py
|
hpersonsz/business-rules
|
c6e1fac6e726436ed6cd613765ed98be72e46112
|
[
"MIT"
] | 2
|
2019-10-11T16:32:51.000Z
|
2019-11-04T20:44:46.000Z
|
__version__ = '1.0.1'
from .engine import run, run_all
from .utils import export_rule_data
# Appease pyflakes by "using" these exports
assert run_all
assert export_rule_data
| 19.555556
| 43
| 0.795455
| 29
| 176
| 4.482759
| 0.655172
| 0.092308
| 0.215385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019868
| 0.142045
| 176
| 8
| 44
| 22
| 0.84106
| 0.232955
| 0
| 0
| 0
| 0
| 0.037594
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c419071e0dc4ad21e2cf4d057b952dd70d50d839
| 846
|
py
|
Python
|
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GLES2/EXT/shader_pixel_local_storage.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GLES2/EXT/shader_pixel_local_storage.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GLES2/EXT/shader_pixel_local_storage.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GLES2 import _types as _cs
# End users want this...
from OpenGL.raw.GLES2._types import *
from OpenGL.raw.GLES2 import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GLES2_EXT_shader_pixel_local_storage'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GLES2,'GLES2_EXT_shader_pixel_local_storage',error_checker=_errors._error_checker)
GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_FAST_SIZE_EXT=_C('GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_FAST_SIZE_EXT',0x8F63)
GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_SIZE_EXT=_C('GL_MAX_SHADER_PIXEL_LOCAL_STORAGE_SIZE_EXT',0x8F67)
GL_SHADER_PIXEL_LOCAL_STORAGE_EXT=_C('GL_SHADER_PIXEL_LOCAL_STORAGE_EXT',0x8F64)
| 47
| 134
| 0.833333
| 133
| 846
| 4.781955
| 0.383459
| 0.138365
| 0.201258
| 0.289308
| 0.537736
| 0.421384
| 0.235849
| 0.235849
| 0.180818
| 0
| 0
| 0.02356
| 0.096927
| 846
| 17
| 135
| 49.764706
| 0.808901
| 0.118203
| 0
| 0
| 1
| 0
| 0.269071
| 0.269071
| 0
| 0
| 0.024965
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.5
| 0.083333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c422115eed6da2fe7e221812a846f20626abfb32
| 755
|
py
|
Python
|
PySRCG/src/Tabs/notebook_tab.py
|
apampuch/PySRCG
|
bb3777aed3517b473e5860336c015e2e8d0905e9
|
[
"MIT"
] | null | null | null |
PySRCG/src/Tabs/notebook_tab.py
|
apampuch/PySRCG
|
bb3777aed3517b473e5860336c015e2e8d0905e9
|
[
"MIT"
] | null | null | null |
PySRCG/src/Tabs/notebook_tab.py
|
apampuch/PySRCG
|
bb3777aed3517b473e5860336c015e2e8d0905e9
|
[
"MIT"
] | null | null | null |
from tkinter import ttk
import src.app_data as app_data
import abc
class NotebookTab(ttk.Frame):
"""Base tab that other tabs inherit from"""
def __init__(self, parent):
super().__init__(parent)
self.parent = parent
@property
def character(self):
return app_data.app_character
@property
def statblock(self):
return self.character.statblock
@property
def gen_mode(self):
return self.statblock.gen_mode
@property
def race(self):
return self.statblock.race
@abc.abstractmethod
def on_switch(self):
"""Called on tab switch."""
pass
@abc.abstractmethod
def load_character(self):
"""Called on character load.."""
pass
| 20.405405
| 47
| 0.634437
| 91
| 755
| 5.087912
| 0.395604
| 0.095032
| 0.090713
| 0.099352
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272848
| 755
| 36
| 48
| 20.972222
| 0.843352
| 0.113907
| 0
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.28
| false
| 0.08
| 0.12
| 0.16
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
c42502a2c343eb23d7f5a2f46276012a8555068a
| 10,771
|
py
|
Python
|
civis/tests/test_utils.py
|
mcoirad-gmmb/civis-python
|
4286da93496cc7d15d876c9001ea3aa1ad359972
|
[
"BSD-3-Clause"
] | 31
|
2016-11-14T14:26:24.000Z
|
2021-11-19T15:43:45.000Z
|
civis/tests/test_utils.py
|
mcoirad-gmmb/civis-python
|
4286da93496cc7d15d876c9001ea3aa1ad359972
|
[
"BSD-3-Clause"
] | 296
|
2016-11-11T20:52:59.000Z
|
2022-02-23T13:34:37.000Z
|
civis/tests/test_utils.py
|
mcoirad-gmmb/civis-python
|
4286da93496cc7d15d876c9001ea3aa1ad359972
|
[
"BSD-3-Clause"
] | 40
|
2016-11-11T20:48:13.000Z
|
2021-04-22T17:47:09.000Z
|
from unittest import mock
from requests import Request
from requests import ConnectionError, ConnectTimeout
from datetime import datetime
from math import floor
from civis._utils import camel_to_snake, to_camelcase, maybe_get_random_name
from civis._utils import retry
from civis._utils import retry_request
from civis.civis import RETRY_VERBS, RETRY_CODES, POST_RETRY_CODES
import pytest
def test_camel_to_snake():
test_cases = [
('CAMELCase', 'camel_case'),
('camelCase', 'camel_case'),
('CamelCase', 'camel_case'),
('c__amel', 'c__amel'),
]
for in_word, out_word in test_cases:
assert camel_to_snake(in_word) == out_word
def test_tocamlecase():
test_cases = [
('snake_case', 'SnakeCase'),
('Snake_Case', 'SnakeCase'),
('snakecase', 'Snakecase')
]
for in_word, out_word in test_cases:
assert to_camelcase(in_word) == out_word
@mock.patch('civis._utils.uuid')
def test_maybe_random_name_random(mock_uuid):
random_name = '11111'
mock_uuid.uuid4.return_value = mock.Mock(hex=random_name)
assert maybe_get_random_name(None) == random_name
def test_maybe_random_name_not_random():
given_name = '22222'
assert maybe_get_random_name(given_name) == given_name
def test_io_no_retry():
@retry(ConnectionError, retries=4, delay=0.1)
def succeeds():
counter['i'] += 1
return 'success'
counter = dict(i=0)
test_result = succeeds()
assert test_result == 'success'
assert counter['i'] == 1
def test_io_retry_once():
@retry(ConnectionError, retries=4, delay=0.1)
def fails_once():
counter['i'] += 1
if counter['i'] < 2:
raise ConnectionError('failed')
else:
return 'success'
counter = dict(i=0)
test_result = fails_once()
assert test_result == 'success'
assert counter['i'] == 2
def test_io_retry_limit_reached():
@retry(ConnectionError, retries=4, delay=0.1)
def always_fails():
counter['i'] += 1
raise ConnectionError('failed')
counter = dict(i=0)
pytest.raises(ConnectionError, always_fails)
assert counter['i'] == 5
def test_io_retry_multiple_exceptions():
@retry((ConnectionError, ConnectTimeout), retries=4, delay=0.1)
def raise_multiple_exceptions():
counter['i'] += 1
if counter['i'] == 1:
raise ConnectionError('one error')
elif counter['i'] == 2:
raise ConnectTimeout('another error')
else:
return 'success'
counter = dict(i=0)
test_result = raise_multiple_exceptions()
assert test_result == 'success'
assert counter['i'] == 3
def test_io_retry_unexpected_exception():
@retry(ConnectionError, retries=4, delay=0.1)
def raise_unexpected_error():
raise ValueError('unexpected error')
pytest.raises(ValueError, raise_unexpected_error)
@mock.patch('civis._utils.open_session')
def test_no_retry_on_success(mock_session):
expected_call_count = 0
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
for verb in RETRY_VERBS:
expected_call_count += 1
session_context.send.return_value.status_code = 200
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method=verb
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
retry_request(verb, pre_request, session_context, 3)
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_no_retry_on_get_no_retry_failure(mock_session):
expected_call_count = 0
max_calls = 3
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
for verb in RETRY_VERBS:
expected_call_count += 1
session_context.send.return_value.status_code = 403
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method=verb
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
retry_request(verb, pre_request, session_context, max_calls)
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_retry_on_retry_eligible_failures(mock_session):
expected_call_count = 0
max_calls = 3
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
for verb in RETRY_VERBS:
for code in RETRY_CODES:
expected_call_count += max_calls
session_context.send.return_value.status_code = code
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method=verb
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
retry_request(verb, pre_request, session_context, max_calls)
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_retry_on_retry_eligible_failures_lowercase_verbs(mock_session):
expected_call_count = 0
max_calls = 3
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
for verb in RETRY_VERBS:
for code in RETRY_CODES:
expected_call_count += max_calls
session_context.send.return_value.status_code = code
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method=verb.lower()
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
retry_request(verb, pre_request, session_context, max_calls)
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_no_retry_on_post_success(mock_session):
expected_call_count = 1
max_calls = 3
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
session_context.send.return_value.status_code = 200
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method='POST'
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
retry_request('post', pre_request, session_context, max_calls)
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_retry_on_retry_eligible_post_failures(mock_session):
expected_call_count = 0
max_calls = 3
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
for code in POST_RETRY_CODES:
expected_call_count += max_calls
session_context.send.return_value.status_code = code
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method='POST'
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
retry_request('post', pre_request, session_context, max_calls)
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_no_retry_on_connection_error(mock_session):
expected_call_count = 0
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
for verb in RETRY_VERBS:
expected_call_count += 1
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method=verb
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
session_context.send.side_effect = ConnectionError()
try:
retry_request(verb, pre_request, session_context, 3)
except ConnectionError:
pass
assert session_context.send.call_count == expected_call_count
@mock.patch('civis._utils.open_session')
def test_retry_respect_retry_after_headers(mock_session):
expected_call_count = 0
max_calls = 3
retry_after = 3
api_response = {'key': 'value'}
session_context = mock_session.return_value.__enter__.return_value
session_context.send.return_value.json.return_value = api_response
session_context.send.return_value.status_code = 429
session_context.send.return_value.headers = {
'Retry-After': str(retry_after)
}
for verb in ['HEAD', 'TRACE', 'GET', 'PUT', 'OPTIONS', 'DELETE', 'POST',
'head', 'trace', 'get', 'put', 'options', 'delete', 'post']:
expected_call_count += max_calls
request_info = dict(
params={'secondParameter': 'b', 'firstParameter': 'a'},
json={},
url='https://api.civisanalytics.com/wobble/wubble',
method=verb
)
request = Request(**request_info)
pre_request = session_context.prepare_request(request)
start_time = datetime.now().timestamp()
retry_request(verb, pre_request, session_context, max_calls)
end_time = datetime.now().timestamp()
duration = end_time - start_time
assert session_context.send.call_count == expected_call_count
assert floor(duration) == retry_after * (max_calls - 1)
| 32.838415
| 77
| 0.671525
| 1,298
| 10,771
| 5.218798
| 0.113251
| 0.10127
| 0.06643
| 0.056687
| 0.789489
| 0.753174
| 0.733392
| 0.703425
| 0.669472
| 0.627842
| 0
| 0.008838
| 0.222635
| 10,771
| 327
| 78
| 32.938838
| 0.800191
| 0
| 0
| 0.636364
| 0
| 0
| 0.112524
| 0.018568
| 0
| 0
| 0
| 0
| 0.079051
| 1
| 0.086957
| false
| 0.003953
| 0.039526
| 0
| 0.13834
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c42d6f43684008f287d7ce4b35bff396556f9fdb
| 56
|
py
|
Python
|
enthought/traits/protocols/protocols.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/traits/protocols/protocols.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/traits/protocols/protocols.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from traits.protocols.protocols import *
| 18.666667
| 40
| 0.803571
| 7
| 56
| 6.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 56
| 2
| 41
| 28
| 0.918367
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c475148a8b59e359497aeafc3aaf19bac9216463
| 345
|
py
|
Python
|
bob/bio/base/script/vulnerability.py
|
bioidiap/bob.bio.base
|
44b8d192e957eb328591c8110cf0113f602292ef
|
[
"BSD-3-Clause"
] | 16
|
2016-04-06T20:37:55.000Z
|
2019-10-19T08:06:25.000Z
|
bob/bio/base/script/vulnerability.py
|
bioidiap/bob.bio.base
|
44b8d192e957eb328591c8110cf0113f602292ef
|
[
"BSD-3-Clause"
] | 25
|
2015-07-04T17:41:40.000Z
|
2016-08-08T20:36:01.000Z
|
bob/bio/base/script/vulnerability.py
|
bioidiap/bob.bio.base
|
44b8d192e957eb328591c8110cf0113f602292ef
|
[
"BSD-3-Clause"
] | 7
|
2015-08-07T17:21:02.000Z
|
2018-08-13T15:51:54.000Z
|
"""The main entry for bob.vuln
"""
import click
import pkg_resources
from click_plugins import with_plugins
from bob.extension.scripts.click_helper import AliasedGroup
@with_plugins(pkg_resources.iter_entry_points("bob.vuln.cli"))
@click.group(cls=AliasedGroup)
def vulnerability():
"""Vulnerability analysis related commands."""
pass
| 24.642857
| 62
| 0.791304
| 46
| 345
| 5.76087
| 0.608696
| 0.05283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107246
| 345
| 13
| 63
| 26.538462
| 0.86039
| 0.197101
| 0
| 0
| 0
| 0
| 0.045283
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0.125
| 0.5
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
672b29247bdb23c1b5d7ff44c526e73b3f317b73
| 258
|
py
|
Python
|
nff/nn/activations.py
|
torchmd/mdgrad
|
77bd7685b74b41acf54a9483546e1e8cb545eb01
|
[
"MIT"
] | 54
|
2021-03-10T18:35:49.000Z
|
2022-03-28T13:54:47.000Z
|
nff/nn/activations.py
|
wwang2/torchmd
|
77bd7685b74b41acf54a9483546e1e8cb545eb01
|
[
"MIT"
] | 1
|
2021-03-17T07:01:02.000Z
|
2021-03-17T07:01:02.000Z
|
nff/nn/activations.py
|
torchmd/mdgrad
|
77bd7685b74b41acf54a9483546e1e8cb545eb01
|
[
"MIT"
] | 5
|
2021-06-08T02:44:35.000Z
|
2021-12-17T11:50:08.000Z
|
import numpy as np
import torch
import torch.nn.functional as F
class shifted_softplus(torch.nn.Module):
def __init__(self):
super(shifted_softplus, self).__init__()
def forward(self, input):
return F.softplus(input) - np.log(2.0)
| 21.5
| 48
| 0.697674
| 38
| 258
| 4.473684
| 0.578947
| 0.129412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.193798
| 258
| 11
| 49
| 23.454545
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.375
| 0.125
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
677835a10b20e16b04a4bd32546a25db85469aaa
| 317
|
py
|
Python
|
backend/appengine/routes/trabalheconosco/home.py
|
Joaohigor/JCUsinagem
|
c0e48129a9d1607fb896fe9d66975182b9402933
|
[
"MIT"
] | null | null | null |
backend/appengine/routes/trabalheconosco/home.py
|
Joaohigor/JCUsinagem
|
c0e48129a9d1607fb896fe9d66975182b9402933
|
[
"MIT"
] | null | null | null |
backend/appengine/routes/trabalheconosco/home.py
|
Joaohigor/JCUsinagem
|
c0e48129a9d1607fb896fe9d66975182b9402933
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
@login_not_required
@no_csrf
def index():
    """Render this route's default template (public page, CSRF check skipped)."""
    return TemplateResponse()
| 26.416667
| 57
| 0.794953
| 38
| 317
| 6.289474
| 0.631579
| 0.125523
| 0.133891
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003676
| 0.141956
| 317
| 11
| 58
| 28.818182
| 0.875
| 0.066246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0.5
| 0.125
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
678339361c98cc10642007d7c1d7f3cd2aaa14a8
| 164
|
py
|
Python
|
implicit/gpu/__init__.py
|
r-yanyo/implicit
|
47260275193a258cf5215d3a0cf1a201c4351817
|
[
"MIT"
] | null | null | null |
implicit/gpu/__init__.py
|
r-yanyo/implicit
|
47260275193a258cf5215d3a0cf1a201c4351817
|
[
"MIT"
] | null | null | null |
implicit/gpu/__init__.py
|
r-yanyo/implicit
|
47260275193a258cf5215d3a0cf1a201c4351817
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
# GPU capability probe: HAS_CUDA is True only when both cupy and the compiled
# ._cuda extension import successfully; any ImportError disables GPU support.
try:
    import cupy  # noqa
    from ._cuda import *  # noqa
    HAS_CUDA = True
except ImportError:
    HAS_CUDA = False
| 16.4
| 38
| 0.689024
| 21
| 164
| 5
| 0.619048
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.262195
| 164
| 9
| 39
| 18.222222
| 0.867769
| 0.054878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
678405ea012035ea0333925c235b4c9089c225c0
| 96
|
py
|
Python
|
research/utils/__init__.py
|
ZviBaratz/pylabber
|
35337284f3d0615249f642743b993b7dad407390
|
[
"Apache-2.0"
] | 3
|
2020-08-28T21:33:07.000Z
|
2021-07-19T17:52:17.000Z
|
research/utils/__init__.py
|
TheLabbingProject/pylabber
|
27d6073e7bde871c16912a8ea5e0e389711bbd9f
|
[
"Apache-2.0"
] | 74
|
2019-09-04T11:40:16.000Z
|
2022-01-03T19:43:04.000Z
|
research/utils/__init__.py
|
ZviBaratz/pylabber
|
35337284f3d0615249f642743b993b7dad407390
|
[
"Apache-2.0"
] | 3
|
2019-05-07T07:09:05.000Z
|
2019-08-30T15:40:47.000Z
|
from research.utils.utils import get_measurement_model, get_subject_model
# flake8: noqa: F401
| 24
| 73
| 0.833333
| 14
| 96
| 5.428571
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0.104167
| 96
| 3
| 74
| 32
| 0.837209
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6792ab9177e5f849432e7e1487362888318a09f4
| 92
|
py
|
Python
|
2014/11/student-debt/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
2014/11/student-debt/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | null | null | null |
2014/11/student-debt/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
# Key of the Google Doc this graphic's copy/text is pulled from.
COPY_GOOGLE_DOC_KEY = '14muD-D_AKwFCTeXRSoWx8pmbFXwQRdQjUFC6S0Uu_FA'
| 23
| 68
| 0.836957
| 12
| 92
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 0.065217
| 92
| 3
| 69
| 30.666667
| 0.77907
| 0.217391
| 0
| 0
| 0
| 0
| 0.619718
| 0.619718
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6797464b23377b7e21a8c42f7536e614dac9de93
| 250
|
py
|
Python
|
evaluate/forms.py
|
mohilkhare1708/descriptiveAnswerChecker
|
839404e807f884afd8b59e6f2eebfbc8b1189e83
|
[
"MIT"
] | 2
|
2021-02-15T20:50:47.000Z
|
2022-02-14T18:31:30.000Z
|
evaluate/forms.py
|
mohilkhare1708/descriptiveAnswerChecker
|
839404e807f884afd8b59e6f2eebfbc8b1189e83
|
[
"MIT"
] | null | null | null |
evaluate/forms.py
|
mohilkhare1708/descriptiveAnswerChecker
|
839404e807f884afd8b59e6f2eebfbc8b1189e83
|
[
"MIT"
] | 1
|
2022-01-11T15:10:50.000Z
|
2022-01-11T15:10:50.000Z
|
from django import forms
from evaluate.models import Test
class TestCreateForm(forms.ModelForm):
    """ModelForm for creating a Test, exposing its grading configuration fields."""
    class Meta:
        model = Test
        fields = ('test_name', 'total_marks', 'passing_marks', 'no_of_ans', 'model_answer_key', 'response_sheet', )
| 35.714286
| 115
| 0.712
| 32
| 250
| 5.3125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176
| 250
| 7
| 115
| 35.714286
| 0.825243
| 0
| 0
| 0
| 0
| 0
| 0.286853
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
67ac01198bc767cf744a6c219eb6fa8b2830b927
| 3,506
|
py
|
Python
|
active_directory_ldap/komand_active_directory_ldap/connection/connection.py
|
emartin-merrill-r7/insightconnect-plugins
|
a589745dbcc9f01d3e601431e77ab7221a84c117
|
[
"MIT"
] | 6
|
2020-11-10T03:07:00.000Z
|
2022-02-24T18:07:57.000Z
|
active_directory_ldap/komand_active_directory_ldap/connection/connection.py
|
OSSSP/insightconnect-plugins
|
846758dab745170cf1a8c146211a8bea9592e8ff
|
[
"MIT"
] | 17
|
2020-01-21T16:02:04.000Z
|
2022-01-12T15:11:26.000Z
|
active_directory_ldap/komand_active_directory_ldap/connection/connection.py
|
OSSSP/insightconnect-plugins
|
846758dab745170cf1a8c146211a8bea9592e8ff
|
[
"MIT"
] | 2
|
2020-12-26T11:33:23.000Z
|
2021-09-30T22:22:43.000Z
|
import komand
from komand.exceptions import ConnectionTestException
from .schema import ConnectionSchema
# Custom imports below
import ldap3
from ldap3.core import exceptions
class Connection(komand.Connection):
    """LDAP connection that first attempts an NTLM bind and falls back to a
    simple bind when NTLM fails for a reason other than a known LDAP error."""

    def __init__(self):
        super(self.__class__, self).__init__(input=ConnectionSchema())

    def connect(self, params):
        """
        Connect to LDAP.

        :param params: dict with 'host', optional 'port' (defaults to 389),
            'use_ssl', and 'username_password' ({'username', 'password'}).
        :raises ConnectionTestException: with a preset describing the failure.
        """
        self.ssl = params.get('use_ssl')
        # Apply the port default *before* logging: the original logged
        # params['port'] with %d first, which raised when port was absent.
        params['port'] = params.get('port') or 389
        self.logger.info("Connecting to %s:%d" % (params['host'], params['port']))
        use_ssl = bool(params.get('use_ssl'))
        server = ldap3.Server(
            host=params['host'],
            port=params['port'],
            use_ssl=use_ssl,
            get_info=ldap3.ALL)
        try:
            conn = self._bind(server, params, ntlm=True)
        except ConnectionTestException:
            # A recognized LDAP failure (bad credentials, unreachable host,
            # denied) — no point retrying with a simple bind.
            raise
        except Exception:
            # NTLM is not supported everywhere; retry with a simple bind.
            conn = self._bind(server, params, ntlm=False)
        self.logger.info("Connected!")
        self.conn = conn

    def _bind(self, server, params, ntlm):
        """Attempt one bind (NTLM or simple) and translate the known ldap3
        exceptions into ConnectionTestException presets."""
        creds = params.get('username_password')
        try:
            if ntlm:
                return ldap3.Connection(server=server,
                                        user=creds.get('username'),
                                        password=creds.get('password'),
                                        auto_encode=True,
                                        auto_escape=True,
                                        auto_bind=True,
                                        auto_referrals=False,
                                        authentication=ldap3.NTLM)
            return ldap3.Connection(server=server,
                                    user=creds.get('username'),
                                    password=creds.get('password'),
                                    auto_referrals=False,
                                    auto_bind=True)
        except exceptions.LDAPBindError as e:
            self.logger.error(f'ldap3 returned the following error {e}')
            raise ConnectionTestException(preset=ConnectionTestException.Preset.USERNAME_PASSWORD)
        except exceptions.LDAPAuthorizationDeniedResult as e:
            self.logger.error(f'ldap3 returned the following error {e}')
            raise ConnectionTestException(preset=ConnectionTestException.Preset.UNAUTHORIZED)
        except exceptions.LDAPSocketOpenError as e:
            self.logger.error(f'ldap3 returned the following error {e}')
            raise ConnectionTestException(
                preset=ConnectionTestException.Preset.SERVICE_UNAVAILABLE)

    def test(self):
        """Verify the bound connection by issuing a whoami extended operation."""
        try:
            self.conn.extend.standard.who_am_i()
        except Exception:
            raise ConnectionTestException(preset=ConnectionTestException.Preset.UNAUTHORIZED)
        return {'connection': 'successful'}
| 43.283951
| 98
| 0.582145
| 315
| 3,506
| 6.368254
| 0.244444
| 0.202393
| 0.118644
| 0.198903
| 0.651545
| 0.651545
| 0.614158
| 0.614158
| 0.614158
| 0.614158
| 0
| 0.006846
| 0.333428
| 3,506
| 80
| 99
| 43.825
| 0.851519
| 0.010553
| 0
| 0.507692
| 0
| 0
| 0.120464
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046154
| false
| 0.092308
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
67dc18f58485017ab38a2770f0766ad69656a146
| 470
|
py
|
Python
|
prpl/apis/hl/com/__init__.py
|
prplfoundation/prpl-ssi-api-parser
|
1811825ee2c852db8d5945dfb0748cfa3e57c0ee
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
prpl/apis/hl/com/__init__.py
|
prplfoundation/prpl-ssi-api-parser
|
1811825ee2c852db8d5945dfb0748cfa3e57c0ee
|
[
"BSD-2-Clause-Patent"
] | 1
|
2019-05-14T05:48:10.000Z
|
2019-05-14T05:48:10.000Z
|
prpl/apis/hl/com/__init__.py
|
prplfoundation/prpl-ssi-api-parser
|
1811825ee2c852db8d5945dfb0748cfa3e57c0ee
|
[
"BSD-2-Clause-Patent"
] | 1
|
2019-07-01T14:32:51.000Z
|
2019-07-01T14:32:51.000Z
|
from prpl.apis.hl.com.version import Version
from prpl.apis.hl.com.event import Event
from prpl.apis.hl.com.field import Field
from prpl.apis.hl.com.procedure import Procedure
from prpl.apis.hl.com.instance import Instance
from prpl.apis.hl.com.object import Object
from prpl.apis.hl.com.response_code import ResponseCode
from prpl.apis.hl.com.api import API
# Explicit public API of the prpl.apis.hl.com package.
__all__ = ['Version', 'Event', 'Field', 'Procedure', 'Instance', 'Object', 'ResponseCode', 'API']
| 39.166667
| 98
| 0.759574
| 74
| 470
| 4.756757
| 0.22973
| 0.181818
| 0.272727
| 0.318182
| 0.386364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117021
| 470
| 11
| 99
| 42.727273
| 0.848193
| 0
| 0
| 0
| 0
| 0
| 0.120087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.888889
| 0
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
67e4b64fdc3bb595a4919a74a36572afa702c47b
| 130
|
py
|
Python
|
python/testData/inspections/PyUnresolvedReferencesInspection/MetaClassMembersInStubs/b.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/PyUnresolvedReferencesInspection/MetaClassMembersInStubs/b.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/PyUnresolvedReferencesInspection/MetaClassMembersInStubs/b.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# NOTE(review): IDE-inspection test data — keep the declarations semantically
# unchanged; only comments may be added.
class GenericMeta(type):
    def __getitem__(self, args):
        pass
# Python-2-style metaclass assignment; members are resolved via GenericMeta.
class Generic(object):
    __metaclass__ = GenericMeta
| 13
| 32
| 0.676923
| 13
| 130
| 6.153846
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238462
| 130
| 9
| 33
| 14.444444
| 0.808081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
67e7ead417921385479bf056a2783b3e64b40b1b
| 2,669
|
py
|
Python
|
sknetwork/hierarchy/tests/test_metrics.py
|
altana-tech/scikit-network
|
dedc9d3e694c7106e4709aae22dffb5142c15859
|
[
"BSD-3-Clause"
] | 1
|
2020-09-14T11:06:13.000Z
|
2020-09-14T11:06:13.000Z
|
sknetwork/hierarchy/tests/test_metrics.py
|
altana-tech/scikit-network
|
dedc9d3e694c7106e4709aae22dffb5142c15859
|
[
"BSD-3-Clause"
] | 2
|
2020-10-17T08:21:38.000Z
|
2020-10-21T09:13:30.000Z
|
sknetwork/hierarchy/tests/test_metrics.py
|
altana-tech/scikit-network
|
dedc9d3e694c7106e4709aae22dffb5142c15859
|
[
"BSD-3-Clause"
] | 1
|
2020-06-19T09:39:11.000Z
|
2020-06-19T09:39:11.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on March 2019
@author: Thomas Bonald <bonald@enst.fr>
"""
import unittest
from sknetwork.data.test_graphs import *
from sknetwork.hierarchy import Paris, LouvainHierarchy, dasgupta_cost, dasgupta_score, tree_sampling_divergence
# noinspection PyMissingOrEmptyDocstring
class TestMetrics(unittest.TestCase):
    """Regression tests pinning hierarchy metric values on small test graphs."""

    def setUp(self):
        self.paris = Paris()
        self.louvain_hierarchy = LouvainHierarchy()

    def _check(self, adjacency, dendrogram, score, divergence, cost=None):
        # All reference values are asserted to 2 decimal places; the Dasgupta
        # cost is only checked where the original test recorded it.
        if cost is not None:
            self.assertAlmostEqual(dasgupta_cost(adjacency, dendrogram), cost, 2)
        self.assertAlmostEqual(dasgupta_score(adjacency, dendrogram), score, 2)
        self.assertAlmostEqual(tree_sampling_divergence(adjacency, dendrogram), divergence, 2)

    def test_undirected(self):
        adjacency = test_graph()
        self._check(adjacency, self.paris.fit_transform(adjacency),
                    0.675, 0.533, cost=3.25)
        self._check(adjacency, self.louvain_hierarchy.fit_transform(adjacency),
                    0.592, 0.485, cost=4.08)

    def test_directed(self):
        adjacency = test_digraph()
        self._check(adjacency, self.paris.fit_transform(adjacency), 0.672, 0.484)
        self._check(adjacency, self.louvain_hierarchy.fit_transform(adjacency), 0.627, 0.463)

    def test_disconnected(self):
        adjacency = test_graph_disconnect()
        self._check(adjacency, self.paris.fit_transform(adjacency), 0.752, 0.627)
        self._check(adjacency, self.louvain_hierarchy.fit_transform(adjacency), 0.691, 0.549)

    def test_options(self):
        adjacency = test_graph()
        dendrogram = self.paris.fit_transform(adjacency)
        self.assertAlmostEqual(dasgupta_score(adjacency, dendrogram, weights='degree'), 0.659, 2)
        self.assertAlmostEqual(tree_sampling_divergence(adjacency, dendrogram, weights='uniform'), 0.418, 2)
        self.assertAlmostEqual(tree_sampling_divergence(adjacency, dendrogram, normalized=False), 0.738, 2)
| 47.660714
| 112
| 0.738854
| 295
| 2,669
| 6.515254
| 0.264407
| 0.185744
| 0.12487
| 0.108221
| 0.707076
| 0.703954
| 0.703954
| 0.703954
| 0.581165
| 0.35744
| 0
| 0.039661
| 0.159236
| 2,669
| 55
| 113
| 48.527273
| 0.816845
| 0.053953
| 0
| 0.230769
| 0
| 0
| 0.005169
| 0
| 0
| 0
| 0
| 0
| 0.435897
| 1
| 0.128205
| false
| 0
| 0.076923
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
67eef06692cdce258c971fb6e8bdb51600387542
| 866
|
py
|
Python
|
lib/systems/nitrobenzene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/nitrobenzene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/nitrobenzene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
    """ Returns nitrobenzene as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol
    """
    # Geometry in XYZ-like format: element symbol then x, y, z coordinates.
    # All z values are 0 here — the molecule is laid out in a plane.
    return psr.make_system("""
        C          1.1584      2.3244      0.0000
        C          1.9309      1.1623      0.0000
        C          1.3210     -0.0901      0.0000
        C         -0.0817     -0.1642      0.0000
        C         -0.8671      1.0003      0.0000
        C         -0.2344      2.2416      0.0000
        N         -0.7449     -1.4946      0.0000
        O         -0.0637     -2.4850      0.0000
        O         -1.9459     -1.5466      0.0000
        H          1.6495      3.3098      0.0000
        H          3.0297      1.2298      0.0000
        H          1.9275     -1.0118      0.0000
        H         -1.9682      0.9300      0.0000
        H         -0.8425      3.1591      0.0000
        """)
| 39.363636
| 68
| 0.426097
| 127
| 866
| 2.88189
| 0.488189
| 0.191257
| 0.081967
| 0.057377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.466667
| 0.48037
| 866
| 21
| 69
| 41.238095
| 0.346667
| 0.125866
| 0
| 0
| 0
| 0
| 0.886024
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| true
| 0
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
67ef10fd5c5b99395ceddac4e1ac2213d1745f29
| 333
|
py
|
Python
|
tests/test_day12.py
|
khwilson/advent2021
|
6499b883f1b6d7366f3fe75dc229d646154a4cf8
|
[
"MIT"
] | null | null | null |
tests/test_day12.py
|
khwilson/advent2021
|
6499b883f1b6d7366f3fe75dc229d646154a4cf8
|
[
"MIT"
] | null | null | null |
tests/test_day12.py
|
khwilson/advent2021
|
6499b883f1b6d7366f3fe75dc229d646154a4cf8
|
[
"MIT"
] | null | null | null |
from pathlib import Path
from advent.solutions import day12
def test_part1(fixtures_path: Path) -> None:
    """Part 1 on the sample input: expect 10 paths."""
    solution = day12.Day12(fixtures_path / "test_input12.txt")
    assert solution.part1() == 10
def test_part2(fixtures_path: Path) -> None:
    """Part 2 on the sample input: expect 36 paths."""
    solution = day12.Day12(fixtures_path / "test_input12.txt")
    assert solution.part2() == 36
| 23.785714
| 62
| 0.735736
| 45
| 333
| 5.266667
| 0.4
| 0.202532
| 0.135021
| 0.202532
| 0.624473
| 0.624473
| 0.624473
| 0.624473
| 0.624473
| 0.624473
| 0
| 0.078571
| 0.159159
| 333
| 13
| 63
| 25.615385
| 0.767857
| 0
| 0
| 0.25
| 0
| 0
| 0.096096
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
67f05f43a6ae5f830b624f1ca2eeb939adf04c55
| 626
|
py
|
Python
|
tests/test_inrepo.py
|
minrk/repo2docker-checker
|
3cb3f231c032388c374929bbefd79366d64ad767
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_inrepo.py
|
minrk/repo2docker-checker
|
3cb3f231c032388c374929bbefd79366d64ad767
|
[
"BSD-3-Clause"
] | 3
|
2020-07-02T06:55:11.000Z
|
2020-07-20T13:40:24.000Z
|
tests/test_inrepo.py
|
minrk/repo2docker-checker
|
3cb3f231c032388c374929bbefd79366d64ad767
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import pytest
from repo2docker_checker import inrepo
def test_import_fails():
    """import_test must raise ImportError for a module that does not exist."""
    with pytest.raises(ImportError):
        inrepo.import_test("nosuchmod")
def test_import_ok():
    """import_test must succeed for a module that is always present (sys)."""
    inrepo.import_test("sys")
def test_notebook_fails(tmpdir, here):
    """Running a notebook that errors must raise from run_notebook."""
    output_dir = str(tmpdir.mkdir("out"))
    nb = os.path.join(here, "fails.ipynb")
    with pytest.raises(Exception):
        inrepo.run_notebook(nb, output_dir)
def test_notebook_ok(tmpdir, here):
    """A passing notebook runs cleanly and leaves output in output_dir."""
    output_dir = str(tmpdir.mkdir("out"))
    nb = os.path.join(here, "passes.ipynb")
    inrepo.run_notebook(nb, output_dir)
    # run_notebook must have written at least one file into the output dir.
    assert os.listdir(output_dir)
| 21.586207
| 43
| 0.707668
| 88
| 626
| 4.829545
| 0.375
| 0.105882
| 0.061176
| 0.089412
| 0.376471
| 0.376471
| 0.244706
| 0.244706
| 0.244706
| 0.244706
| 0
| 0.001923
| 0.169329
| 626
| 28
| 44
| 22.357143
| 0.815385
| 0
| 0
| 0.222222
| 0
| 0
| 0.065495
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.222222
| false
| 0.055556
| 0.444444
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
e1fe1403d469c0e383f9e1004c0a638bff223eed
| 1,177
|
py
|
Python
|
angrmanagement/logic/disassembly/jump_history.py
|
Kyle-Kyle/angr-management
|
6ea65f19d813be510a38f06510b2b2148a6b5000
|
[
"BSD-2-Clause"
] | 2
|
2022-01-23T21:43:54.000Z
|
2022-02-02T08:20:20.000Z
|
angrmanagement/logic/disassembly/jump_history.py
|
Kyle-Kyle/angr-management
|
6ea65f19d813be510a38f06510b2b2148a6b5000
|
[
"BSD-2-Clause"
] | 1
|
2021-12-04T01:11:46.000Z
|
2021-12-04T01:11:46.000Z
|
angrmanagement/logic/disassembly/jump_history.py
|
Kyle-Kyle/angr-management
|
6ea65f19d813be510a38f06510b2b2148a6b5000
|
[
"BSD-2-Clause"
] | 1
|
2021-05-17T05:46:19.000Z
|
2021-05-17T05:46:19.000Z
|
class JumpHistory(object):
    """Browser-style history of visited addresses with back/forward navigation.

    Fix over the original: jump_to and record_address were byte-identical
    copies of the same logic; jump_to now delegates to record_address so the
    behavior stays in one place.
    """

    def __init__(self):
        self._history = [ ]  # visited addresses, oldest first
        self._pos = 0        # index of the current address

    def __len__(self):
        return len(self._history)

    def jump_to(self, addr):
        """Record a jump to addr (same behavior as record_address)."""
        self.record_address(addr)

    def record_address(self, addr):
        """Append addr as the new current entry.

        Discards any forward entries first, and skips the append when addr
        equals the most recent entry (no consecutive duplicates).
        """
        if self._pos != len(self._history) - 1:
            # We navigated backwards earlier: drop the forward tail.
            self.trim()
        if not self._history or self._history[-1] != addr:
            self._history.append(addr)
        self._pos = len(self._history) - 1

    def trim(self):
        """Drop every entry after the current position."""
        self._history = self._history[ : self._pos + 1]

    def backtrack(self):
        """Move back one entry; return the new current address, or None when
        the history is empty."""
        if self._pos > 0:
            self._pos -= 1
        if self._pos >= len(self._history):
            return None
        return self._history[self._pos]

    def forwardstep(self):
        """Move forward one entry; return the current address, or None when
        the history is empty."""
        if self._pos < len(self._history) - 1:
            self._pos += 1
        if self._pos < len(self._history):
            return self._history[self._pos]
        return None
| 24.020408
| 58
| 0.536109
| 145
| 1,177
| 4.055172
| 0.17931
| 0.355442
| 0.190476
| 0.166667
| 0.646259
| 0.57483
| 0.57483
| 0.57483
| 0.534014
| 0.534014
| 0
| 0.015564
| 0.344945
| 1,177
| 48
| 59
| 24.520833
| 0.747082
| 0
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205882
| false
| 0
| 0
| 0.029412
| 0.382353
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c00e36d0aed2b6f3af2d49b889dbb204d52d5183
| 92
|
py
|
Python
|
type_page/apps.py
|
dumel93/project-
|
f9ad52d9c8449953e2151fd1c13b39631113eea7
|
[
"MIT"
] | null | null | null |
type_page/apps.py
|
dumel93/project-
|
f9ad52d9c8449953e2151fd1c13b39631113eea7
|
[
"MIT"
] | null | null | null |
type_page/apps.py
|
dumel93/project-
|
f9ad52d9c8449953e2151fd1c13b39631113eea7
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class TypePageConfig(AppConfig):
    """Django application configuration for the type_page app."""
    name = 'type_page'
| 15.333333
| 33
| 0.76087
| 11
| 92
| 6.272727
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163043
| 92
| 5
| 34
| 18.4
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0.097826
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
c0566b70caae576cf3eb4870977c7d4a8aebf82d
| 108
|
py
|
Python
|
Baekjoon/Python/1026.py
|
KHJcode/Algorithm-study
|
fa08d3c752fcb3557fd45fb394157926afc0de4a
|
[
"MIT"
] | 2
|
2020-05-23T01:55:38.000Z
|
2020-07-07T15:59:00.000Z
|
Baekjoon/Python/1026.py
|
KHJcode/Algorithm-study
|
fa08d3c752fcb3557fd45fb394157926afc0de4a
|
[
"MIT"
] | null | null | null |
Baekjoon/Python/1026.py
|
KHJcode/Algorithm-study
|
fa08d3c752fcb3557fd45fb394157926afc0de4a
|
[
"MIT"
] | null | null | null |
# Baekjoon 1026: minimize sum(A[i] * B[i]) over rearrangements of A by pairing
# the largest A values with the smallest B values.
# The original read all three lines via eval("map(int,input().split()),"*3);
# explicit reads below avoid eval while consuming stdin in the same order.
input()  # first line is N (array length) — not needed, lines are read whole
A = list(map(int, input().split()))
B = list(map(int, input().split()))
print(sum(a * b for a, b in zip(sorted(A, reverse=True), sorted(B))))
| 54
| 65
| 0.638889
| 24
| 108
| 2.875
| 0.708333
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.064815
| 108
| 2
| 65
| 54
| 0.673267
| 0
| 0
| 0
| 0
| 0
| 0.229358
| 0.229358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
22385d0163f80633bb0715925b569051531f3461
| 46
|
py
|
Python
|
tests/conftest.py
|
Chavithra/investments-toolkit
|
d78ddc843ccc178d6a3b2eab35e2512e634bc9d4
|
[
"Apache-2.0"
] | 3
|
2021-08-23T16:47:22.000Z
|
2021-11-19T12:41:19.000Z
|
tests/conftest.py
|
Chavithra/investments-toolkit
|
d78ddc843ccc178d6a3b2eab35e2512e634bc9d4
|
[
"Apache-2.0"
] | 14
|
2021-08-28T14:17:50.000Z
|
2021-11-28T20:12:54.000Z
|
tests/conftest.py
|
Chavithra/investments-toolkit
|
d78ddc843ccc178d6a3b2eab35e2512e634bc9d4
|
[
"Apache-2.0"
] | 1
|
2021-11-04T06:51:32.000Z
|
2021-11-04T06:51:32.000Z
|
# Register the shared fixture module(s) for the whole test suite.
pytest_plugins = ["_fixtures.fixture_barset"]
| 23
| 45
| 0.804348
| 5
| 46
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 46
| 1
| 46
| 46
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0.521739
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
226b014ee4a3a406f55be3e030266df128bdc73b
| 412
|
py
|
Python
|
Reading/tmt/models.py
|
SnowmanZhang/CharacterTest
|
c4936174557594fca93a747b3a9893446e9afd67
|
[
"MIT"
] | null | null | null |
Reading/tmt/models.py
|
SnowmanZhang/CharacterTest
|
c4936174557594fca93a747b3a9893446e9afd67
|
[
"MIT"
] | null | null | null |
Reading/tmt/models.py
|
SnowmanZhang/CharacterTest
|
c4936174557594fca93a747b3a9893446e9afd67
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class Question(models.Model):
    """A question with its text and project/attribute metadata.

    NOTE(review): 'attribute' and 'project' semantics are not visible here —
    confirm their meaning against the views/forms that use this model.
    """
    question_text = models.CharField(max_length=200)
    attribute = models.CharField(max_length=12)
    project = models.CharField(max_length=32,default ="")
    project_id = models.CharField(max_length=12,default="")
    def __unicode__(self):
        # Python 2 string representation: the question text itself.
        return self.question_text
# Create your models here.
| 29.428571
| 56
| 0.769417
| 56
| 412
| 5.375
| 0.553571
| 0.199336
| 0.239203
| 0.318937
| 0.172757
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.114078
| 412
| 13
| 57
| 31.692308
| 0.79726
| 0.11165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.222222
| 0.111111
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
97e999f6dd05902f61a79580463ecec8ecb3f6df
| 117
|
py
|
Python
|
matt_site/home/views/homeview.py
|
hamtamtots/website
|
1b69aae7d543942ea87cc4d2370f58a3f5977b89
|
[
"MIT"
] | null | null | null |
matt_site/home/views/homeview.py
|
hamtamtots/website
|
1b69aae7d543942ea87cc4d2370f58a3f5977b89
|
[
"MIT"
] | null | null | null |
matt_site/home/views/homeview.py
|
hamtamtots/website
|
1b69aae7d543942ea87cc4d2370f58a3f5977b89
|
[
"MIT"
] | null | null | null |
from django.views.generic import TemplateView
class HomeView(TemplateView):
    """Serve the site landing page from the static home template."""
    template_name = 'home/index.html'
| 19.5
| 46
| 0.777778
| 15
| 117
| 6.066667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 117
| 5
| 47
| 23.4
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0.12931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3f0070b187cd40ca85b27363886cc31e6aec8822
| 255
|
py
|
Python
|
sponge-integration-tests/examples/core/kb_file_order_1.py
|
mnpas/sponge
|
7190f23ae888bbef49d0fbb85157444d6ea48bcd
|
[
"Apache-2.0"
] | 9
|
2017-12-16T21:48:57.000Z
|
2022-01-06T12:22:24.000Z
|
sponge-integration-tests/examples/core/kb_file_order_1.py
|
mnpas/sponge
|
7190f23ae888bbef49d0fbb85157444d6ea48bcd
|
[
"Apache-2.0"
] | 3
|
2020-12-18T11:56:46.000Z
|
2022-03-31T18:37:10.000Z
|
sponge-integration-tests/examples/core/kb_file_order_1.py
|
mnpas/sponge
|
7190f23ae888bbef49d0fbb85157444d6ea48bcd
|
[
"Apache-2.0"
] | 2
|
2019-12-29T16:08:32.000Z
|
2020-06-15T14:05:34.000Z
|
"""
Sponge Knowledge Base
Knowledge base file order
"""
from java.util import ArrayList
def onInit():
    """Knowledge base init hook: create the shared 'order' list variable."""
    # Variables for assertions only
    sponge.setVariable("order", ArrayList())
def onStartup():
    """Startup hook: append 1 to 'order' to record this file's startup turn."""
    sponge.getVariable("order").add(1)
| 18.214286
| 45
| 0.67451
| 29
| 255
| 5.931034
| 0.724138
| 0.151163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004951
| 0.207843
| 255
| 13
| 46
| 19.615385
| 0.846535
| 0.305882
| 0
| 0
| 0
| 0
| 0.064103
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
3f00eab3eae546524b6fb53929062a39b28da98c
| 108
|
py
|
Python
|
tgstats/main.py
|
Ensenasty/Chat-Analytics
|
b2c061f101b5c47a4dfb882866e4f5b03f37b5c6
|
[
"BSD-3-Clause"
] | null | null | null |
tgstats/main.py
|
Ensenasty/Chat-Analytics
|
b2c061f101b5c47a4dfb882866e4f5b03f37b5c6
|
[
"BSD-3-Clause"
] | null | null | null |
tgstats/main.py
|
Ensenasty/Chat-Analytics
|
b2c061f101b5c47a4dfb882866e4f5b03f37b5c6
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
## ts = ddf
# ts.reindex(ts["date"])
# daily = ts.resample("D")
# daily.agg(sum)
| 13.5
| 26
| 0.527778
| 16
| 108
| 3.5625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.185185
| 108
| 7
| 27
| 15.428571
| 0.636364
| 0.87037
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3f1a7a8126eb0dbcff8197d73836c0de94f5756f
| 5,068
|
py
|
Python
|
tmp_copyApp73HCMLCLfiles2Git.py
|
judysu1983/PythonMBSi
|
9481bf1409a888c3f8511bcd05718ea81a063fa1
|
[
"bzip2-1.0.6"
] | null | null | null |
tmp_copyApp73HCMLCLfiles2Git.py
|
judysu1983/PythonMBSi
|
9481bf1409a888c3f8511bcd05718ea81a063fa1
|
[
"bzip2-1.0.6"
] | null | null | null |
tmp_copyApp73HCMLCLfiles2Git.py
|
judysu1983/PythonMBSi
|
9481bf1409a888c3f8511bcd05718ea81a063fa1
|
[
"bzip2-1.0.6"
] | null | null | null |
import os, shutil,stat
#Copy files from sd to git folder structure by C:\Depots\MBSI\Projects\OOB\UI\OOBAppsTest.eol:
#e.g. from C:\Depots\MBSI\Projects\OOB\UI\ar\HcmApp\HCM.en-US.label.txt.lcl
# to C:\GitProjects\HCM\source\Translation\LCL\ar\HCM.en-US.label.txt.lcl
#f= Accounting Foundation\source\metadata\Calendar\Calendar\AxLabelFile\LabelResources
langs=["ar","ar-AE","cs","da","de","de-AT","de-CH","en-AU","en-CA","en-GB","en-IE","en-IN","en-MY","en-NZ","en-SG","en-ZA","es","es-MX","et","fi","fr","fr-BE","fr-CA","fr-CH","hu","is","it","it-CH","ja","lt","lv","nb-NO","nl","nl-BE","pl","pt-BR","ru","sv","th","tr","zh-Hans"]
# Per-component lists of .lcl label files to mirror from the SD depot into Git.
# Commented-out variants below are older/alternative file sets kept for reference.
HCMfiles=["UserDefinedApp.en-US.label.txt.lcl", "BusinessProcess.en-US.label.txt.lcl", "CaseManagement.en-US.label.txt.lcl", "HcmPeopleNavigatorControl.en-US.label.txt.lcl", "HcmPeopleSearchControl.en-US.label.txt.lcl", "HcmPersonCard.en-US.label.txt.lcl", "HumanCapitalManagement.en-US.label.txt.lcl", "HumanCapitalMobile.en-US.label.txt.lcl", "Leave.en-US.label.txt.lcl", "Personnel.en-US.label.txt.lcl", "HcmOnboard.en-US.label.txt.lcl", "PersonnelBusinessProcess.en-US.label.txt.lcl", "PersonnelCore.en-US.label.txt.lcl", "Benefits.en-US.label.txt.lcl", "Compensation.en-US.label.txt.lcl", "HCM.en-us.label.txt.lcl", "HcmACA.en-US.label.txt.lcl", "HcmGenericProcess.en-US.label.txt.lcl", "Payroll.en-US.label.txt.lcl", "Talent.en-US.label.txt.lcl", "TalentClient.en-US.label.txt.lcl", "Workforce.en-US.label.txt.lcl", "HcmMobile.en-US.label.txt.lcl", "PersonnelUpgrade.en-US.label.txt.lcl", "TimeAtt.en-US.label.txt.lcl"]
ApplicationCommonfiles=["ApplicationCommon.en-US.label.txt.lcl", "GetStarted.en-US.label.txt.lcl", "HierarchicalGridCommon.en-US.label.txt.lcl", "ContactPersonManagement.en-US.label.txt.lcl", "Directory_InvoicesCommunication.en-US.label.txt.lcl", "GlobalAddressBook_App73Hotfix.en-US.label.txt.lcl", "GlobalAddressBook.en-US.label.txt.lcl", "SysSecReportLabels.en-US.label.txt.lcl", "DirectoryUpgrade.en-US.label.txt.lcl", "SysPolicy.en-US.label.txt.lcl", "SysBasicUpgrade.en-US.label.txt.lcl", "UnitOfMeasure.en-US.label.txt.lcl", "UserDefinedFields.en-US.label.txt.lcl"]
CostAccountingfiles=["CostAccounting.en-US.label.txt.lcl"]
AccountingFoundationfiles=["BankAccountType.en-US.label.txt.lcl", "Calendars.en-US.label.txt.lcl", "CurrencyCodesDynamics_3945654.en-US.label.txt.lcl", "CurrencyExchange.en-US.label.txt.lcl", "Dimension.en-US.label.txt.lcl", "DimensionSegmentSeparator.en-US.label.txt.lcl", "Ledger.en-US.label.txt.lcl", "LedgerEntity.en-US.label.txt.lcl", "SegmentedEntry.en-US.label.txt.lcl", "Measurement.en-US.label.txt.lcl", "AccountingFramework.en-US.label.txt.lcl", "SourceDocumentation.en-US.label.txt.lcl", "TaxEngineIntegration_SourceDocumentation.en-US.label.txt.lcl", "TaxEngineIntegration_SourceDocumentationTypes.en-US.label.txt.lcl", "Subledger.en-US.label.txt.lcl"]
#AccountingFoundationfiles=["Ledger.en-us.label.txt.lcl", "Measurement.en-us.label.txt.lcl", "AccountingFramework.en-us.label.txt.lcl", "SourceDocumentation.en-us.label.txt.lcl", "TaxEngineIntegration_SourceDoc.en-us.label.txt.lcl", "TaxEngineIntegration_SourceDocTypes.en-us.label.txt.lcl", "Subledger.en-us.label.txt.lcl"]
#ElectronicReportingfiles=["ElectronicReporting.en-US.label.txt.lcl", "ElectronicReportingPrintManagementIntegration.en-US.label.txt.lcl", "ElectronicReportingCore.en-US.label.txt.lcl", "ElectronicReportingForAx.en-US.label.txt.lcl", "ElectronicReportingMapping.en-US.label.txt.lcl", "ElectronicReportingRetail.en-US.label.txt.lcl", "ElectronicReportingRetailForAx.en-US.label.txt.lcl", "TaxEngine.en-US.label.txt.lcl", "TaxEngineConfiguration.en-US.label.txt.lcl", "TaxEngineInterface.en-US.label.txt.lcl", "TaxSettlement.en-US.label.txt.lcl"]
ElectronicReportingfiles=["ElectronicReporting.en-US.label.txt.lcl","ElectronicReportingMapping.en-US.label.txt.lcl"]
def del_rw(action, name, exc):
    """shutil.rmtree error handler: strip the read-only flag, then retry the delete."""
    os.chmod(name, stat.S_IWRITE)
    os.unlink(name)
def copyLCL(Component, fileList):
    """Refresh the Git LCL folder for *Component* from the SD depot.

    Deletes C:\\test\\App73HB\\<Component>\\source\\Translation\\LCL, recreates
    it, then copies every file in *fileList* for every language in the
    module-level ``langs`` list (assumed defined earlier in this script).

    :param Component: component folder name under the Git root
    :param fileList: .lcl file names to copy per language
    """
    # Raw string avoids the original's mixed escaped/unescaped backslashes.
    GitLCLpath = os.path.join(r'C:\test\App73HB', Component, 'source', 'Translation', 'LCL')
    print(GitLCLpath)
    # Start from a clean LCL tree; del_rw clears read-only flags so rmtree succeeds.
    if os.path.exists(GitLCLpath):
        shutil.rmtree(GitLCLpath, onerror=del_rw)
    os.makedirs(GitLCLpath)
    SDpath = r'C:\Depots\MBSI\Projects\OOB\App7x\UI'
    for lang in langs:
        # Source and destination folders are per-language: hoisted out of the
        # per-file loop (they do not depend on the file name).
        sdpath = os.path.join(SDpath, lang, 'HcmApp')
        dstpath = os.path.join(GitLCLpath, lang)
        if not os.path.exists(dstpath):
            os.makedirs(dstpath)
        if not os.path.exists(sdpath):
            # No depot folder for this language: nothing to copy.
            continue
        for f in fileList:
            print('copy LCL for: ' + lang + ': ' + f)
            print(os.path.join(dstpath, f))
            shutil.copy2(os.path.join(sdpath, f), os.path.join(dstpath, f))
##
##copyLCL('HCM',HCMfiles)
##copyLCL('ApplicationCommon',ApplicationCommonfiles)
# Only the Electronic Reporting component is refreshed in this run; the calls
# above/below are kept commented out so they can be re-enabled as needed.
copyLCL('ElectronicReporting',ElectronicReportingfiles)
##copyLCL('Cost%20Accounting',CostAccountingfiles)
##copyLCL('Accounting%20Foundation',AccountingFoundationfiles)
| 95.622642
| 926
| 0.732242
| 721
| 5,068
| 5.133148
| 0.269071
| 0.08214
| 0.184815
| 0.24642
| 0.460416
| 0.24615
| 0.166982
| 0.152391
| 0.152391
| 0.152391
| 0
| 0.003629
| 0.07577
| 5,068
| 52
| 927
| 97.461538
| 0.786507
| 0.282163
| 0
| 0
| 0
| 0
| 0.638274
| 0.580555
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.035714
| 0
| 0.107143
| 0.107143
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3f615488f020ce199b4caf906ee1c315ba88ce6a
| 4,368
|
py
|
Python
|
data_visualization/queries.py
|
danielrenes/car-data-visualization
|
9fb18c7bc8ec3e796793178acab350f0b5e7ef5b
|
[
"MIT"
] | null | null | null |
data_visualization/queries.py
|
danielrenes/car-data-visualization
|
9fb18c7bc8ec3e796793178acab350f0b5e7ef5b
|
[
"MIT"
] | null | null | null |
data_visualization/queries.py
|
danielrenes/car-data-visualization
|
9fb18c7bc8ec3e796793178acab350f0b5e7ef5b
|
[
"MIT"
] | null | null | null |
from .models import Category, Sensor, Data, Subview, View, ChartConfig, User, PredefinedConfiguration, PreconfiguredView
from .decorators import success_or_abort
@success_or_abort
def query_all_categories(user_id):
    """All Category rows owned by *user_id*, ordered by Category.id."""
    owned = Category.query.join(User, User.id == Category.user_id)
    return owned.filter(User.id == user_id).order_by(Category.id).all()
@success_or_abort
def query_get_category_by_id(id, user_id):
    """The Category with primary key *id* owned by *user_id*, or None."""
    owned = Category.query.join(User, User.id == Category.user_id)
    return owned.filter(User.id == user_id, Category.id == id).first()
@success_or_abort
def query_get_category_by_name(name, user_id):
    """The Category named *name* owned by *user_id*, or None."""
    owned = Category.query.join(User, User.id == Category.user_id)
    return owned.filter(User.id == user_id, Category.name == name).first()
@success_or_abort
def query_all_sensors(user_id):
    """All Sensor rows owned by *user_id* (via their Category), ordered by Sensor.id."""
    owned = (Sensor.query
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id).order_by(Sensor.id).all()
@success_or_abort
def query_get_sensor_by_id(id, user_id):
    """The Sensor with primary key *id* owned by *user_id*, or None."""
    owned = (Sensor.query
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id, Sensor.id == id).first()
@success_or_abort
def query_get_sensor_by_name(name, user_id):
    """The Sensor named *name* owned by *user_id*, or None."""
    owned = (Sensor.query
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id, Sensor.name == name).first()
@success_or_abort
def query_all_datas(user_id):
    """All Data rows owned by *user_id* (via Sensor -> Category), ordered by Data.id."""
    owned = (Data.query
             .join(Sensor, Data.sensor_id == Sensor.id)
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id).order_by(Data.id).all()
@success_or_abort
def query_get_data_by_id(id, user_id):
    """The Data row with primary key *id* owned by *user_id*, or None."""
    owned = (Data.query
             .join(Sensor, Data.sensor_id == Sensor.id)
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id, Data.id == id).first()
@success_or_abort
def query_all_subviews(user_id):
    """All Subview rows owned by *user_id* (via Sensor -> Category), ordered by Subview.id."""
    owned = (Subview.query
             .join(Sensor, Subview.sensor_id == Sensor.id)
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id).order_by(Subview.id).all()
@success_or_abort
def query_get_subview_by_id(id, user_id):
    """The Subview with primary key *id* owned by *user_id*, or None."""
    owned = (Subview.query
             .join(Sensor, Subview.sensor_id == Sensor.id)
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    return owned.filter(User.id == user_id, Subview.id == id).first()
@success_or_abort
def query_all_views(user_id):
    """All View rows followed by all PreconfiguredView rows for *user_id*, each ordered by id."""
    regular = (View.query
               .join(User, User.id == View.user_id)
               .filter(User.id == user_id)
               .order_by(View.id)
               .all())
    preconfigured = (PreconfiguredView.query
                     .join(User, User.id == PreconfiguredView.user_id)
                     .filter(User.id == user_id)
                     .order_by(PreconfiguredView.id)
                     .all())
    return regular + preconfigured
@success_or_abort
def query_get_view_by_id(id, user_id):
    """The View with primary key *id* for *user_id*; falls back to PreconfiguredView, else None."""
    regular = (View.query
               .join(User, User.id == View.user_id)
               .filter(User.id == user_id, View.id == id)
               .first())
    if regular:
        return regular
    # No regular view matched: try the preconfigured views instead.
    return (PreconfiguredView.query
            .join(User, User.id == PreconfiguredView.user_id)
            .filter(User.id == user_id, PreconfiguredView.id == id)
            .first())
@success_or_abort
def query_get_user_by_id(id):
    """The User with primary key *id*, or None."""
    matches = User.query.filter(User.id == id)
    return matches.first()
@success_or_abort
def query_get_user_by_name(username):
    """The User whose username equals *username*, or None."""
    matches = User.query.filter(User.username == username)
    return matches.first()
@success_or_abort
def query_all_chartconfigs():
    """Every ChartConfig row, ordered by id (no per-user filtering here)."""
    ordered = ChartConfig.query.order_by(ChartConfig.id)
    return ordered.all()
@success_or_abort
def query_get_chartconfig_by_id(id):
    """The ChartConfig with primary key *id*, or None."""
    matches = ChartConfig.query.filter(ChartConfig.id == id)
    return matches.first()
@success_or_abort
def query_get_chartconfig_by_type(type):
    """The first ChartConfig whose type equals *type*, or None."""
    matches = ChartConfig.query.filter(ChartConfig.type == type)
    return matches.first()
@success_or_abort
def query_all_predefined_configs():
    """Every PredefinedConfiguration row, ordered by id."""
    ordered = PredefinedConfiguration.query.order_by(PredefinedConfiguration.id)
    return ordered.all()
# TODO: modify so the user can only access their own predefined config
@success_or_abort
def query_get_predefined_config_by_id(id):
    """The PredefinedConfiguration with primary key *id*, or None (no ownership check yet)."""
    matches = PredefinedConfiguration.query.filter(PredefinedConfiguration.id == id)
    return matches.first()
@success_or_abort
def query_all_parking_spaces(user_id):
    """Sensors of *user_id* whose name starts with 'parking_space', ordered by Sensor.id."""
    owned = (Sensor.query
             .join(Category, Sensor.category_id == Category.id)
             .join(User, User.id == Category.user_id))
    matching = owned.filter(User.id == user_id, Sensor.name.like('parking_space%'))
    return matching.order_by(Sensor.id).all()
| 43.68
| 154
| 0.765339
| 688
| 4,368
| 4.600291
| 0.079942
| 0.140284
| 0.092891
| 0.107425
| 0.766825
| 0.715008
| 0.679937
| 0.660979
| 0.582306
| 0.479937
| 0
| 0
| 0.09272
| 4,368
| 99
| 155
| 44.121212
| 0.798637
| 0.015797
| 0
| 0.384615
| 0
| 0
| 0.003258
| 0
| 0
| 0
| 0
| 0.010101
| 0
| 1
| 0.25641
| false
| 0
| 0.025641
| 0.230769
| 0.551282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
3f62626f2f84f7a8aed1f7fff5e2be58bfa05eef
| 582
|
py
|
Python
|
tests/json_placeholder_client.py
|
QualiSystemsLab/abstract-requests-client
|
eb8281082cdfbd54b78451e907b9e7eef21b718a
|
[
"MIT"
] | 1
|
2021-12-08T20:54:40.000Z
|
2021-12-08T20:54:40.000Z
|
tests/json_placeholder_client.py
|
QualiSystemsLab/abstract-requests-client
|
eb8281082cdfbd54b78451e907b9e7eef21b718a
|
[
"MIT"
] | null | null | null |
tests/json_placeholder_client.py
|
QualiSystemsLab/abstract-requests-client
|
eb8281082cdfbd54b78451e907b9e7eef21b718a
|
[
"MIT"
] | null | null | null |
from abstract_http_client.http_clients.requests_client import RequestsClient
class JsonPlaceholderApiClient(RequestsClient):
    """Minimal client for the JSONPlaceholder demo API, built on RequestsClient."""

    def __init__(self, host):
        # This service is reached over HTTPS.
        super().__init__(host=host, use_https=True)

    def get_posts(self):
        """Return every post as decoded JSON."""
        response = self.rest_service.request_get("/posts")
        return response.json()

    def add_post(self):
        """Create one fixed sample post and return the decoded JSON response."""
        body = {"post": "my_post"}
        response = self.rest_service.request_post("/posts", json=body)
        return response.json()

    def edit_post(self):
        """PUT against post #1; returns the raw response object (not JSON-decoded)."""
        return self.rest_service.request_put("/posts/1")

    def delete_post(self):
        """DELETE post #1; returns the raw response object (not JSON-decoded)."""
        return self.rest_service.request_delete("/posts/1")
| 30.631579
| 88
| 0.709622
| 76
| 582
| 5.092105
| 0.421053
| 0.103359
| 0.144703
| 0.186047
| 0.361757
| 0.361757
| 0.27907
| 0
| 0
| 0
| 0
| 0.004107
| 0.16323
| 582
| 18
| 89
| 32.333333
| 0.790554
| 0
| 0
| 0
| 0
| 0
| 0.06701
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.416667
| false
| 0
| 0.083333
| 0.333333
| 0.916667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
58d94207018b26eb7606dc88f01f641019560424
| 56
|
py
|
Python
|
solutions/filter_min.py
|
lbasora/back2python
|
76e825049579ebf52e3b3e88003a1c2f53d2062b
|
[
"MIT"
] | null | null | null |
solutions/filter_min.py
|
lbasora/back2python
|
76e825049579ebf52e3b3e88003a1c2f53d2062b
|
[
"MIT"
] | null | null | null |
solutions/filter_min.py
|
lbasora/back2python
|
76e825049579ebf52e3b3e88003a1c2f53d2062b
|
[
"MIT"
] | null | null | null |
# Select the row(s) of `carburant` whose 'prix' equals the column minimum.
# NOTE(review): assumes `carburant` is a pandas DataFrame with a 'prix' column — confirm upstream.
carburant[carburant['prix'] == carburant['prix'].min()]
| 28
| 55
| 0.678571
| 6
| 56
| 6.333333
| 0.5
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053571
| 56
| 1
| 56
| 56
| 0.716981
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
58df8a4d174fdfaf10050cbe1a0c037f96449889
| 4,339
|
py
|
Python
|
DailyProgrammer/DP20150605.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | 2
|
2020-12-23T18:59:22.000Z
|
2021-04-14T13:16:09.000Z
|
DailyProgrammer/DP20150605.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
DailyProgrammer/DP20150605.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
"""
[2015-06-05] Challenge #217 [Practical Exercise] TeXSCII
https://www.reddit.com/r/dailyprogrammer/comments/38nhgx/20150605_challenge_217_practical_exercise_texscii/
# [](#PEIcon) _(Practical Exercise)_: TeXSCII
LaTeX is a typesetting utility based on the TeX typesetting and macro system which can be used to output mathematical
formulae to display or print. For example, the LaTeX code `\frac{-b\pm\sqrt{b^{2}-4ac}}{2a}` will be transformed into
[this](http://latex.codecogs.com/gif.latex?%5Cdpi%7B200%7D%20%5Cfrac%7B-b%5Cpm%5Csqrt%7Bb%5E%7B2%7D-4ac%7D%7D%7B2a%7D)
when typeset.
The syntax of LaTeX formulae is fairly simple; commands begin with a backslash `\`, followed by the command name,
followed by its arguments in curly braces, such as `\sqrt{-1}` (square-root of -1) or `\frac{1}{3}` (1/3 as a
fraction). Subscript and superscript are also supported, with the `_` and `^` characters respectively, followed by the
script in curly braces - for example, `x^{2}` outputs x^(2). Everything else is output as plain text.
In today's challenge, you'll implement a simplified subset of LaTeX which outputs the resulting formula as ASCII.
# Formal Inputs and Outputs
## Input Specification
You'll be given a LaTeX equation on one line. The commands you need to support are:
* `\frac{top}{bottom}`: A fraction with the given top and bottom pieces
* `\sqrt{content}`: A square-root sign
* `\root{power}{content}`: A root sign with an arbitrary power (eg. cube-root, where the power 3 is at the top-left of
the radical symbol)
* `_{sub}`: Subscript
* `^{sup}`: Superscript
* `_{sub}^{sup}`: Subscript and superscript (one on top of the other)
* `\pi`: Output the greek symbol for pi
Feel free to extend your solution to support any additional structures such as integral signs.
## Output Description
Output the formula with ASCII symbols in the appropriate locations. You're free to pick the output style that looks
most appropriate to you. One possible way might be something like this:
3_
√x
y=--
3
# Sample Inputs and Outputs
## Subscripts and Superscripts
### Input
log_{e}(e^{x})=x
### Output
x
log (e )=x
e
## Stacked Scripts
### Input
F_{21}^{3}=2^{5}*7^{3}-30
### Output
3 5 3
F =2 *7 -30
21
## Fractions
### Input
sin^{3}(\frac{1}{3}\pi)=\frac{3}{8}\sqrt{3}
### Output
3 1 3 _
sin (-π)=-√3
3 8
## Quadratic Formula
### Input
x=\frac{-b+\sqrt{b^{2}-4ac}}{2a}
### Output
______
/ 2
-b+√ b -4ac
x=-----------
2a
## Cubic Formula
(I hope)
### Input
x=\frac{\root{3}{-2b^{3}+9abc-27a^{2}d+\sqrt{4(-b^{2}+3ac)^{3}+(-2b^{3}+9abc-27a^{2}d)^{2}}}}{3\root{3}{2}a} -
\frac{b}{3a} -
\frac{\root{3}{2}(-b^{2}+3ac)}{3a\root{3}{-2b^{3}+9abc-27a^{2}d+\sqrt{4(-b^{2}+3ac)^{3}+(-2b^{3}+9abc-27a^{2}d)^{2}}}}
### Output
3________________________________________________
/ ______________________________
/ 3 2 / 2 3 3 2 2 3_ 2
√ -2b +9abc-27a d+√ 4(-b +3ac) +(-2b +9abc-27a d) b √2(-b +3ac)
x=--------------------------------------------------- - -- - -----------------------------------------------------
3_ 3a 3________________________________________________
3√2a / ______________________________
/ 3 2 / 2 3 3 2 2
3a√ -2b +9abc-27a d+√ 4(-b +3ac) +(-2b +9abc-27a d)
# Notes and Further Reading
Solutions have a recommended order of *new* again - feel free to change it back if you prefer *best*. If you want to
play around some with LaTeX, try [this online tool](http://www.codecogs.com/latex/eqneditor.php).
Got any cool challenge ideas? Submit them to /r/DailyProgrammer_Ideas!
"""
def main():
    """Entry point for the TeXSCII challenge; left unimplemented (stub)."""
    pass
if __name__ == "__main__":
    # Run the (currently empty) solution when executed as a script.
    main()
| 45.197917
| 118
| 0.555428
| 594
| 4,339
| 3.757576
| 0.397306
| 0.007168
| 0.007168
| 0.014337
| 0.110215
| 0.067204
| 0.067204
| 0.060932
| 0.060932
| 0.060932
| 0
| 0.061131
| 0.294999
| 4,339
| 95
| 119
| 45.673684
| 0.665577
| 0.98018
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
58e17351b70266ac4e02137b17bf13bfe4aae177
| 188
|
py
|
Python
|
workflow/rules/__paths__.py
|
welfare-state-analytics/pyriksprot_tagger
|
769cc3460e0bbd23d3c3276dfeb76a2a8cd7a081
|
[
"MIT"
] | null | null | null |
workflow/rules/__paths__.py
|
welfare-state-analytics/pyriksprot_tagger
|
769cc3460e0bbd23d3c3276dfeb76a2a8cd7a081
|
[
"MIT"
] | 8
|
2021-03-13T13:23:39.000Z
|
2021-08-17T07:13:49.000Z
|
workflow/rules/__paths__.py
|
welfare-state-analytics/pyriksprot_tagger
|
769cc3460e0bbd23d3c3276dfeb76a2a8cd7a081
|
[
"MIT"
] | null | null | null |
import os
import sys
def find_root(d: str) -> str:
    """Return the current working directory's prefix up to and including *d*.

    Splits the CWD string on the first occurrence of *d*; if *d* does not
    appear in the CWD, the whole CWD is used as the prefix.
    """
    cwd = os.getcwd()
    prefix = cwd.split(d)[0]
    return os.path.join(prefix, d)
def fix_path():
    """Prepend the westac_parlaclarin_pipeline project root to ``sys.path``."""
    project_root = find_root("westac_parlaclarin_pipeline")
    sys.path.insert(0, project_root)
| 17.090909
| 64
| 0.68617
| 32
| 188
| 3.875
| 0.59375
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0125
| 0.148936
| 188
| 10
| 65
| 18.8
| 0.7625
| 0
| 0
| 0
| 0
| 0
| 0.143617
| 0.143617
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
45127d3805bac881ef06b35f5af36073e671e78f
| 148
|
py
|
Python
|
test_txt.py
|
hanshantong/DataAnalysisLearning
|
182bf705c9138ff6e239492304162c544c83141f
|
[
"Apache-2.0"
] | null | null | null |
test_txt.py
|
hanshantong/DataAnalysisLearning
|
182bf705c9138ff6e239492304162c544c83141f
|
[
"Apache-2.0"
] | null | null | null |
test_txt.py
|
hanshantong/DataAnalysisLearning
|
182bf705c9138ff6e239492304162c544c83141f
|
[
"Apache-2.0"
] | null | null | null |
# Open read/write, skip to byte offset 15, and print the rest of that line.
with open('test.txt', 'r+') as f:
    f.seek(15)
    print(f.readline())
# Read the whole file again, collecting every line into a list.
with open('test.txt') as f:
    data = list(f)
print(data)
| 16.444444
| 33
| 0.601351
| 29
| 148
| 3.068966
| 0.551724
| 0.179775
| 0.269663
| 0.337079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.175676
| 148
| 9
| 34
| 16.444444
| 0.713115
| 0
| 0
| 0
| 0
| 0
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
45144f726c2649b602d095b321e91620854fae92
| 89
|
py
|
Python
|
deberes/apps.py
|
MonkeyAndres/AlexioProject
|
561fdbbfb561bb2ee40c6e90696ba4759029959d
|
[
"MIT"
] | null | null | null |
deberes/apps.py
|
MonkeyAndres/AlexioProject
|
561fdbbfb561bb2ee40c6e90696ba4759029959d
|
[
"MIT"
] | null | null | null |
deberes/apps.py
|
MonkeyAndres/AlexioProject
|
561fdbbfb561bb2ee40c6e90696ba4759029959d
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class DeberesConfig(AppConfig):
    """Django AppConfig for the 'deberes' application."""
    # App label Django uses to register this application.
    name = 'deberes'
| 14.833333
| 33
| 0.752809
| 10
| 89
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 34
| 17.8
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
188bf3fc1c42e1ea75d3bc79aebdae9558fec5da
| 475
|
py
|
Python
|
test/test_body59.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
test/test_body59.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
test/test_body59.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import unittest
class TestBody59(unittest.TestCase):
    """Body59 unit test stubs"""

    def setUp(self):
        # No fixtures required yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testBody59(self):
        """Test Body59"""
        # FIXME: construct object with mandatory attributes with example values
        # model = gitee.models.body59.Body59()  # noqa: E501
        pass
if __name__ == '__main__':
    # Run the stub tests with the stdlib runner when executed directly.
    unittest.main()
| 17.592593
| 79
| 0.633684
| 53
| 475
| 5.433962
| 0.679245
| 0.055556
| 0.076389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045977
| 0.267368
| 475
| 26
| 80
| 18.269231
| 0.781609
| 0.355789
| 0
| 0.272727
| 0
| 0
| 0.027397
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 1
| 0.272727
| false
| 0.272727
| 0.181818
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
188bf77028e1263d11cb5550cb6bea6b3822f7c5
| 730
|
py
|
Python
|
Day 58/qubit.py
|
AN3223/code-a-day
|
dd98713159a921c1266dabff2a9c298509dfbe54
|
[
"MIT"
] | null | null | null |
Day 58/qubit.py
|
AN3223/code-a-day
|
dd98713159a921c1266dabff2a9c298509dfbe54
|
[
"MIT"
] | null | null | null |
Day 58/qubit.py
|
AN3223/code-a-day
|
dd98713159a921c1266dabff2a9c298509dfbe54
|
[
"MIT"
] | null | null | null |
from random import randrange
class Qubit:
    """Quick and dirty representation of a qubit as a point (x, y, z)."""

    def __init__(self, x=0, y=0, z=-1):
        # Defaults place the qubit at the "south pole" (z = -1).
        self.x = x
        self.y = y
        self.z = z

    def __repr__(self):
        return f'Qubit(x={self.x}, y={self.y}, z={self.z})'

    def measure(self):
        """Collapse to 0 or 1; biased by z when x + y == 0, otherwise uniform."""
        if self.x + self.y != 0:
            # Any x/y component present: outcome is a fair coin flip.
            return randrange(0, 2)
        threshold = self.z * 100
        return 1 if randrange(-100, 101) < threshold else 0
def x(q: Qubit) -> Qubit:
    """X gate: return a new Qubit with the z component negated."""
    flipped_z = -q.z
    return Qubit(q.x, q.y, flipped_z)
def y(q: Qubit) -> Qubit:
    """Y gate: return a new Qubit with the y component negated."""
    flipped_y = -q.y
    return Qubit(q.x, flipped_y, q.z)
def z(q: Qubit) -> Qubit:
    """Z gate: return a new Qubit with the x component negated."""
    flipped_x = -q.x
    return Qubit(flipped_x, q.y, q.z)
def h(q: Qubit) -> Qubit:
    """H gate: return a new Qubit with the x and z components exchanged."""
    swapped_x, swapped_z = q.z, q.x
    return Qubit(swapped_x, q.y, swapped_z)
| 19.210526
| 59
| 0.505479
| 123
| 730
| 2.934959
| 0.252033
| 0.055402
| 0.121884
| 0.188366
| 0.32133
| 0.32133
| 0.257618
| 0.257618
| 0.257618
| 0.257618
| 0
| 0.034765
| 0.330137
| 730
| 37
| 60
| 19.72973
| 0.703476
| 0.056164
| 0
| 0
| 0
| 0
| 0.060029
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.304348
| false
| 0
| 0.043478
| 0.217391
| 0.73913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
189a358017c39ba49badb36ef706dd2df2c787f5
| 85
|
py
|
Python
|
script/utils.py
|
jayeolasegun/Term-Deposit-Classification
|
e1ce928814f26712813f600bc704183d5a9e603d
|
[
"CC0-1.0"
] | null | null | null |
script/utils.py
|
jayeolasegun/Term-Deposit-Classification
|
e1ce928814f26712813f600bc704183d5a9e603d
|
[
"CC0-1.0"
] | null | null | null |
script/utils.py
|
jayeolasegun/Term-Deposit-Classification
|
e1ce928814f26712813f600bc704183d5a9e603d
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 30 18:07:30 2020
@author: user
"""
| 9.444444
| 35
| 0.552941
| 14
| 85
| 3.357143
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19697
| 0.223529
| 85
| 8
| 36
| 10.625
| 0.515152
| 0.858824
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
18b208b6ce2ae6c4d2e55c57e3ca86e6aa8fd179
| 119
|
py
|
Python
|
controledefornecedores/apps.py
|
brendo1806/Estudos1
|
b3db0d622a1b80c98a3a14800852de7c822f4cc0
|
[
"Apache-2.0"
] | null | null | null |
controledefornecedores/apps.py
|
brendo1806/Estudos1
|
b3db0d622a1b80c98a3a14800852de7c822f4cc0
|
[
"Apache-2.0"
] | null | null | null |
controledefornecedores/apps.py
|
brendo1806/Estudos1
|
b3db0d622a1b80c98a3a14800852de7c822f4cc0
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class ControledefornecedoresConfig(AppConfig):
    """Django AppConfig for the 'controledefornecedores' application."""
    # App label Django uses to register this application.
    name = 'controledefornecedores'
| 19.833333
| 46
| 0.815126
| 10
| 119
| 9.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12605
| 119
| 5
| 47
| 23.8
| 0.932692
| 0
| 0
| 0
| 0
| 0
| 0.184874
| 0.184874
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
18bd147bc5abe998fb09aeb24e3efec8d27502f3
| 82
|
py
|
Python
|
src/data/preprocess.py
|
juliazam/healthcare_ASEAN
|
3cc451f723124f18b2d11c79ff80a6c5a9354c6e
|
[
"MIT"
] | 25
|
2016-04-24T14:00:36.000Z
|
2021-01-07T07:26:00.000Z
|
src/data/preprocess.py
|
shikhakhanna19/healthcare_ASEAN
|
3f89ec67fde825bf3b6275cee0e8f13812533c1e
|
[
"MIT"
] | 25
|
2016-06-24T07:04:39.000Z
|
2020-10-03T23:01:41.000Z
|
src/data/preprocess.py
|
shikhakhanna19/healthcare_ASEAN
|
3f89ec67fde825bf3b6275cee0e8f13812533c1e
|
[
"MIT"
] | 112
|
2016-06-24T01:41:00.000Z
|
2020-10-03T00:33:22.000Z
|
# Script to process the data into processed folder for visualization and analysis
| 41
| 81
| 0.829268
| 12
| 82
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158537
| 82
| 1
| 82
| 82
| 0.985507
| 0.963415
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
18be98673f773636e73d9e2c5d4b717bcb6352a6
| 266
|
py
|
Python
|
ding/utils/data/__init__.py
|
davide97l/DI-engine
|
d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423
|
[
"Apache-2.0"
] | 1
|
2022-03-21T16:15:39.000Z
|
2022-03-21T16:15:39.000Z
|
ding/utils/data/__init__.py
|
jiaruonan/DI-engine
|
268d77db3cb54401b2cfc83e2bc3ec87c31e7b83
|
[
"Apache-2.0"
] | null | null | null |
ding/utils/data/__init__.py
|
jiaruonan/DI-engine
|
268d77db3cb54401b2cfc83e2bc3ec87c31e7b83
|
[
"Apache-2.0"
] | null | null | null |
from .collate_fn import diff_shape_collate, default_collate, default_decollate, timestep_collate, ttorch_collate
from .dataloader import AsyncDataLoader
from .dataset import NaiveRLDataset, D4RLDataset, HDF5Dataset, create_dataset, hdf5_save, offline_data_save_type
| 66.5
| 112
| 0.87594
| 33
| 266
| 6.69697
| 0.666667
| 0.126697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012245
| 0.078947
| 266
| 3
| 113
| 88.666667
| 0.889796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
18d66e70073f23269425c2478f78b85369b8f142
| 194
|
py
|
Python
|
cdc/streams/types.py
|
getsentry/cdc
|
8643ee7a5bf491755c46169c6841131521d34b6c
|
[
"Apache-2.0"
] | 4
|
2021-06-25T07:51:59.000Z
|
2022-01-13T11:31:13.000Z
|
cdc/streams/types.py
|
getsentry/cdc
|
8643ee7a5bf491755c46169c6841131521d34b6c
|
[
"Apache-2.0"
] | 4
|
2021-04-05T23:24:27.000Z
|
2021-05-17T21:36:17.000Z
|
cdc/streams/types.py
|
getsentry/cdc
|
8643ee7a5bf491755c46169c6841131521d34b6c
|
[
"Apache-2.0"
] | null | null | null |
from typing import Mapping, NamedTuple, NewType, Optional
from cdc.types import Payload
class StreamMessage(NamedTuple):
    """A single message on a CDC stream: a payload plus optional metadata."""

    # Message body (project-defined Payload type from cdc.types).
    payload: Payload
    # Optional string key/value pairs attached to the message; None when absent.
    metadata: Optional[Mapping[str, str]] = None
| 21.555556
| 57
| 0.762887
| 23
| 194
| 6.434783
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159794
| 194
| 8
| 58
| 24.25
| 0.907975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
18e9ef25ce58e9b7ebdbf6ed818e94cbbc41710e
| 348
|
py
|
Python
|
tests/test_qtile.py
|
edwardoughton/itmlogic
|
1e454e3b4b3c8e24c4bc74ec6c076a2f97d86d23
|
[
"MIT"
] | 24
|
2019-12-05T17:46:10.000Z
|
2022-03-24T07:29:00.000Z
|
tests/test_qtile.py
|
edwardoughton/itmlogic
|
1e454e3b4b3c8e24c4bc74ec6c076a2f97d86d23
|
[
"MIT"
] | 9
|
2019-12-05T16:20:58.000Z
|
2021-09-09T16:34:32.000Z
|
tests/test_qtile.py
|
edwardoughton/itmlogic
|
1e454e3b4b3c8e24c4bc74ec6c076a2f97d86d23
|
[
"MIT"
] | 9
|
2019-07-09T14:42:30.000Z
|
2021-08-25T16:54:26.000Z
|
import pytest
from itmlogic.misc.qtile import qtile
def test_qtile(setup_a):
    """
    Test the routine for returning the ith entry of a given vector after
    sorting in descending order, to obtain user-defined quantile values.
    """
    expected = {5: 100, 8: 40, 3: 140}
    for position, value in expected.items():
        assert qtile(setup_a, position) == value
| 26.769231
| 83
| 0.695402
| 54
| 348
| 4.388889
| 0.685185
| 0.168776
| 0.185654
| 0.21519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04059
| 0.221264
| 348
| 12
| 84
| 29
| 0.833948
| 0.393678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
18fc9ffc5603cd013e01f8c438e8739e6c8f8e56
| 65
|
py
|
Python
|
gatenlp/lang/de/__init__.py
|
joancf/python-gatenlp
|
21441d72ded19e9348052e99ac5bc1fc6af7ab6e
|
[
"Apache-2.0"
] | 30
|
2020-04-18T12:28:15.000Z
|
2022-02-18T21:31:18.000Z
|
gatenlp/lang/de/__init__.py
|
joancf/python-gatenlp
|
21441d72ded19e9348052e99ac5bc1fc6af7ab6e
|
[
"Apache-2.0"
] | 133
|
2019-10-16T07:41:59.000Z
|
2022-03-31T07:27:07.000Z
|
gatenlp/lang/de/__init__.py
|
joancf/python-gatenlp
|
21441d72ded19e9348052e99ac5bc1fc6af7ab6e
|
[
"Apache-2.0"
] | 4
|
2021-01-20T08:12:19.000Z
|
2021-10-21T13:29:44.000Z
|
"""
Subpackage for German language resources and annotators.
"""
| 16.25
| 56
| 0.753846
| 7
| 65
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 65
| 3
| 57
| 21.666667
| 0.875
| 0.861538
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e13a98ae3b2a269ba6464e7d4710d4312a1b153a
| 248
|
py
|
Python
|
python/ray/serve/pipeline/conftest.py
|
mgelbart/ray
|
4cec2286572e368a4bd64aae467751a384eff62d
|
[
"Apache-2.0"
] | 22
|
2018-05-08T05:52:34.000Z
|
2020-04-01T10:09:55.000Z
|
python/ray/serve/pipeline/conftest.py
|
mgelbart/ray
|
4cec2286572e368a4bd64aae467751a384eff62d
|
[
"Apache-2.0"
] | 73
|
2021-09-25T07:11:39.000Z
|
2022-03-26T07:10:59.000Z
|
python/ray/serve/pipeline/conftest.py
|
mgelbart/ray
|
4cec2286572e368a4bd64aae467751a384eff62d
|
[
"Apache-2.0"
] | 10
|
2018-04-27T10:50:59.000Z
|
2020-02-24T02:41:43.000Z
|
import pytest
import ray
from ray.serve.tests.conftest import _shared_serve_instance, serve_instance # noqa
@pytest.fixture(scope="session")
def shared_ray_instance():
    # Session-scoped fixture: starts Ray once and yields the ray.init() handle
    # to every test in the session. NOTE(review): assumes the host tolerates
    # num_cpus=36 oversubscription — confirm on CI hardware.
    yield ray.init(num_cpus=36, _system_config={"task_retry_delay_ms": 50})
| 24.8
| 83
| 0.790323
| 37
| 248
| 4.972973
| 0.702703
| 0.141304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018018
| 0.104839
| 248
| 9
| 84
| 27.555556
| 0.810811
| 0.016129
| 0
| 0
| 0
| 0
| 0.107438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e1449dbf416adb70fd23f08a8926f9af413892ab
| 86
|
py
|
Python
|
Solutions/7kyu/7kyu_complementary_dna.py
|
citrok25/Codewars-1
|
dc641c5079e2e8b5955eb027fd15427e5bdb2e26
|
[
"MIT"
] | 46
|
2017-08-24T09:27:57.000Z
|
2022-02-25T02:24:33.000Z
|
Solutions/7kyu/7kyu_complementary_dna.py
|
abbhishek971/Codewars
|
9e761811db724da1e8aae44594df42b4ee879a16
|
[
"MIT"
] | null | null | null |
Solutions/7kyu/7kyu_complementary_dna.py
|
abbhishek971/Codewars
|
9e761811db724da1e8aae44594df42b4ee879a16
|
[
"MIT"
] | 35
|
2017-08-01T22:09:48.000Z
|
2022-02-18T17:21:37.000Z
|
# Translation table A<->T, C<->G, built once at import time.
_COMPLEMENT = str.maketrans('ATCG', 'TAGC')

def DNA_strand(dna):
    """Return the complementary DNA strand of *dna* (A<->T, C<->G).

    Fix: the original used ``__import__('string').maketrans``, which existed
    only in Python 2 — ``string.maketrans`` was removed in Python 3, so the
    lambda raised AttributeError. ``str.maketrans`` is the Python 3 equivalent.
    """
    return dna.translate(_COMPLEMENT)
| 43
| 85
| 0.744186
| 11
| 86
| 5.363636
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 86
| 1
| 86
| 86
| 0.728395
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e16049da0f62a4624cbfba93dbc58f37f459f03b
| 149
|
py
|
Python
|
python/testData/testRunner/env/unit/relativeImports/relative_imports/tests/test_imps.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/testRunner/env/unit/relativeImports/relative_imports/tests/test_imps.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/testRunner/env/unit/relativeImports/relative_imports/tests/test_imps.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from ..utils import util
class MyTest(unittest.TestCase):
    """Exercises the package-relative import of ``util`` from ``..utils``."""

    def test_multiply(self):
        # Proves the relative import resolved and util.multiply works.
        # NOTE(review): assertEquals is a deprecated alias (removed in Python
        # 3.12); kept as-is here because this file is IDE test-fixture data.
        self.assertEquals(4, util.multiply(2, 2))
| 18.625
| 45
| 0.744966
| 21
| 149
| 5.238095
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023438
| 0.14094
| 149
| 7
| 46
| 21.285714
| 0.835938
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e1719f7848d9020b288aafa772ca6db8b1e1217b
| 34
|
py
|
Python
|
python/testData/copyPaste/EmptyBranchBlock.src.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2018-12-29T09:53:39.000Z
|
2018-12-29T09:53:42.000Z
|
python/testData/copyPaste/EmptyBranchBlock.src.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/copyPaste/EmptyBranchBlock.src.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
<selection>x = 1
y = 2</selection>
| 17
| 17
| 0.647059
| 6
| 34
| 3.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0.147059
| 34
| 2
| 17
| 17
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e17b7a7559962418087621ff9785dbf678267fef
| 208
|
py
|
Python
|
EPL21232/apps/accounts/views.py
|
nverbois/TFE21-232
|
7113837b5263b5c508bfc6903cb6982b48aa7ee4
|
[
"MIT"
] | null | null | null |
EPL21232/apps/accounts/views.py
|
nverbois/TFE21-232
|
7113837b5263b5c508bfc6903cb6982b48aa7ee4
|
[
"MIT"
] | null | null | null |
EPL21232/apps/accounts/views.py
|
nverbois/TFE21-232
|
7113837b5263b5c508bfc6903cb6982b48aa7ee4
|
[
"MIT"
] | null | null | null |
from django.views.generic.base import TemplateView
from django.contrib.auth.mixins import LoginRequiredMixin
class ProfileView(LoginRequiredMixin, TemplateView):
template_name = "accounts/profil.html"
| 26
| 57
| 0.826923
| 23
| 208
| 7.434783
| 0.782609
| 0.116959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100962
| 208
| 7
| 58
| 29.714286
| 0.914439
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e1922afc3ab942f928093455f5791bfbc190b2a1
| 51
|
py
|
Python
|
All_Source_Code/GatherData/GatherData_16.py
|
APMonitor/pds
|
fa9a7ec920802de346dcdf7f5dd92d752142c16f
|
[
"MIT"
] | 11
|
2021-01-21T09:46:29.000Z
|
2022-03-16T19:33:10.000Z
|
All_Source_Code/GatherData/GatherData_16.py
|
the-mahapurush/pds
|
7cb4087dd8e75cb1e9b2a4283966c798175f61f7
|
[
"MIT"
] | 1
|
2022-03-16T19:47:09.000Z
|
2022-03-16T20:11:50.000Z
|
All_Source_Code/GatherData/GatherData_16.py
|
the-mahapurush/pds
|
7cb4087dd8e75cb1e9b2a4283966c798175f61f7
|
[
"MIT"
] | 12
|
2021-02-08T21:11:11.000Z
|
2022-03-20T12:42:49.000Z
|
dx = dx.set_index('Time')
dy = dy.set_index('Time')
| 25.5
| 25
| 0.666667
| 10
| 51
| 3.2
| 0.5
| 0.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 51
| 2
| 26
| 25.5
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e1a939e6920718718e508282d7f30594ef54ad13
| 120
|
py
|
Python
|
traffic/core/types.py
|
amirdib/traffic
|
d4af963e8f9aaff64be58b8407c527fe5a616d1c
|
[
"MIT"
] | 209
|
2018-06-29T10:55:27.000Z
|
2022-03-31T19:30:06.000Z
|
traffic/core/types.py
|
amirdib/traffic
|
d4af963e8f9aaff64be58b8407c527fe5a616d1c
|
[
"MIT"
] | 151
|
2018-10-05T12:47:53.000Z
|
2022-02-20T14:47:57.000Z
|
traffic/core/types.py
|
amirdib/traffic
|
d4af963e8f9aaff64be58b8407c527fe5a616d1c
|
[
"MIT"
] | 68
|
2018-12-19T13:42:34.000Z
|
2022-03-11T15:33:41.000Z
|
from typing import Callable, Iterable, TypeVar
T = TypeVar("T")
ProgressbarType = Callable[[Iterable[T]], Iterable[T]]
| 24
| 54
| 0.741667
| 15
| 120
| 5.933333
| 0.533333
| 0.359551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 120
| 4
| 55
| 30
| 0.839623
| 0
| 0
| 0
| 0
| 0
| 0.008333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e1b9bcdaf3b6cd460df780c92a4b742cbb8a51d0
| 317
|
py
|
Python
|
rochambeau/opponents.py
|
kkarrancsu/rl_course
|
8dff4f36a4c4bb6426c69a8e18f55f72807cc8b2
|
[
"Apache-2.0"
] | null | null | null |
rochambeau/opponents.py
|
kkarrancsu/rl_course
|
8dff4f36a4c4bb6426c69a8e18f55f72807cc8b2
|
[
"Apache-2.0"
] | null | null | null |
rochambeau/opponents.py
|
kkarrancsu/rl_course
|
8dff4f36a4c4bb6426c69a8e18f55f72807cc8b2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import numpy as np
from numpy.random import RandomState
class UniformRandomOpponent:
def __init__(self, num_actions, seed):
self.num_actions = num_actions
self.random_state = RandomState(seed)
def act(self):
return self.random_state.randint(self.num_actions)
| 22.642857
| 58
| 0.722397
| 42
| 317
| 5.214286
| 0.547619
| 0.182648
| 0.191781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195584
| 317
| 13
| 59
| 24.384615
| 0.858824
| 0.063091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.125
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
e1c791d605c6a332cd26b2cc1a7da129ef8bdde0
| 103
|
py
|
Python
|
asu/index.py
|
greenHandLjj/PythonStudy
|
3a9a5454876a3021863b157f3c7c1abdef494adb
|
[
"MulanPSL-1.0"
] | 2
|
2021-01-11T01:19:13.000Z
|
2021-01-12T10:02:04.000Z
|
asu/index.py
|
greenHandLjj/PythonStudy
|
3a9a5454876a3021863b157f3c7c1abdef494adb
|
[
"MulanPSL-1.0"
] | 1
|
2021-01-11T09:52:32.000Z
|
2021-01-11T09:55:00.000Z
|
asu/index.py
|
greenHandLjj/PythonStudy
|
3a9a5454876a3021863b157f3c7c1abdef494adb
|
[
"MulanPSL-1.0"
] | null | null | null |
# print('adas')
import sys
sys.path.append('..')
from test import multify
multify.multi(1, 5)
| 12.875
| 25
| 0.640777
| 15
| 103
| 4.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024096
| 0.194175
| 103
| 8
| 26
| 12.875
| 0.771084
| 0.126214
| 0
| 0
| 0
| 0
| 0.02439
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e1c86d85627b68ffd17858e75073eb25c0a161ad
| 148
|
py
|
Python
|
tests/urls.py
|
drewbrew/drf-nested-routers
|
520b2df3206c4d23a2169361488ce8b129f68026
|
[
"Apache-2.0"
] | null | null | null |
tests/urls.py
|
drewbrew/drf-nested-routers
|
520b2df3206c4d23a2169361488ce8b129f68026
|
[
"Apache-2.0"
] | null | null | null |
tests/urls.py
|
drewbrew/drf-nested-routers
|
520b2df3206c4d23a2169361488ce8b129f68026
|
[
"Apache-2.0"
] | 1
|
2020-11-05T09:42:57.000Z
|
2020-11-05T09:42:57.000Z
|
from django.conf.urls import url, include
from tests.serializers.urls import urlpatterns as serializers_urls
urlpatterns = [
] + serializers_urls
| 21.142857
| 66
| 0.810811
| 19
| 148
| 6.210526
| 0.578947
| 0.381356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128378
| 148
| 6
| 67
| 24.666667
| 0.914729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e1c8f0816b63e9fd25ba488d6334a185e70b7d5e
| 603
|
py
|
Python
|
src/pipeline.py
|
avbatchelor/insight-articles-project
|
852b338b786cb5b9c281fcec2e378aed8d3dc617
|
[
"MIT"
] | null | null | null |
src/pipeline.py
|
avbatchelor/insight-articles-project
|
852b338b786cb5b9c281fcec2e378aed8d3dc617
|
[
"MIT"
] | null | null | null |
src/pipeline.py
|
avbatchelor/insight-articles-project
|
852b338b786cb5b9c281fcec2e378aed8d3dc617
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 18 22:05:23 2018
@author: Alex
Pipeline
"""
#%% Import packages
import os
os.chdir('C:\\Users\\Alex\\Documents\\GitHub\\insight-articles-project\\src\\scraping\\')
#%%
'''
Input = local HTML
Output = Save article strings and document sentences
'''
import read_and_parse
#%%
import get_article_info
#%%
'''
Topic modeling
'''
#topic_modeling()
#%% Generate graph
os.chdir('C:\\Users\\Alex\\Documents\\GitHub\\insight-articles-project\\src\\topic modeling\\')
import generate_graph
#%% Linear topic sequence
import linear_topic_sequence
| 15.461538
| 96
| 0.693201
| 78
| 603
| 5.25641
| 0.615385
| 0.095122
| 0.039024
| 0.063415
| 0.278049
| 0.278049
| 0.278049
| 0.278049
| 0.278049
| 0.278049
| 0
| 0.025
| 0.137645
| 603
| 39
| 97
| 15.461538
| 0.763462
| 0.280265
| 0
| 0
| 0
| 0.142857
| 0.506329
| 0.471519
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
befcb77a1215260286d6db07785f539135585918
| 179
|
py
|
Python
|
tests/test_version.py
|
quadio-media/acru-l-toolkit
|
6702e6b8261adc57f328b0d16fb5992bd3dedde6
|
[
"MIT"
] | null | null | null |
tests/test_version.py
|
quadio-media/acru-l-toolkit
|
6702e6b8261adc57f328b0d16fb5992bd3dedde6
|
[
"MIT"
] | null | null | null |
tests/test_version.py
|
quadio-media/acru-l-toolkit
|
6702e6b8261adc57f328b0d16fb5992bd3dedde6
|
[
"MIT"
] | 1
|
2020-12-30T19:43:49.000Z
|
2020-12-30T19:43:49.000Z
|
import toml
from acrul_toolkit import __version__
def test_version():
version = toml.load("./pyproject.toml")["tool"]["poetry"]["version"]
assert __version__ == version
| 22.375
| 72
| 0.715084
| 21
| 179
| 5.619048
| 0.619048
| 0.237288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139665
| 179
| 7
| 73
| 25.571429
| 0.766234
| 0
| 0
| 0
| 0
| 0
| 0.184358
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
8308e9458cd04d38209d25cb6497de9afc018892
| 363
|
py
|
Python
|
selia_admin/admin/types.py
|
CONABIO-audio/selia-admin
|
0fe3326d63de7904f86f3040cb613801737880f7
|
[
"BSD-4-Clause"
] | null | null | null |
selia_admin/admin/types.py
|
CONABIO-audio/selia-admin
|
0fe3326d63de7904f86f3040cb613801737880f7
|
[
"BSD-4-Clause"
] | 9
|
2020-02-06T02:10:46.000Z
|
2022-01-13T01:53:53.000Z
|
selia_admin/admin/types.py
|
CONABIO-audio/selia-admin
|
0fe3326d63de7904f86f3040cb613801737880f7
|
[
"BSD-4-Clause"
] | null | null | null |
from django.contrib import admin
class EventTypeAdmin(admin.ModelAdmin):
search_fields = ['name']
autocomplete_fields = ['should_imply']
class TermTypeAdmin(admin.ModelAdmin):
search_fields = ['name']
class AnnotationTypeAdmin(admin.ModelAdmin):
search_fields = ['name']
class ItemTypeAdmin(admin.ModelAdmin):
search_fields = ['name']
| 19.105263
| 44
| 0.732782
| 37
| 363
| 7.027027
| 0.459459
| 0.230769
| 0.323077
| 0.415385
| 0.515385
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 363
| 18
| 45
| 20.166667
| 0.844156
| 0
| 0
| 0.4
| 0
| 0
| 0.077135
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
8361b08be26b785dfcd46227a7a5699f6dde1827
| 6,102
|
py
|
Python
|
cogs/utils/permission_checks.py
|
quiprr/gir
|
c6910f3f61d15d52da7b12e57d1d4f159c61689b
|
[
"MIT"
] | null | null | null |
cogs/utils/permission_checks.py
|
quiprr/gir
|
c6910f3f61d15d52da7b12e57d1d4f159c61689b
|
[
"MIT"
] | null | null | null |
cogs/utils/permission_checks.py
|
quiprr/gir
|
c6910f3f61d15d52da7b12e57d1d4f159c61689b
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
import cogs.utils.context as context
class PermissionsFailure(commands.BadArgument):
def __init__(self, message):
super().__init__(message)
class ModsAndAboveMember(commands.Converter):
async def convert(self, ctx: context.Context, argument):
user = await commands.MemberConverter().convert(ctx, argument)
await check_invokee(ctx, user)
return user
class ModsAndAboveExternal(commands.Converter):
async def convert(self, ctx: context.Context, argument):
try:
user = await commands.MemberConverter().convert(ctx, argument)
except PermissionsFailure as e:
raise e
except Exception:
try:
argument = int(argument)
user = await ctx.bot.fetch_user(argument)
except Exception:
raise PermissionsFailure("Could not parse argument \"user\".")
except discord.NotFound:
raise PermissionsFailure(
f"Couldn't find user with ID {argument}")
await check_invokee(ctx, user)
return user
async def check_invokee(ctx, user):
if isinstance(user, discord.Member):
if user.id == ctx.author.id:
await ctx.message.add_reaction("🤔")
raise PermissionsFailure("You can't call that on yourself.")
if user.id == ctx.bot.user.id:
await ctx.message.add_reaction("🤔")
raise PermissionsFailure("You can't call that on me :(")
if user:
if isinstance(user, discord.Member):
if user.top_role >= ctx.author.top_role:
raise PermissionsFailure(
message=f"{user.mention}'s top role is the same or higher than yours!")
####################
# Channels
####################
def bot_channel_only_unless_mod():
async def predicate(ctx):
bot_chan = ctx.bot.settings.guild().channel_botspam
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 5) and ctx.channel.id != bot_chan:
raise PermissionsFailure(f"Command only allowed in <#{bot_chan}>.")
return True
return commands.check(predicate)
####################
# Member Roles
####################
def memplus_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 1):
raise PermissionsFailure("You do not have permission to use this command.")
return True
return commands.check(predicate)
def mempro_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 2):
raise PermissionsFailure("You do not have permission to use this command.")
return True
return commands.check(predicate)
def memed_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 3):
raise PermissionsFailure("You do not have permission to use this command.")
return True
return commands.check(predicate)
def genius_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 4):
raise PermissionsFailure("You do not have permission to use this command.")
return True
return commands.check(predicate)
####################
# Staff Roles
####################
def submod_or_admin_and_up():
async def predicate(ctx):
db = ctx.bot.settings.guild()
submod = ctx.guild.get_role(db.role_sub_mod)
if not submod:
return
if not (ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 6) or submod in ctx.author.roles):
raise commands.BadArgument(
"You do not have permission to use this command.")
return True
return commands.check(predicate)
def genius_or_submod_and_up():
async def predicate(ctx):
db = ctx.bot.settings.guild()
submod = ctx.guild.get_role(db.role_sub_mod)
if not submod:
return
if not (ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 4) or submod in ctx.author.roles):
raise commands.BadArgument(
"You do not have permission to use this command.")
return True
return commands.check(predicate)
def mod_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 5):
raise PermissionsFailure(
"You do not have permission to use this command.")
return True
return commands.check(predicate)
def admin_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 6):
raise PermissionsFailure(
"You do not have permission to use this command.")
return True
return commands.check(predicate)
####################
# Other
####################
def guild_owner_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 7):
raise PermissionsFailure(
"You do not have permission to use this command.")
return True
return commands.check(predicate)
def bot_owner_and_up():
async def predicate(ctx):
if not ctx.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 9):
raise PermissionsFailure(
"You do not have permission to use this command.")
return True
return commands.check(predicate)
def ensure_invokee_role_lower_than_bot():
async def predicate(ctx):
if ctx.me.top_role < ctx.author.top_role:
raise PermissionsFailure(
f"Your top role is higher than mine. I can't change your nickname :(")
return True
return commands.check(predicate)
| 34.089385
| 113
| 0.61783
| 728
| 6,102
| 5.09478
| 0.171703
| 0.027501
| 0.052844
| 0.064707
| 0.761661
| 0.755729
| 0.735239
| 0.707738
| 0.639256
| 0.639256
| 0
| 0.002474
| 0.271386
| 6,102
| 179
| 114
| 34.089385
| 0.831309
| 0.006391
| 0
| 0.612403
| 0
| 0
| 0.128666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100775
| false
| 0
| 0.023256
| 0
| 0.364341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
55ed58c5a3b65b6c9cf7b94806ac24c08de88575
| 148
|
py
|
Python
|
czsc/Save/__init__.py
|
newlyedward/czsc
|
7b87298a09d2f317afbd9552b001a433f8954c78
|
[
"MIT"
] | null | null | null |
czsc/Save/__init__.py
|
newlyedward/czsc
|
7b87298a09d2f317afbd9552b001a433f8954c78
|
[
"MIT"
] | null | null | null |
czsc/Save/__init__.py
|
newlyedward/czsc
|
7b87298a09d2f317afbd9552b001a433f8954c78
|
[
"MIT"
] | null | null | null |
# coding :utf-8
import pymongo
ASCENDING = pymongo.ASCENDING
DESCENDING = pymongo.DESCENDING
from czsc.Save.save_tdx import save_financial_files
| 16.444444
| 51
| 0.817568
| 20
| 148
| 5.9
| 0.65
| 0.271186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007692
| 0.121622
| 148
| 8
| 52
| 18.5
| 0.9
| 0.087838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3612960b0d1e5d2bb6e4f88fb5d5205e0b9cd75f
| 71
|
py
|
Python
|
search_views/search.py
|
inmagik/django-search-views
|
315cbe8e6cac158884ced02069aa945bc7438dba
|
[
"MIT"
] | 31
|
2016-10-01T15:41:02.000Z
|
2022-02-12T18:44:20.000Z
|
search_views/search.py
|
bianchimro/django-search-views
|
315cbe8e6cac158884ced02069aa945bc7438dba
|
[
"MIT"
] | 8
|
2016-10-01T12:19:05.000Z
|
2019-09-30T09:33:47.000Z
|
search_views/search.py
|
bianchimro/django-search-views
|
315cbe8e6cac158884ced02069aa945bc7438dba
|
[
"MIT"
] | 6
|
2015-01-18T05:49:16.000Z
|
2016-09-03T22:33:32.000Z
|
#this file is here for backward compat optionally
from .views import *
| 23.666667
| 49
| 0.788732
| 11
| 71
| 5.090909
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 2
| 50
| 35.5
| 0.949153
| 0.676056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
361cf9d5fd819918d1d9ff0d726c0bb2ef826c81
| 1,105
|
py
|
Python
|
src/arpSpoofing_test.py
|
Cloudxtreme/NetDefense
|
2e6cd3ea1f61cb8c449d79a0cafdd807849e1d32
|
[
"MIT"
] | 2
|
2019-12-04T16:13:29.000Z
|
2020-03-18T10:40:40.000Z
|
src/arpSpoofing_test.py
|
Cloudxtreme/NetDefense
|
2e6cd3ea1f61cb8c449d79a0cafdd807849e1d32
|
[
"MIT"
] | null | null | null |
src/arpSpoofing_test.py
|
Cloudxtreme/NetDefense
|
2e6cd3ea1f61cb8c449d79a0cafdd807849e1d32
|
[
"MIT"
] | 2
|
2020-07-08T11:01:52.000Z
|
2021-03-14T08:54:32.000Z
|
#encoding: UTF-8
from arpSpoofing import ArpSpoofing
ACTIVE_MIN = 1 #Temps en que s'executa la defença de ARP Spoofing (minuts)
CHECK_PERIOD = 10 #Temps d'espera per comprobar si hi ha un atac (segons)
test = ArpSpoofing(ACTIVE_MIN, CHECK_PERIOD)
def stopThread_test():
assert(test.getState() == True)
test.stopThread()
assert(test.getState() == False)
#stopThread_test()
def getArpTable_test():
test.getArpTable()
assert('192.168.1.1' in test.getArpRecord())
#getArpTable_test()
def checkArpSpoofing_test():
arpInfo = {"192.168.1.1":"11:00:00:00:00:00", "192.168.1.2":"12:00:00:00:00:00", "192.168.1.3":"13:00:00:00:00:00",
"192.168.1.4":"14:00:00:00:00:00","192.168.1.5":"15:00:00:00:00:00"}
test.setArpRecord(arpInfo)
result = test.checkArpSpoofing()
assert(len(result) == 0)
arpInfo = {"192.168.1.1":"11:00:00:00:00:00", "192.168.1.2":"12:00:00:00:00:00", "192.168.1.3":"13:00:00:00:00:00",
"192.168.1.4":"14:00:00:00:00:00","192.168.1.5":"11:00:00:00:00:00"}
test.setArpRecord(arpInfo)
result = test.checkArpSpoofing()
assert(len(result) == 2)
#checkArpSpoofing_test()
| 31.571429
| 116
| 0.685068
| 198
| 1,105
| 3.772727
| 0.308081
| 0.21419
| 0.240964
| 0.21419
| 0.455154
| 0.455154
| 0.452477
| 0.452477
| 0.452477
| 0.452477
| 0
| 0.215726
| 0.102262
| 1,105
| 34
| 117
| 32.5
| 0.537298
| 0.167421
| 0
| 0.272727
| 0
| 0
| 0.318381
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 1
| 0.136364
| false
| 0
| 0.045455
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
36362e42ed64d87f93fc1ff7737f6dd7c64ca8a2
| 97
|
py
|
Python
|
Tutorials/01. 10 Days of Statistics/006. Day 4 - Binomial Distribution I.py
|
stonehengee/HackerrankPractice
|
ec052e7447391e40d1919cf0b641ff5023da3da3
|
[
"MIT"
] | null | null | null |
Tutorials/01. 10 Days of Statistics/006. Day 4 - Binomial Distribution I.py
|
stonehengee/HackerrankPractice
|
ec052e7447391e40d1919cf0b641ff5023da3da3
|
[
"MIT"
] | null | null | null |
Tutorials/01. 10 Days of Statistics/006. Day 4 - Binomial Distribution I.py
|
stonehengee/HackerrankPractice
|
ec052e7447391e40d1919cf0b641ff5023da3da3
|
[
"MIT"
] | null | null | null |
# Problem: https://www.hackerrank.com/challenges/s10-binomial-distribution-1/problem
# Score: 30
| 32.333333
| 84
| 0.783505
| 13
| 97
| 5.846154
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054945
| 0.061856
| 97
| 2
| 85
| 48.5
| 0.78022
| 0.948454
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3665f001293e8abe2d81c2e58c6d39f9757dc273
| 89
|
py
|
Python
|
officialWebsite/podcast/apps.py
|
NUMBART/officialWebsite
|
2eed541fde1fcc792f29b89a3837fe13fd0d610f
|
[
"MIT"
] | 34
|
2019-11-20T14:13:47.000Z
|
2022-02-07T06:46:42.000Z
|
officialWebsite/podcast/apps.py
|
NUMBART/officialWebsite
|
2eed541fde1fcc792f29b89a3837fe13fd0d610f
|
[
"MIT"
] | 399
|
2020-02-02T13:40:06.000Z
|
2022-03-12T01:07:41.000Z
|
officialWebsite/podcast/apps.py
|
NUMBART/officialWebsite
|
2eed541fde1fcc792f29b89a3837fe13fd0d610f
|
[
"MIT"
] | 95
|
2019-11-20T14:03:38.000Z
|
2022-02-01T11:02:34.000Z
|
from django.apps import AppConfig
class PodcastConfig(AppConfig):
name = "podcast"
| 14.833333
| 33
| 0.752809
| 10
| 89
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 34
| 17.8
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
366cc5e2e1246ee2a1008bf22075e292f82d8e5f
| 127
|
py
|
Python
|
venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py
|
liuzhongning/python_learn
|
47d471e40e6c25271faab549dfa235849264c3b4
|
[
"MIT"
] | 6,263
|
2017-01-20T17:41:36.000Z
|
2022-02-15T20:48:57.000Z
|
env/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py
|
aammjian/cotton
|
f72b814f795f79a4054688e465c8b0ae5560f3b7
|
[
"Apache-2.0"
] | 1,242
|
2015-01-22T14:56:46.000Z
|
2022-03-31T18:02:38.000Z
|
env/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py
|
aammjian/cotton
|
f72b814f795f79a4054688e465c8b0ae5560f3b7
|
[
"Apache-2.0"
] | 671
|
2017-09-21T08:04:01.000Z
|
2022-03-29T14:30:07.000Z
|
__all__ = ["Sequence"]
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
| 18.142857
| 40
| 0.755906
| 14
| 127
| 6.571429
| 0.642857
| 0.326087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181102
| 127
| 6
| 41
| 21.166667
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0.062992
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
36781d197098db55bc69bbe8cca410aceca2cccf
| 8,742
|
py
|
Python
|
atom/proton/python/proton_api/__init__.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 11
|
2019-04-16T02:11:17.000Z
|
2021-12-16T22:51:40.000Z
|
atom/proton/python/proton_api/__init__.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 81
|
2019-11-19T23:24:28.000Z
|
2022-03-28T11:35:47.000Z
|
atom/proton/python/proton_api/__init__.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 11
|
2020-07-08T02:29:56.000Z
|
2022-03-28T10:05:33.000Z
|
# coding: utf-8
# flake8: noqa
"""
Hydrogen Proton API
Financial engineering module of Hydrogen Atom # noqa: E501
OpenAPI spec version: 1.9.2
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from proton_api.api.annuities_api import AnnuitiesApi
from proton_api.api.business_financial_management_api import BusinessFinancialManagementApi
from proton_api.api.cards_api import CardsApi
from proton_api.api.financial_health_api import FinancialHealthApi
from proton_api.api.financial_planning_api import FinancialPlanningApi
from proton_api.api.goals_api import GoalsApi
from proton_api.api.life_insurance_api import LifeInsuranceApi
from proton_api.api.personal_financial_management_api import PersonalFinancialManagementApi
from proton_api.api.portfolio_construction_api import PortfolioConstructionApi
from proton_api.api.portfolio_management_api import PortfolioManagementApi
from proton_api.api.risk_scoring_api import RiskScoringApi
from proton_api.api.simulations_api import SimulationsApi
from proton_api.api.util_api import UtilApi
from proton_api.auth_api import AuthApi
from proton_api.environment import Environment
# import ApiClient
from proton_api.api_client import ApiClient
from proton_api.configuration import Configuration
# import models into sdk package
from proton_api.models.accumulation_goal_deposit_config import AccumulationGoalDepositConfig
from proton_api.models.annuity_calculator_accumulation_horizon_request import AnnuityCalculatorAccumulationHorizonRequest
from proton_api.models.annuity_calculator_annuity_amount_request import AnnuityCalculatorAnnuityAmountRequest
from proton_api.models.annuity_calculator_decumulation_horizon_request import AnnuityCalculatorDecumulationHorizonRequest
from proton_api.models.annuity_calculator_deposit_amount_request import AnnuityCalculatorDepositAmountRequest
from proton_api.models.annuity_calculator_initial_balance_request import AnnuityCalculatorInitialBalanceRequest
from proton_api.models.annuity_deposit_schedule import AnnuityDepositSchedule
from proton_api.models.answer import Answer
from proton_api.models.backtest_request import BacktestRequest
from proton_api.models.beneficiary_bequest_config import BeneficiaryBequestConfig
from proton_api.models.budget_aggregation_account import BudgetAggregationAccount
from proton_api.models.budget_calculator_request import BudgetCalculatorRequest
from proton_api.models.budget_component import BudgetComponent
from proton_api.models.budget_details import BudgetDetails
from proton_api.models.business_financial_health_check_request import BusinessFinancialHealthCheckRequest
from proton_api.models.business_invoice_analysis_request import BusinessInvoiceAnalysisRequest
from proton_api.models.calculator_deposit_schedule import CalculatorDepositSchedule
from proton_api.models.calculator_deposit_schedule1 import CalculatorDepositSchedule1
from proton_api.models.card_analysis_request import CardAnalysisRequest
from proton_api.models.card_limit_check_request import CardLimitCheckRequest
from proton_api.models.card_transaction_authorization_request import CardTransactionAuthorizationRequest
from proton_api.models.cash_analysis_request import CashAnalysisRequest
from proton_api.models.cash_flow_analysis_request import CashFlowAnalysisRequest
from proton_api.models.children_education_config import ChildrenEducationConfig
from proton_api.models.customer_analysis_request import CustomerAnalysisRequest
from proton_api.models.decision_tree_result_request import DecisionTreeResultRequest
from proton_api.models.decumulation_goal_deposit_config import DecumulationGoalDepositConfig
from proton_api.models.dimensional_risk_score_request import DimensionalRiskScoreRequest
from proton_api.models.diversification_score_request import DiversificationScoreRequest
from proton_api.models.education_calculator_annual_cost_request import EducationCalculatorAnnualCostRequest
from proton_api.models.education_calculator_deposit_amount_request import EducationCalculatorDepositAmountRequest
from proton_api.models.education_calculator_percent_covered_request import EducationCalculatorPercentCoveredRequest
from proton_api.models.education_config import EducationConfig
from proton_api.models.emergency_fund_calculator_request import EmergencyFundCalculatorRequest
from proton_api.models.event_study_request import EventStudyRequest
from proton_api.models.fee_analysis_request import FeeAnalysisRequest
from proton_api.models.financial_health_check_request import FinancialHealthCheckRequest
from proton_api.models.financial_picture_request import FinancialPictureRequest
from proton_api.models.financial_statement_analysis_request import FinancialStatementAnalysisRequest
from proton_api.models.goal_accumulation_allocation_request import GoalAccumulationAllocationRequest
from proton_api.models.goal_accumulation_recommendation_request import GoalAccumulationRecommendationRequest
from proton_api.models.goal_accumulation_status_request import GoalAccumulationStatusRequest
from proton_api.models.goal_config import GoalConfig
from proton_api.models.goal_decumulation_allocation_request import GoalDecumulationAllocationRequest
from proton_api.models.goal_decumulation_recommendation_request import GoalDecumulationRecommendationRequest
from proton_api.models.goal_decumulation_status_request import GoalDecumulationStatusRequest
from proton_api.models.goal_withdrawal_config import GoalWithdrawalConfig
from proton_api.models.guaranteed_rate_benefit import GuaranteedRateBenefit
from proton_api.models.income_config import IncomeConfig
from proton_api.models.life_insurance_needs_calculator_request import LifeInsuranceNeedsCalculatorRequest
from proton_api.models.location import Location
from proton_api.models.monte_carlo_request import MonteCarloRequest
from proton_api.models.mortgage_calculator_down_payment_request import MortgageCalculatorDownPaymentRequest
from proton_api.models.mortgage_calculator_home_price_request import MortgageCalculatorHomePriceRequest
from proton_api.models.mortgage_calculator_periodic_payment_request import MortgageCalculatorPeriodicPaymentRequest
from proton_api.models.mvo_request import MvoRequest
from proton_api.models.opt_config import OptConfig
from proton_api.models.opt_config1 import OptConfig1
from proton_api.models.order_rebalance_request import OrderRebalanceRequest
from proton_api.models.performance_calculator_request import PerformanceCalculatorRequest
from proton_api.models.periods import Periods
from proton_api.models.portfolio_optimization_score_request import PortfolioOptimizationScoreRequest
from proton_api.models.portfolio_what_if_request import PortfolioWhatIfRequest
from proton_api.models.purchase_calculator_amount_request import PurchaseCalculatorAmountRequest
from proton_api.models.purchase_calculator_deposit_amount_request import PurchaseCalculatorDepositAmountRequest
from proton_api.models.purchase_calculator_horizon_request import PurchaseCalculatorHorizonRequest
from proton_api.models.ratio_targets import RatioTargets
from proton_api.models.ratio_targets1 import RatioTargets1
from proton_api.models.rebalancing_signal_request import RebalancingSignalRequest
from proton_api.models.recommendation_config import RecommendationConfig
from proton_api.models.recommendation_config1 import RecommendationConfig1
from proton_api.models.recurring_transaction_analysis_request import RecurringTransactionAnalysisRequest
from proton_api.models.retirement_calculator_deposit_amount_request import RetirementCalculatorDepositAmountRequest
from proton_api.models.retirement_calculator_expenses_request import RetirementCalculatorExpensesRequest
from proton_api.models.retirement_calculator_percent_covered_request import RetirementCalculatorPercentCoveredRequest
from proton_api.models.risk_allocation_request import RiskAllocationRequest
from proton_api.models.risk_score_request import RiskScoreRequest
from proton_api.models.savings_calculator_request import SavingsCalculatorRequest
from proton_api.models.savings_deposit_schedule import SavingsDepositSchedule
from proton_api.models.scenario_analysis_request import ScenarioAnalysisRequest
from proton_api.models.sensitivity_analysis_request import SensitivityAnalysisRequest
from proton_api.models.sensitivity_factor import SensitivityFactor
from proton_api.models.settings import Settings
from proton_api.models.spending_analysis_request import SpendingAnalysisRequest
from proton_api.models.variable_annuity_request import VariableAnnuityRequest
from proton_api.models.w_config import WConfig
from proton_api.models.w_config1 import WConfig1
| 69.380952
| 121
| 0.916609
| 1,007
| 8,742
| 7.620655
| 0.253227
| 0.123143
| 0.176179
| 0.215403
| 0.287334
| 0.126271
| 0
| 0
| 0
| 0
| 0
| 0.002182
| 0.056394
| 8,742
| 125
| 122
| 69.936
| 0.928113
| 0.035919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
3678e7cd1a3365121a1e3909a3ba6854106bb030
| 2,701
|
gyp
|
Python
|
gyp/SimpleiOSApp.gyp
|
Frankie-666/color-emoji.skia
|
f1634a9952086155b9069d49ab91f1fa43b5ec6a
|
[
"BSD-3-Clause"
] | 2
|
2017-05-19T08:53:12.000Z
|
2017-08-28T11:59:26.000Z
|
gyp/SimpleiOSApp.gyp
|
Frankie-666/color-emoji.skia
|
f1634a9952086155b9069d49ab91f1fa43b5ec6a
|
[
"BSD-3-Clause"
] | 2
|
2017-07-25T09:37:22.000Z
|
2017-08-04T07:18:56.000Z
|
gyp/SimpleiOSApp.gyp
|
Frankie-666/color-emoji.skia
|
f1634a9952086155b9069d49ab91f1fa43b5ec6a
|
[
"BSD-3-Clause"
] | 2
|
2017-08-09T09:03:23.000Z
|
2020-05-26T09:14:49.000Z
|
# GYP build description for SimpleiOSApp, a minimal Skia iOS sample app.
{
  # Refuse to generate unless GYP was invoked with skia_os=ios.
  'conditions' : [
    [ 'skia_os != "ios"', {
      'error': '<!(set GYP_DEFINES=\"skia_os=\'ios\'\")'
    }],
  ],
  'targets': [
    {
      'target_name': 'SimpleiOSApp',
      'type': 'executable',
      'mac_bundle' : 1,
      'include_dirs' : [
        '../experimental/iOSSampleApp/Shared',
      ],
      # Shared iOS/Mac view plumbing, the iPad/iPhone app delegates,
      # and the sample app entry point.
      'sources': [
        '../src/views/ios/SkOSWindow_iOS.mm',
        '../src/views/mac/SkEventNotifier.h',
        '../src/views/mac/SkEventNotifier.mm',
        '../experimental/iOSSampleApp/iPad/AppDelegate_iPad.h',
        '../experimental/iOSSampleApp/iPad/AppDelegate_iPad.mm',
        '../experimental/iOSSampleApp/iPhone/AppDelegate_iPhone.h',
        '../experimental/iOSSampleApp/iPhone/AppDelegate_iPhone.mm',
        '../experimental/iOSSampleApp/Shared/SkUIView.h',
        '../experimental/iOSSampleApp/Shared/SkUIView.mm',
        '../experimental/iOSSampleApp/Shared/skia_ios.mm',
        '../experimental/SimpleiOSApp/SimpleApp.h',
        '../experimental/SimpleiOSApp/SimpleApp.mm',
        '../experimental/SimpleiOSApp/SimpleiOSApp-Info.plist',
      ],
      'dependencies': [
        'skia_base_libs.gyp:skia_base_libs',
        'effects.gyp:effects',
        'images.gyp:images',
        'views.gyp:views',
        'xml.gyp:xml',
      ],
      'link_settings': {
        # iOS system frameworks required at link time.
        'libraries': [
          '$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
          '$(SDKROOT)/System/Library/Frameworks/CoreText.framework',
          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
          '$(SDKROOT)/System/Library/Frameworks/ImageIO.framework',
          '$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework',
          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
        ],
        'libraries!': [
          #remove mac dependencies
          '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
          '$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
          '$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
          '$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework',
        ],
      },
      'xcode_settings' : {
        'INFOPLIST_FILE' : '../experimental/SimpleiOSApp/SimpleiOSApp-Info.plist',
      },
      'xcode_config_file': '../experimental/iOSSampleApp/SkiOSSampleApp-Base.xcconfig',
      # Interface Builder files copied into the .app bundle.
      'mac_bundle_resources' : [
        '../experimental/SimpleiOSApp/iPad/MainWindow_iPad.xib',
        '../experimental/SimpleiOSApp/iPhone/MainWindow_iPhone.xib',
      ],
    },
  ],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| 37.513889
| 87
| 0.621622
| 237
| 2,701
| 6.978903
| 0.35443
| 0.086457
| 0.133011
| 0.199516
| 0.387545
| 0.106409
| 0.106409
| 0.106409
| 0.106409
| 0.106409
| 0
| 0.001872
| 0.208812
| 2,701
| 71
| 88
| 38.042254
| 0.77211
| 0.044798
| 0
| 0.15625
| 0
| 0
| 0.705517
| 0.594794
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3696e68de63daf9a3f0eca3facd40c0061bac257
| 396
|
py
|
Python
|
slixmpp/features/feature_rosterver/__init__.py
|
marconfus/slixmpp
|
bcf186f42dc31d360e0a0af8a4b3aaf1e0b212aa
|
[
"BSD-3-Clause"
] | null | null | null |
slixmpp/features/feature_rosterver/__init__.py
|
marconfus/slixmpp
|
bcf186f42dc31d360e0a0af8a4b3aaf1e0b212aa
|
[
"BSD-3-Clause"
] | null | null | null |
slixmpp/features/feature_rosterver/__init__.py
|
marconfus/slixmpp
|
bcf186f42dc31d360e0a0af8a4b3aaf1e0b212aa
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Slixmpp: The Slick XMPP Library
Copyright (C) 2012 Nathanael C. Fritz
This file is part of Slixmpp.
See the file LICENSE for copying permission.
"""
from slixmpp.plugins.base import register_plugin
from slixmpp.features.feature_rosterver.rosterver import FeatureRosterVer
from slixmpp.features.feature_rosterver.stanza import RosterVer
register_plugin(FeatureRosterVer)
| 24.75
| 73
| 0.790404
| 50
| 396
| 6.18
| 0.62
| 0.106796
| 0.122977
| 0.168285
| 0.226537
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01194
| 0.15404
| 396
| 15
| 74
| 26.4
| 0.910448
| 0.368687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
36bfdb83f851957c2c6255a8b2b7f16a2eb1dde0
| 278
|
py
|
Python
|
src/cache/messaging/__init__.py
|
moibenko/enstore
|
6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9
|
[
"Intel",
"Unlicense"
] | 4
|
2021-10-17T11:17:59.000Z
|
2022-02-28T16:58:40.000Z
|
src/cache/messaging/__init__.py
|
moibenko/enstore
|
6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9
|
[
"Intel",
"Unlicense"
] | 17
|
2021-10-05T21:44:06.000Z
|
2022-03-31T16:58:40.000Z
|
src/cache/messaging/__init__.py
|
moibenko/enstore
|
6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9
|
[
"Intel",
"Unlicense"
] | 8
|
2021-09-02T18:55:49.000Z
|
2022-03-09T21:05:28.000Z
|
###############################################################################
#
# $Id$
#
###############################################################################
# Public submodules of this package, exported for ``from ... import *``.
__all__ = ["client","constants","file_list","enq_message","messages","md_client","mw_client","pe_client"]
| 34.75
| 105
| 0.284173
| 15
| 278
| 4.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035971
| 278
| 7
| 106
| 39.714286
| 0.261194
| 0.014388
| 0
| 0
| 0
| 0
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
36cbc4f123a81cddc0f19fc0a0c9d6b3516f8f55
| 420
|
py
|
Python
|
__init__.py
|
Amazeryogo/stunning-broccoli
|
fe8248a6afb2668b722ec7acddaf1b157de31be9
|
[
"MIT"
] | null | null | null |
__init__.py
|
Amazeryogo/stunning-broccoli
|
fe8248a6afb2668b722ec7acddaf1b157de31be9
|
[
"MIT"
] | null | null | null |
__init__.py
|
Amazeryogo/stunning-broccoli
|
fe8248a6afb2668b722ec7acddaf1b157de31be9
|
[
"MIT"
] | null | null | null |
import os
import subprocess
def make_user(username,password,email,aboutme):
    """Create a new user through the remote HTTP API.

    Asks for interactive confirmation on stdin first; on anything other
    than "y"/"Y" it silently does nothing.

    :param username: login name for the new account
    :param password: password for the new account
    :param email: contact e-mail for the new account
    :param aboutme: free-text "about me" blurb
    """
    x = input("THIS WILL CREATE A NEW USER, ARE YOU SURE?(y/n)")
    if x == "y" or x == "Y":
        # Pass the arguments as an argv list (no shell) instead of the old
        # os.system() call that interpolated user-supplied values into a
        # shell string -- that form allowed shell injection via any field.
        subprocess.run([
            "http", "POST", "http://www.amazeryogo.in/api/users",
            "username={}".format(username),
            "password={}".format(password),
            "email={}".format(email),
            "about_me={}".format(aboutme),
        ])
        print("done!")
    else:
        pass
def get_token(username, password):
    """Obtain an API token for *username* -- not implemented yet."""
    return None
| 30
| 150
| 0.62619
| 60
| 420
| 4.333333
| 0.683333
| 0.246154
| 0.242308
| 0.215385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211905
| 420
| 13
| 151
| 32.307692
| 0.785498
| 0
| 0
| 0.181818
| 0
| 0.090909
| 0.345238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0.454545
| 0.090909
| 0
| 0.272727
| 0.090909
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
36d1c391410ea62d1e9b70b95d4f7b81347dc59f
| 603
|
py
|
Python
|
pricers/__init__.py
|
exleym/simpaq
|
0f076424a953a4168e36560439db1dfd8c861fef
|
[
"MIT"
] | 8
|
2018-10-22T11:21:42.000Z
|
2021-07-04T16:42:01.000Z
|
simpaq-master/pricers/__init__.py
|
ArvydasA/javafun
|
3b0218194bb9351c568c5074746c486c53a8b6f7
|
[
"MIT"
] | null | null | null |
simpaq-master/pricers/__init__.py
|
ArvydasA/javafun
|
3b0218194bb9351c568c5074746c486c53a8b6f7
|
[
"MIT"
] | 2
|
2017-03-24T01:40:11.000Z
|
2019-12-03T18:16:00.000Z
|
class Pricer(object):
    """Abstract base for pricing engines.

    Concrete pricers subclass this and override :meth:`price`; the base
    implementation is a stub that prices nothing.
    """

    def __init__(self):
        pass

    def price(self, asset, underlying, greeks, save=False):
        """Primary pricing API of the Pricer class.

        :param asset: instance of Asset class or a derivative
        :param underlying: instance of Asset class or a derivative
        :param greeks: greeks requested alongside the price
        :param save: boolean to save calculated price
        :return: price or (price & greeks); the base stub returns None
        """
        # Deliberately a no-op in the base class.
        return None

    def __repr__(self):
        return "<Pricer>"
from .numerical import LatticeOptionPricer, MCOptionPricer
from .analytic import BlackScholesPricer, DCF
| 27.409091
| 66
| 0.661692
| 71
| 603
| 5.507042
| 0.549296
| 0.051151
| 0.076726
| 0.102302
| 0.194373
| 0.194373
| 0.194373
| 0.194373
| 0
| 0
| 0
| 0
| 0.26534
| 603
| 21
| 67
| 28.714286
| 0.882619
| 0.40796
| 0
| 0
| 0
| 0
| 0.026144
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.111111
| 0.222222
| 0.111111
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
36deea8305772a928ff42c67890fee2ccc86c057
| 186
|
py
|
Python
|
src/helpers/errors.py
|
henryvalbuena/mothership-v2
|
24e7afa73bdfd7f58e1845478338628b1b2a09c8
|
[
"MIT"
] | null | null | null |
src/helpers/errors.py
|
henryvalbuena/mothership-v2
|
24e7afa73bdfd7f58e1845478338628b1b2a09c8
|
[
"MIT"
] | null | null | null |
src/helpers/errors.py
|
henryvalbuena/mothership-v2
|
24e7afa73bdfd7f58e1845478338628b1b2a09c8
|
[
"MIT"
] | null | null | null |
"""Custom errors goes here"""
class Error(Exception):
    """Root of this project's custom exception hierarchy."""
    pass
class InvalidUserInput(Error):
    """Raised when user-supplied input fails validation."""
    pass
| 13.285714
| 49
| 0.650538
| 21
| 186
| 5.761905
| 0.714286
| 0.231405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22043
| 186
| 13
| 50
| 14.307692
| 0.834483
| 0.430108
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
36e1ca0056a2ec1c41b278858bfbd04b6fd87bd2
| 125
|
py
|
Python
|
Semana2/par.py
|
BrayanTorres2/Algoritmosyprogramaci-n-Grupo2Ciclo4-
|
ad64b5a3d3d129efaa297617748a74872522d7a1
|
[
"MIT"
] | 4
|
2021-09-27T17:20:56.000Z
|
2021-09-28T23:12:49.000Z
|
Semana2/par.py
|
BrayanTorres2/Algoritmosyprogramaci-n-Grupo2Ciclo4-
|
ad64b5a3d3d129efaa297617748a74872522d7a1
|
[
"MIT"
] | null | null | null |
Semana2/par.py
|
BrayanTorres2/Algoritmosyprogramaci-n-Grupo2Ciclo4-
|
ad64b5a3d3d129efaa297617748a74872522d7a1
|
[
"MIT"
] | 1
|
2021-11-19T02:26:18.000Z
|
2021-11-19T02:26:18.000Z
|
# Read an integer from the user and report whether it is even or odd.
# (Prompts and messages are in Spanish: "par" = even, "impar" = odd.)
numero=int(input("Digite numero: "))
if(numero%2==0):
    # Divisible by 2 -> even.
    print("el numero es par")
else:
    print("el numero es impar")
| 20.833333
| 36
| 0.624
| 20
| 125
| 3.9
| 0.65
| 0.179487
| 0.333333
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019802
| 0.192
| 125
| 5
| 37
| 25
| 0.752475
| 0
| 0
| 0
| 0
| 0
| 0.392
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
36f4682424cd24cde1e6d7832f62f9c375b3bdb2
| 2,699
|
py
|
Python
|
Crawler/github_osori_api.py
|
bees1114/Osori_level_meter
|
6af9b06e91fc4935be75e8293879e2bf881beefe
|
[
"MIT"
] | 1
|
2017-11-04T21:27:59.000Z
|
2017-11-04T21:27:59.000Z
|
Crawler/github_osori_api.py
|
bees1114/Osori_level_meter
|
6af9b06e91fc4935be75e8293879e2bf881beefe
|
[
"MIT"
] | null | null | null |
Crawler/github_osori_api.py
|
bees1114/Osori_level_meter
|
6af9b06e91fc4935be75e8293879e2bf881beefe
|
[
"MIT"
] | null | null | null |
import requests
import json
def get_osori_commit_counts_all():
    """Return a dict mapping contributor login -> total contributions
    across every repository of the HyOsori GitHub organisation.

    Prints each repository name and the final per-user totals as it goes.

    :return: dict of {login: contribution count}
    """
    # SECURITY NOTE: the OAuth token is hardcoded and passed as a query
    # parameter; it should come from configuration and be sent in an
    # Authorization header instead. Flagged, not changed here.
    list_osori_repo_url = 'https://api.github.com/orgs/HyOsori/repos?\
access_token=6a49e18d2cf83edf2d8717b42517fccec77d1e6f'
    # GET
    response = requests.get(list_osori_repo_url)
    repo_infos = json.loads(response.text)
    commit_counts = {}
    print("총 Repository 개수 : %d" % len(repo_infos))
    for repo_info in repo_infos:
        # An API error yields a message payload instead of repo dicts;
        # skip anything without a usable 'name' (was a bare except before).
        try:
            print(repo_info['name'])
        except (TypeError, KeyError):
            continue
        contribute_info_url = "https://api.github.com/repos/HyOsori/%s/contributors?access_token=\
6a49e18d2cf83edf2d8717b42517fccec77d1e6f" % repo_info['name']
        response = requests.get(contribute_info_url)
        contribute_infos = json.loads(response.text)
        for contribute_info in contribute_infos:
            try:
                login = contribute_info['login']
                # Accumulate via .get() instead of the original nested
                # try/except-KeyError update dance.
                commit_counts[login] = (commit_counts.get(login, 0) +
                                        contribute_info['contributions'])
            except (TypeError, KeyError):
                continue
    for id in commit_counts:
        print(id + ":" + str(commit_counts[id]))
    return commit_counts
def get_osori_commit_counts(id):
    """Intended to count GitHub activity for the user *id*.

    NOTE(review): this looks like an unfinished copy/paste of
    get_osori_commit_counts_all and cannot run to completion as written:
      * ``repo_info`` is never defined in this scope, so building
        ``contribute_info_url`` raises NameError if that line is reached;
      * ``commit_counts`` is initialised to the int ``0`` but is later
        subscripted and iterated, which raises TypeError.
    Kept byte-for-byte; flagged for a rewrite once the intent is confirmed.
    """
    list_osori_repo_url = 'https://api.github.com/users/%s/events?\
access_token=6a49e18d2cf83edf2d8717b42517fccec77d1e6f' % id
    # params = {'key1': 'value1', 'key2': 'value2'}
    # GET
    response = requests.get(list_osori_repo_url)
    event_infos = json.loads(response.text)
    commit_counts = 0
    print("총 Event 개수 : %d" % len(event_infos))
    for event in event_infos:
        try:
            print(event['name'])
        except:
            continue
        # NOTE(review): `repo_info` is undefined here -> NameError.
        contribute_info_url = "https://api.github.com/repos/HyOsori/%s/contributors?access_token=\
6a49e18d2cf83edf2d8717b42517fccec77d1e6f" % repo_info['name']
        response = requests.get(contribute_info_url)
        contribute_infos = json.loads(response.text)
        for contribute_info in contribute_infos:
            try:
                # NOTE(review): commit_counts is an int; subscripting raises.
                commit_counts[contribute_info['login']] += contribute_info['contributions']
                # print(contribute_info['login'])
                # print(contribute_info['contributions'])
            except:
                try:
                    commit_counts.update({contribute_info['login']: contribute_info['contributions']})
                except:
                    continue
    # NOTE(review): iterating the int 0 raises TypeError.
    for id in commit_counts:
        print(id + ":" + str(commit_counts[id]))
    return commit_counts
| 30.670455
| 102
| 0.623564
| 281
| 2,699
| 5.736655
| 0.202847
| 0.156328
| 0.07072
| 0.039702
| 0.797767
| 0.769231
| 0.769231
| 0.722084
| 0.633995
| 0.633995
| 0
| 0.047256
| 0.270841
| 2,699
| 87
| 103
| 31.022989
| 0.77185
| 0.07299
| 0
| 0.678571
| 0
| 0
| 0.05014
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.035714
| 0
| 0.107143
| 0.107143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
36fcc31f8291f2d43c816c1a33ee07438680aeb4
| 27,621
|
py
|
Python
|
src/dataops/tests/test_views.py
|
ShizhuZhang/ontask_b
|
acbf05ff9b18dae0a41c67d1e41774e54a890c40
|
[
"MIT"
] | null | null | null |
src/dataops/tests/test_views.py
|
ShizhuZhang/ontask_b
|
acbf05ff9b18dae0a41c67d1e41774e54a890c40
|
[
"MIT"
] | null | null | null |
src/dataops/tests/test_views.py
|
ShizhuZhang/ontask_b
|
acbf05ff9b18dae0a41c67d1e41774e54a890c40
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import os
import time
from django.conf import settings
from django.shortcuts import reverse
from django.utils.html import escape
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
import test
from dataops import pandas_db
from workflow.models import Workflow
class DataopsSymbols(test.OntaskLiveTestCase):
    """Live-browser tests: column/attribute/action names made of symbols.

    Restores the 'wflow_symbols' SQL dump before each test and drops all
    tables afterwards, so every test starts from the same database state.
    """
    fixtures = ['wflow_symbols']
    # SQL dump restored into the database before each test.
    filename = os.path.join(
        settings.BASE_DIR(),
        'dataops',
        'fixtures',
        'wflow_symbols.sql'
    )

    def setUp(self):
        """Restore the fixture table dump on top of the usual live setup."""
        super(DataopsSymbols, self).setUp()
        pandas_db.pg_restore_table(self.filename)

    def tearDown(self):
        """Drop all DB tables so the next test starts clean."""
        pandas_db.delete_all_tables()
        super(DataopsSymbols, self).tearDown()

    def test_01_symbols(self):
        """Rename columns/attributes/conditions using punctuation symbols
        and verify the UI (table view, action-in run, action-out preview)
        still works end to end."""
        symbols = '!#$%&()*+,-./:;<=>?@[\]^_`{|}~'
        # Login
        self.login('instructor01@bogus.com')
        # Go to the details page
        self.access_workflow_from_home_page('sss')
        # Edit the name column
        self.open_column_edit('name')
        # Replace name by symbols
        self.selenium.find_element_by_id("id_name").click()
        self.selenium.find_element_by_id("id_name").clear()
        self.selenium.find_element_by_id("id_name").send_keys(symbols)
        # Click in the submit/save button
        self.selenium.find_element_by_xpath("//button[@type='submit']").click()
        # MODAL WAITING
        self.wait_close_modal_refresh_table('column-table_previous')
        # Click on the Add Column button
        self.open_add_regular_column()
        # Set name to symbols (new column) and type to string
        self.selenium.find_element_by_id("id_name").click()
        self.selenium.find_element_by_id("id_name").clear()
        self.selenium.find_element_by_id("id_name").send_keys(symbols)
        self.selenium.find_element_by_id("id_data_type").click()
        Select(self.selenium.find_element_by_id(
            "id_data_type"
        )).select_by_visible_text("string")
        # Save the new column
        self.selenium.find_element_by_xpath("//button[@type='submit']").click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.ID, 'error_1_id_name')))
        # There should be a message saying that the name of this column already
        # exists
        self.assertIn('There is a column already with this name',
                      self.selenium.page_source)
        # Click again in the name and introduce something different
        self.selenium.find_element_by_id("id_name").click()
        self.selenium.find_element_by_id("id_name").clear()
        self.selenium.find_element_by_id("id_name").send_keys(symbols + '2')
        # Save the new column
        self.selenium.find_element_by_xpath("//button[@type='submit']").click()
        self.wait_close_modal_refresh_table('column-table_previous')
        # Click in the attributes section
        self.go_to_attribute_page()
        # Delete the existing one and confirm deletion
        self.selenium.find_element_by_xpath(
            "//table[@id='attribute-table']/tbody/tr/td[3]/button[2]"
        ).click()
        # Wait for the delete confirmation frame
        WebDriverWait(self.selenium, 10).until(
            EC.text_to_be_present_in_element((By.CLASS_NAME, 'modal-title'),
                                             'Confirm attribute deletion')
        )
        # Click in the delete confirm button
        self.selenium.find_element_by_xpath(
            "//div[@class='modal-footer']/button[2]"
        ).click()
        # MODAL WAITING
        self.wait_close_modal_refresh_table('attribute-table_previous')
        # Add a new attribute and insert key (symbols) and value
        self.create_attribute(symbols + '3', 'vvv')
        # Save and close the attribute page
        self.selenium.find_element_by_link_text('Back').click()
        # Wait for the details page
        self.wait_close_modal_refresh_table('column-table_previous')
        # Click in the TABLE link
        self.go_to_table()
        # Verify that everything appears normally
        self.assertIn(escape(symbols), self.selenium.page_source)
        self.assertIn(escape(symbols + '2'), self.selenium.page_source)
        # Click in the Actions navigation menu
        self.go_to_actions()
        # Edit the action-in
        self.open_action_edit('action in')
        # Set the right columns to process
        select = Select(self.selenium.find_element_by_id(
            'select-column-name'))
        select.select_by_visible_text('!#$%&()*+,-./:;<=>?@[\]^_`{|}~2')
        self.wait_for_datatable('column-selected-table_previous')
        # Wait for the table to be refreshed
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located(
                (By.ID, 'column-selected-table_previous')
            )
        )
        select = Select(self.selenium.find_element_by_id(
            'select-key-column-name'))
        select.select_by_visible_text('sid')
        WebDriverWait(self.selenium, 10).until_not(
            EC.visibility_of_element_located((By.ID, 'div-spinner'))
        )
        select = Select(self.selenium.find_element_by_id(
            'select-key-column-name'))
        select.select_by_visible_text('email')
        WebDriverWait(self.selenium, 10).until_not(
            EC.visibility_of_element_located((By.ID, 'div-spinner'))
        )
        # Save action-in
        self.selenium.find_element_by_link_text('Done').click()
        self.wait_for_datatable('action-table_previous')
        # Click in the RUN link of the action in
        element = self.search_action('action in')
        element.find_element_by_link_text("Run").click()
        # Wait for paging widget
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.ID, 'actioninrun-data_previous'))
        )
        # Enter data using the RUN menu. Select one entry to populate
        self.selenium.find_element_by_link_text("student01@bogus.com").click()
        self.selenium.find_element_by_id("id____ontask___select_2").click()
        self.selenium.find_element_by_id("id____ontask___select_2").clear()
        self.selenium.find_element_by_id("id____ontask___select_2").send_keys(
            "Carmelo Coton2")
        self.selenium.find_element_by_id("id____ontask___select_3").click()
        self.selenium.find_element_by_id("id____ontask___select_3").clear()
        self.selenium.find_element_by_id("id____ontask___select_3").send_keys(
            "xxx"
        )
        # Submit the data for one entry
        self.selenium.find_element_by_xpath(
            "//body/div[4]/div/form/button[1]/span").click()
        # Wait for paging widget
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.ID, 'actioninrun-data_previous'))
        )
        # Go Back to the action table
        self.selenium.find_element_by_xpath(
            "//div[@id='table-content']/a"
        ).click()
        # Wait for paging widget
        self.wait_for_datatable('action-table_previous')
        # Edit the action out
        element = self.search_action('action_out')
        element.find_element_by_link_text("Edit").click()
        # Insert attribute
        self.selenium.find_element_by_id("select-attribute-name").click()
        Select(self.selenium.find_element_by_id(
            "select-attribute-name")).select_by_visible_text("- Attribute -")
        # Insert column name
        self.selenium.find_element_by_id("select-column-name").click()
        Select(self.selenium.find_element_by_id(
            "select-column-name")).select_by_visible_text(symbols)
        # Insert second column name
        self.selenium.find_element_by_id("select-column-name").click()
        Select(self.selenium.find_element_by_id(
            "select-column-name")).select_by_visible_text(symbols + '2')
        # Create new condition
        self.create_condition(symbols + "4",
                              '',
                              [(symbols, "begins with", "C")])
        # Create the filter
        self.create_filter(symbols,
                          '',
                          [(symbols + "2", "doesn't begin with", "x")])
        # Click the preview button
        self.selenium.find_element_by_xpath(
            "//div[@id='html-editor']/form/div[3]/button").click()
        WebDriverWait(self.selenium, 10).until(
            EC.element_to_be_clickable((By.CLASS_NAME, 'js-action-preview-nxt'))
        )
        # Certain name should be in the page now.
        self.assertIn('Carmelo Coton', self.selenium.page_source)
        # Click in the "Close" button
        self.selenium.find_element_by_xpath(
            "//div[@id='modal-item']/div/div/div/div[2]/button[2]").click()
        # End of session
        self.logout()

    def test_02_symbols(self):
        """Append symbols to existing column names, run the action-in to
        update three student values, and verify the new values in the
        table view."""
        symbols = '!#$%&()*+,-./:;<=>?@[\]^_`{|}~'
        # Login
        self.login('instructor01@bogus.com')
        # GO TO THE WORKFLOW PAGE
        self.access_workflow_from_home_page('sss')
        # Edit the email column
        self.open_column_edit('email')
        # Append symbols to the name
        self.selenium.find_element_by_id("id_name").click()
        self.selenium.find_element_by_id("id_name").send_keys(symbols)
        # Save column information
        self.selenium.find_element_by_xpath("//button[@type='submit']").click()
        self.wait_close_modal_refresh_table('column-table_previous')
        # Select the age column and click in the edit button
        self.open_column_edit('age')
        # Append symbols to the name
        self.selenium.find_element_by_id("id_name").click()
        self.selenium.find_element_by_id("id_name").send_keys(symbols)
        # Save column information
        self.selenium.find_element_by_xpath("//button[@type='submit']").click()
        self.wait_close_modal_refresh_table('column-table_previous')
        # Go to the table link
        self.go_to_table()
        # Verify that everything appears normally
        self.assertIn(escape(symbols), self.selenium.page_source)
        self.assertIn('<td class=" dt-center">12</td>',
                      self.selenium.page_source)
        self.assertIn('<td class=" dt-center">12.1</td>',
                      self.selenium.page_source)
        self.assertIn('<td class=" dt-center">13.2</td>',
                      self.selenium.page_source)
        # Go to the actions page
        self.go_to_actions()
        # Edit the action-in at the top of the table
        self.open_action_edit('action in')
        # Set the correct values for an action-in
        # Set the right columns to process
        select = Select(self.selenium.find_element_by_id(
            'select-key-column-name'
        ))
        select.select_by_visible_text('email' + symbols)
        # This wait is incorrect. Don't know how to wait for an AJAX call.
        WebDriverWait(self.selenium, 10).until_not(
            EC.visibility_of_element_located((By.ID, 'div-spinner'))
        )
        # Done editing the action in
        self.selenium.find_element_by_link_text('Done').click()
        self.wait_for_datatable('action-table_previous')
        # Click in the run link
        self.open_action_run('action in')
        # Click on the first value
        self.selenium.find_element_by_link_text("student01@bogus.com").click()
        # Modify the value of the column
        self.selenium.find_element_by_id("id____ontask___select_1").click()
        self.selenium.find_element_by_id("id____ontask___select_1").clear()
        self.selenium.find_element_by_id("id____ontask___select_1").send_keys(
            "14"
        )
        # Submit changes to the first element
        self.selenium.find_element_by_xpath(
            "(//button[@name='submit'])[1]"
        ).click()
        self.wait_for_datatable('actioninrun-data_previous')
        # Click on the second value
        self.selenium.find_element_by_link_text("student02@bogus.com").click()
        # Modify the value of the column
        self.selenium.find_element_by_id("id____ontask___select_1").clear()
        self.selenium.find_element_by_id(
            "id____ontask___select_1"
        ).send_keys("15")
        # Submit changes to the second element
        self.selenium.find_element_by_xpath(
            "(//button[@name='submit'])[1]"
        ).click()
        self.wait_for_datatable('actioninrun-data_previous')
        # Click on the third value
        self.selenium.find_element_by_link_text("student03@bogus.com").click()
        # Modify the value of the column
        self.selenium.find_element_by_id("id____ontask___select_1").click()
        self.selenium.find_element_by_id("id____ontask___select_1").clear()
        self.selenium.find_element_by_id(
            "id____ontask___select_1"
        ).send_keys("16")
        # Submit changes to the second element
        self.selenium.find_element_by_xpath(
            "(//button[@name='submit'])[1]"
        ).click()
        self.wait_for_datatable('actioninrun-data_previous')
        # Click in the back link!
        self.selenium.find_element_by_link_text('Back').click()
        self.wait_for_datatable('action-table_previous')
        # Go to the table page
        self.go_to_table()
        # Assert the new values
        self.assertIn('<td class=" dt-center">14</td>',
                      self.selenium.page_source)
        self.assertIn('<td class=" dt-center">15</td>',
                      self.selenium.page_source)
        self.assertIn('<td class=" dt-center">16</td>',
                      self.selenium.page_source)
        # End of session
        self.logout()
class DataopsExcelUpload(test.OntaskLiveTestCase):
    """Live test: upload the 'results' sheet of an Excel workbook into an
    empty workflow and check the resulting row/column counts."""
    fixtures = ['empty_wflow']

    def tearDown(self):
        """Drop all DB tables so the next test starts clean."""
        pandas_db.delete_all_tables()
        super(DataopsExcelUpload, self).tearDown()

    def test_01_excelupload(self):
        """Upload 'excel_upload.xlsx' sheet 'results'; expect 29 rows x 14 cols."""
        # Login
        self.login('instructor01@bogus.com')
        # GO TO THE WORKFLOW PAGE
        self.access_workflow_from_home_page('wflow1', False)
        # Go to Excel upload/merge
        self.go_to_excel_upload_merge_step_1()
        # Upload file
        self.selenium.find_element_by_id("id_file").send_keys(
            os.path.join(settings.BASE_DIR(),
                         'dataops',
                         'fixtures',
                         'excel_upload.xlsx')
        )
        self.selenium.find_element_by_id("id_sheet").click()
        self.selenium.find_element_by_id("id_sheet").clear()
        self.selenium.find_element_by_id("id_sheet").send_keys("results")
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.element_to_be_clickable(
                (By.ID, 'checkAll'))
        )
        self.selenium.find_element_by_name("Submit").click()
        self.wait_for_datatable('column-table_previous')
        # The number of rows must be 29
        wflow = Workflow.objects.all()[0]
        self.assertEqual(wflow.nrows, 29)
        self.assertEqual(wflow.ncols, 14)
        # End of session
        self.logout()
class DataopsExcelUploadSheet(test.OntaskLiveTestCase):
    """Live test: upload a different sheet ('second sheet') of the same
    Excel workbook and check the resulting row/column counts."""
    fixtures = ['empty_wflow']

    def tearDown(self):
        """Drop all DB tables so the next test starts clean."""
        pandas_db.delete_all_tables()
        super(DataopsExcelUploadSheet, self).tearDown()

    def test_01_excelupload_sheet(self):
        """Upload 'excel_upload.xlsx' sheet 'second sheet'; expect 19 rows x 14 cols."""
        # Login
        self.login('instructor01@bogus.com')
        # GO TO THE WORKFLOW PAGE
        self.access_workflow_from_home_page('wflow1', False)
        # Go to Excel upload/merge
        self.go_to_excel_upload_merge_step_1()
        # Upload the file
        self.selenium.find_element_by_id("id_file").send_keys(
            os.path.join(settings.BASE_DIR(),
                         'dataops',
                         'fixtures',
                         'excel_upload.xlsx')
        )
        self.selenium.find_element_by_id("id_sheet").click()
        self.selenium.find_element_by_id("id_sheet").clear()
        self.selenium.find_element_by_id("id_sheet").send_keys("second sheet")
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.element_to_be_clickable(
                (By.ID, 'checkAll'))
        )
        self.selenium.find_element_by_name("Submit").click()
        self.wait_for_datatable('column-table_previous')
        # The number of rows must be 19
        wflow = Workflow.objects.all()[0]
        self.assertEqual(wflow.nrows, 19)
        self.assertEqual(wflow.ncols, 14)
        # End of session
        self.logout()
class DataopsNaNProcessing(test.OntaskLiveTestCase):
    """Live Selenium test: merge two CSV files with an outer join (which
    introduces NaN values) and verify an action renders over the result."""

    fixtures = ['empty_wflow']

    # Template inserted into the action editor. The doubled backslash makes
    # a literal "\n" sequence survive into the JavaScript string below.
    action_text = "Bool1 = {{ bool1 }}\\n" + \
                  "Bool2 = {{ bool2 }}\\n" + \
                  "Bool3 = {{ bool3 }}\\n" + \
                  "{% if bool1 cond %}Bool 1 is true{% endif %}\\n" + \
                  "{% if bool2 cond %}Bool 2 is true{% endif %}\\n" + \
                  "{% if bool3 cond %}Bool 3 is true{% endif %}\\n"

    def tearDown(self):
        # Drop all data tables so the next test starts clean.
        pandas_db.delete_all_tables()
        super(DataopsNaNProcessing, self).tearDown()

    def test_01_nan_manipulation(self):
        # Login
        self.login('instructor01@bogus.com')
        self.create_new_workflow('NaN')
        # Go to CSV Upload/Merge
        self.selenium.find_element_by_xpath(
            "//tbody/tr[1]/td[1]/a[1]"
        ).click()
        WebDriverWait(self.selenium, 10).until(
            EC.visibility_of_element_located(
                (By.XPATH, "//form")
            )
        )
        # Select file and upload
        self.selenium.find_element_by_id("id_file").send_keys(
            os.path.join(settings.BASE_DIR(),
                         'dataops',
                         'fixtures',
                         'test_df_merge_update_df1.csv')
        )
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.text_to_be_present_in_element((By.CLASS_NAME, 'page-header'),
                                             'Step 2: Select Columns')
        )
        # Submit
        self.selenium.find_element_by_xpath(
            "(//button[@name='Submit'])[2]"
        ).click()
        self.wait_for_datatable('column-table_previous')
        # Select again the upload/merge function
        self.go_to_csv_upload_merge_step_1()
        # Select the second file and submit
        self.selenium.find_element_by_id("id_file").send_keys(
            os.path.join(settings.BASE_DIR(),
                         'dataops',
                         'fixtures',
                         'test_df_merge_update_df2.csv')
        )
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.text_to_be_present_in_element((By.CLASS_NAME, 'page-header'),
                                             'Step 2: Select Columns')
        )
        # Select all the columns for upload
        self.selenium.find_element_by_name("Submit").click()
        # Wait for the upload/merge
        WebDriverWait(self.selenium, 10).until(
            EC.text_to_be_present_in_element(
                (By.CLASS_NAME, 'page-header'),
                'Step 3: Select Keys and Merge Option')
        )
        # Choose the default options for the merge (key and outer)
        # Select the merger function type
        select = Select(self.selenium.find_element_by_id('id_how_merge'))
        select.select_by_value('outer')
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.text_to_be_present_in_element(
                (By.CLASS_NAME, 'page-header'),
                'Step 4: Review and confirm')
        )
        # Check the merge summary and proceed
        self.selenium.find_element_by_name("Submit").click()
        # Wait for the upload/merge to finish
        self.wait_for_datatable('column-table_previous')
        # Go to the actions page
        self.go_to_actions()
        # Create a new action
        self.create_new_personalized_text_action("action out", '')
        # Create three conditions
        self.create_condition("bool1 cond", '', [('bool1', None, True)])
        self.create_condition("bool 2 cond", '', [('bool2', None, True)])
        self.create_condition('bool3 cond', '', [('bool3', None, True)])
        # insert the action text
        self.selenium.execute_script(
            """$('#id_content').summernote('editor.insertText',
            "{0}");""".format(self.action_text)
        )
        # Click in the preview and circle around the 12 rows
        self.open_browse_preview(11)
        # End of session
        self.logout()
class DataopsPluginExecution(test.OntaskLiveTestCase):
    """Live Selenium tests: run the two test transformation plugins and
    check the columns they add to the workflow data frame."""

    fixtures = ['plugin_execution']

    # SQL dump restored into the database before each test.
    filename = os.path.join(
        settings.BASE_DIR(),
        'dataops',
        'fixtures',
        'plugin_execution.sql'
    )

    def setUp(self):
        super(DataopsPluginExecution, self).setUp()
        pandas_db.pg_restore_table(self.filename)

    def tearDown(self):
        # Drop every table created during the test before the base teardown.
        pandas_db.delete_all_tables()
        super(DataopsPluginExecution, self).tearDown()

    def test_01_first_plugin(self):
        # Login
        self.login('instructor01@bogus.com')
        # GO TO THE WORKFLOW PAGE
        self.access_workflow_from_home_page('Plugin test')
        # Open the transform page
        self.go_to_transform()
        # Click in the first plugin
        element = self.search_table_row_by_string('transform-table',
                                                  1,
                                                  'test_plugin_1')
        element.find_element_by_link_text('Run').click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.NAME, 'csrfmiddlewaretoken'))
        )
        # Provide the execution data: pick two input columns.
        self.selenium.find_element_by_xpath("//input[@type='text']").click()
        self.selenium.find_element_by_name("columns").click()
        self.selenium.find_element_by_xpath(
            "(//input[@name='columns'])[2]"
        ).click()
        # Click outside the SOL widget
        self.selenium.find_element_by_id('div_id_merge_key').click()
        # Choose the key column used to merge the plugin result back in.
        self.selenium.find_element_by_id("id_merge_key").click()
        Select(self.selenium.find_element_by_id(
            "id_merge_key"
        )).select_by_visible_text("email")
        # Submit the execution
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.ID, 'plugin-execution-report'))
        )
        # Done. Click continue.
        self.selenium.find_element_by_link_text('Continue').click()
        self.wait_for_datatable('column-table_previous')
        # Assert the content of the dataframe
        wflow = Workflow.objects.get(name='Plugin test')
        df = pandas_db.load_from_db(wflow.id)
        self.assertTrue('RESULT 1' in set(df.columns))
        self.assertTrue('RESULT 2' in set(df.columns))
        self.assertTrue(all([x == 1 for x in df['RESULT 1']]))
        self.assertTrue(all([x == 2 for x in df['RESULT 2']]))
        # Second execution, this time adding a suffix to the column
        self.go_to_transform()
        # Click in the first plugin
        element = self.search_table_row_by_string('transform-table',
                                                  1,
                                                  'test_plugin_1')
        element.find_element_by_link_text('Run').click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.NAME, 'csrfmiddlewaretoken'))
        )
        # Provide the execution data
        self.selenium.find_element_by_xpath("//input[@type='text']").click()
        self.selenium.find_element_by_name("columns").click()
        self.selenium.find_element_by_xpath(
            "(//input[@name='columns'])[2]"
        ).click()
        # Click outside the SOL widget
        self.selenium.find_element_by_class_name(
            'sol-current-selection'
        ).click()
        self.selenium.find_element_by_id("id_merge_key").click()
        Select(self.selenium.find_element_by_id(
            "id_merge_key"
        )).select_by_visible_text("email")
        # Put the suffix _2 so output columns don't collide with run 1.
        self.selenium.find_element_by_id("id_out_column_suffix").click()
        self.selenium.find_element_by_id("id_out_column_suffix").clear()
        self.selenium.find_element_by_id("id_out_column_suffix").send_keys("_2")
        # Submit the execution
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.ID, 'plugin-execution-report'))
        )
        # Done. Click continue.
        self.selenium.find_element_by_link_text('Continue').click()
        self.wait_for_datatable('column-table_previous')
        # Assert the content of the dataframe
        wflow = Workflow.objects.get(name='Plugin test')
        df = pandas_db.load_from_db(wflow.id)
        self.assertTrue('RESULT 1_2' in set(df.columns))
        self.assertTrue('RESULT 2_2' in set(df.columns))
        self.assertTrue(all([x == 1 for x in df['RESULT 1_2']]))
        self.assertTrue(all([x == 2 for x in df['RESULT 2_2']]))
        # End of session
        self.logout()

    def test_02_second_plugin(self):
        # Login
        self.login('instructor01@bogus.com')
        # GO TO THE WORKFLOW PAGE
        self.access_workflow_from_home_page('Plugin test')
        # Open the transform page
        self.go_to_transform()
        # Click in the second plugin
        # Click in the first plugin
        element = self.search_table_row_by_string('transform-table',
                                                  1,
                                                  'test_plugin_2')
        element.find_element_by_link_text('Run').click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.NAME, 'csrfmiddlewaretoken'))
        )
        # Provide the execution data: only the merge key is required here.
        self.selenium.find_element_by_id("id_merge_key").click()
        Select(self.selenium.find_element_by_id(
            "id_merge_key"
        )).select_by_visible_text("email")
        # Submit the execution
        self.selenium.find_element_by_name("Submit").click()
        WebDriverWait(self.selenium, 10).until(
            EC.presence_of_element_located((By.ID, 'plugin-execution-report'))
        )
        # Done. Click continue.
        self.selenium.find_element_by_link_text('Continue').click()
        self.wait_for_datatable('column-table_previous')
        # Assert the content of the dataframe: plugin 2 adds sum/diff columns.
        wflow = Workflow.objects.get(name='Plugin test')
        df = pandas_db.load_from_db(wflow.id)
        self.assertTrue('RESULT 3' in set(df.columns))
        self.assertTrue('RESULT 4' in set(df.columns))
        self.assertTrue(df['RESULT 3'].equals(df['A1'] + df['A2']))
        self.assertTrue(df['RESULT 4'].equals(df['A1'] - df['A2']))
        # End of session
        self.logout()
| 37.325676
| 80
| 0.616632
| 3,377
| 27,621
| 4.762511
| 0.100977
| 0.104458
| 0.089722
| 0.151589
| 0.787229
| 0.752347
| 0.729901
| 0.712554
| 0.67817
| 0.633775
| 0
| 0.010932
| 0.268129
| 27,621
| 739
| 81
| 37.376184
| 0.784665
| 0.13211
| 0
| 0.606452
| 0
| 0.004301
| 0.175161
| 0.084172
| 0
| 0
| 0
| 0
| 0.058065
| 1
| 0.030108
| false
| 0
| 0.027957
| 0
| 0.086022
| 0.002151
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7fdfacb11ca13d24468cdbe6cbc2f902dee9070e
| 657
|
py
|
Python
|
src/othello/stone.py
|
upnt/othello-python
|
1b7d73006ef25b6d52f7d2b1a42e90e9d99521ee
|
[
"Apache-2.0"
] | null | null | null |
src/othello/stone.py
|
upnt/othello-python
|
1b7d73006ef25b6d52f7d2b1a42e90e9d99521ee
|
[
"Apache-2.0"
] | null | null | null |
src/othello/stone.py
|
upnt/othello-python
|
1b7d73006ef25b6d52f7d2b1a42e90e9d99521ee
|
[
"Apache-2.0"
] | null | null | null |
from enum import Enum, auto
class Stone:
    """A single Othello disc that knows its current face colour."""

    def __init__(self, color):
        # Name-mangled attribute keeps the colour private to this class.
        self.__color = color

    @property
    def color(self):
        """The current Color of this stone (read-only)."""
        return self.__color

    def reverse(self):
        """Flip the stone: BLACK becomes WHITE and vice versa.

        Any other colour value is left untouched, matching the original
        fall-through behaviour.
        """
        flipped = {Color.BLACK: Color.WHITE, Color.WHITE: Color.BLACK}
        if self.__color in flipped:
            self.__color = flipped[self.__color]

    def __str__(self):
        # Unknown colours yield None, exactly like the original if-chain
        # that fell off the end of the function.
        faces = {Color.BLACK: '○', Color.WHITE: '●'}
        return faces.get(self.__color)
class Candidate:
    """Marker rendered on board squares where a move is currently legal."""

    # Glyph used when the board is printed.
    _MARK = '・'

    def __str__(self):
        return self._MARK
class Color(Enum):
    """Disc colours. Explicit values match what auto() assigned: 1 and 2."""
    BLACK = 1
    WHITE = 2
| 18.771429
| 39
| 0.543379
| 75
| 657
| 4.426667
| 0.28
| 0.243976
| 0.295181
| 0.192771
| 0.463855
| 0.150602
| 0
| 0
| 0
| 0
| 0
| 0
| 0.354642
| 657
| 34
| 40
| 19.323529
| 0.775943
| 0
| 0
| 0.32
| 0
| 0
| 0.004566
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.04
| 0.08
| 0.68
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
7feecf1607edb440c280da99f884666d58d662cf
| 226
|
py
|
Python
|
tournaments/forms.py
|
JonnyFunFun/pyParty
|
972690881c88d27139b076d2acb50b69275759b8
|
[
"MIT"
] | 1
|
2017-03-23T18:07:33.000Z
|
2017-03-23T18:07:33.000Z
|
tournaments/forms.py
|
JonnyFunFun/pyParty
|
972690881c88d27139b076d2acb50b69275759b8
|
[
"MIT"
] | null | null | null |
tournaments/forms.py
|
JonnyFunFun/pyParty
|
972690881c88d27139b076d2acb50b69275759b8
|
[
"MIT"
] | null | null | null |
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit, Field
from crispy_forms.bootstrap import FormActions, TabHolder, Tab
from admin.settings import get_setting
| 28.25
| 62
| 0.849558
| 32
| 226
| 5.875
| 0.5625
| 0.159574
| 0.239362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115044
| 226
| 7
| 63
| 32.285714
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3d225c37053121a964037f87b164258f226d70e6
| 234
|
py
|
Python
|
encryption/lib/ope_encrypt_String.py
|
dongy6/type-inference
|
90d002a1e2d0a3d160ab204084da9d5be5fdd971
|
[
"Apache-2.0"
] | 1
|
2019-12-07T16:13:03.000Z
|
2019-12-07T16:13:03.000Z
|
encryption/lib/ope_encrypt_String.py
|
dongy6/type-inference
|
90d002a1e2d0a3d160ab204084da9d5be5fdd971
|
[
"Apache-2.0"
] | null | null | null |
encryption/lib/ope_encrypt_String.py
|
dongy6/type-inference
|
90d002a1e2d0a3d160ab204084da9d5be5fdd971
|
[
"Apache-2.0"
] | null | null | null |
import sys

from pyope.ope import OPE, ValueRange

# Order-preserving encryption: 32-bit signed inputs are mapped into a
# 53-bit signed output range, so ciphertext comparisons preserve order.
# SECURITY NOTE(review): the key is hard-coded; load it from a secure
# source (environment variable / key store) before any real use.
cipher = OPE(b'key goes here' * 2,
             in_range=ValueRange(-2**31, 2**31 - 1),
             out_range=ValueRange(-2**53, 2**53 - 1))

# Encrypt each character of the first CLI argument, one ciphertext per line.
# Fixes: the original used the Python 2 print statement (a SyntaxError on
# Python 3) and shadowed the builtin `input` with a local variable.
text = list(sys.argv[1])
for c in text:
    print(cipher.encrypt(ord(c)))
| 29.25
| 111
| 0.696581
| 45
| 234
| 3.577778
| 0.577778
| 0.186335
| 0.198758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078049
| 0.123932
| 234
| 7
| 112
| 33.428571
| 0.707317
| 0
| 0
| 0
| 0
| 0
| 0.055794
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.166667
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3d348fdac33a12fa640bce45eff3efaa9194f380
| 184
|
py
|
Python
|
fisswrapper/fisswrapper.py
|
cbirger/firecloud-ws-stats
|
03963a2eadcb26940ee542339cb10ae12c002bde
|
[
"MIT"
] | null | null | null |
fisswrapper/fisswrapper.py
|
cbirger/firecloud-ws-stats
|
03963a2eadcb26940ee542339cb10ae12c002bde
|
[
"MIT"
] | null | null | null |
fisswrapper/fisswrapper.py
|
cbirger/firecloud-ws-stats
|
03963a2eadcb26940ee542339cb10ae12c002bde
|
[
"MIT"
] | null | null | null |
from firecloud import fiss
def list_workspaces(project=None):
    """Return the FISS workspace listing, optionally filtered by *project*.

    Bug fix: the original signature was ``list_workspaces(project=project)``,
    whose default referenced an undefined name and raised NameError the
    moment the module was imported. ``None`` keeps the parameter optional
    and lets fiss apply its own default project resolution.
    """
    class MyInputParams(object):
        # fiss expects an argparse-style namespace carrying a `project` attr.
        pass
    args = MyInputParams()
    args.project = project
    return fiss.space_list(args)
| 20.444444
| 37
| 0.728261
| 21
| 184
| 6.285714
| 0.666667
| 0.212121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184783
| 184
| 8
| 38
| 23
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
3d4bce0bffa9c1a6439b639ba0d48bcff5157858
| 446
|
py
|
Python
|
servicer/image_servicer/image_servicer.py
|
kkd1510/python_grpc_base
|
ceef5bcf6c6428437a07cba115c7df1c38a21526
|
[
"MIT"
] | null | null | null |
servicer/image_servicer/image_servicer.py
|
kkd1510/python_grpc_base
|
ceef5bcf6c6428437a07cba115c7df1c38a21526
|
[
"MIT"
] | null | null | null |
servicer/image_servicer/image_servicer.py
|
kkd1510/python_grpc_base
|
ceef5bcf6c6428437a07cba115c7df1c38a21526
|
[
"MIT"
] | null | null | null |
import logging
import api.space_telescope_pb2
import api.space_telescope_pb2_grpc
from servicer.image_servicer.search import ImageSearch
from google.protobuf import json_format
logger = logging.getLogger(__name__)
class ImageServicer(api.space_telescope_pb2_grpc.ImageServicer):
    """gRPC servicer implementing the space-telescope image lookup RPC."""

    def GetImageInformation(self, request, context):
        # Resolve the image metadata by the id carried in the request, then
        # expand the resulting dict into the protobuf reply message's fields.
        image = ImageSearch().search(request.id)
        return api.space_telescope_pb2.ImageReply(**image)
| 26.235294
| 64
| 0.807175
| 55
| 446
| 6.254545
| 0.527273
| 0.093023
| 0.197674
| 0.232558
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.121076
| 446
| 16
| 65
| 27.875
| 0.867347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.5
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e9f35d7bf3b34d604ee409832b159bbb01f1a9d1
| 104,869
|
py
|
Python
|
backend/api_ien.py
|
tec-csf/tc3041-t1-primavera-2020-equipo7
|
2d7f82b5df072ccc556a467fede668335ee6973a
|
[
"Apache-2.0"
] | null | null | null |
backend/api_ien.py
|
tec-csf/tc3041-t1-primavera-2020-equipo7
|
2d7f82b5df072ccc556a467fede668335ee6973a
|
[
"Apache-2.0"
] | null | null | null |
backend/api_ien.py
|
tec-csf/tc3041-t1-primavera-2020-equipo7
|
2d7f82b5df072ccc556a467fede668335ee6973a
|
[
"Apache-2.0"
] | null | null | null |
# API using flask and a DB2 connection
"""
Equipo 7:
Roberto Gervacio
Alejandra Nissan
Isaac Harari
Yann Le Lorier
"""
import locale
import os
import time
from datetime import timedelta, datetime

from flask import Flask, jsonify, redirect, request, url_for, make_response  # request handler, getting url in a function
from flask_cors import CORS  # pip3 install flask-cors
from flask_db2 import DB2

app = Flask(__name__)
# Allow cross-origin requests from the frontend.
CORS(app)

# DB2 connection settings. SECURITY FIX: credentials were hard-coded; each
# value can now be overridden via an environment variable. The literals are
# kept only as backward-compatible defaults and should be rotated/removed.
app.config['DB2_DATABASE'] = os.environ.get('DB2_DATABASE', "BLUDB")
app.config['DB2_HOSTNAME'] = os.environ.get(
    'DB2_HOSTNAME',
    "dashdb-txn-sbox-yp-dal09-03.services.dal.bluemix.net")
app.config['DB2_PORT'] = os.environ.get('DB2_PORT', '50000')
app.config['DB2_PROTOCOL'] = os.environ.get('DB2_PROTOCOL', 'TCPIP')
app.config['DB2_USER'] = os.environ.get('DB2_USER', "bxj73519")
app.config['DB2_PASSWORD'] = os.environ.get('DB2_PASSWORD', "6p^7wjwws1pbg79l")

db = DB2(app)
class InvalidUsage(Exception):
    """API error carrying an HTTP status code and an optional extra payload."""

    # Default HTTP status when the raiser does not supply one.
    status_code = 400

    def __init__(self, message, status_code=None, payload=None):
        super(InvalidUsage, self).__init__()
        self.message = message
        if status_code is not None:
            # Shadow the class-level default for this instance only.
            self.status_code = status_code
        self.payload = payload

    def to_dict(self):
        """Serialize as a dict suitable for jsonify(): payload + message."""
        body = dict(self.payload or ())
        body['message'] = self.message
        return body
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
    """Render any raised InvalidUsage as a JSON response with its status code."""
    response = jsonify(error.to_dict())
    response.status_code = error.status_code
    return response
################################################# FUNCTIONS ######################################################
################################################################
# ELECCION
################################################################
@app.route('/elecciones/', methods=['GET', 'POST'])
def all_eleccion():
    """List every election (GET) or create a new one from posted JSON (POST)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_eleccion = request.get_json()
        # Dates arrive as ISO strings; keep only the YYYY-MM-DD part.
        fecha_eleccion_inicio = dict_new_eleccion['fecha_inicio'][:10]
        fecha_eleccion_final = dict_new_eleccion['fecha_fin'][:10]
        descripcion = dict_new_eleccion['descripcion']
        tipo = "Municipal" if dict_new_eleccion['tipo_elecciones'] == 'm' else "Federal"
        # SECURITY FIX: the original interpolated request values into the SQL
        # with str.format (SQL injection); use qmark parameter markers instead.
        insert_command = ("INSERT INTO eleccion (fecha_eleccion_inicio, "
                          "fecha_eleccion_final, descripcion, tipo) "
                          "VALUES (?, ?, ?, ?)")
        try:
            cur.execute(insert_command,
                        (fecha_eleccion_inicio, fecha_eleccion_final,
                         descripcion, tipo))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            # Preserve the original catch-all -> 404 contract.
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        cur.close()
        return res
    else:
        show_command = ("SELECT id_eleccion, descripcion, tipo, "
                        "fecha_eleccion_inicio, fecha_eleccion_final "
                        "FROM ELECCION")
        cur.execute(show_command)
        elecciones = cur.fetchall()
        elecciones_list = [
            {"id": eleccion[0],
             "descripcion": eleccion[1],
             "tipo": eleccion[2],
             "fecha_inicio": eleccion[3],
             "fecha_final": eleccion[4]}
            for eleccion in elecciones
        ]
        cur.close()
        return jsonify(elecciones_list)
@app.route('/elecciones/<int:id_eleccion>/', methods=['GET', 'POST', 'DELETE'])
def one_eleccion(id_eleccion):
    """Update (POST), delete (DELETE) or fetch with history (GET) one election."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_eleccion = request.get_json()
        descripcion = dict_new_eleccion['descripcion']
        # SECURITY FIX: parameter markers replace str.format interpolation of
        # the posted description (SQL injection).
        update_command = "UPDATE eleccion SET descripcion=? WHERE id_eleccion = ?"
        try:
            cur.execute(update_command, (descripcion, id_eleccion))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        cur.close()
        return res
    elif request.method == 'DELETE':
        delete_command = "DELETE FROM eleccion WHERE id_eleccion=?"
        try:
            cur.execute(delete_command, (id_eleccion,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        cur.close()
        return res
    else:  # method == GET
        # Same projection for the current row and its historical versions.
        columns = ("id_eleccion, fecha_eleccion_inicio, fecha_eleccion_final, "
                   "descripcion, tipo, sys_eleccion_inicio, sys_eleccion_final, "
                   "trans_id_eleccion")

        def row_to_dict(row):
            # Shared JSON shape for current and historical rows.
            return {"id": row[0],
                    "fecha_inicio": row[1],
                    "fecha_final": row[2],
                    "descripcion": row[3],
                    "tipo": row[4],
                    "sys_inicio": row[5],
                    "sys_final": row[6],
                    "trans_id": row[7]}

        elecciones_list = []
        cur.execute("SELECT {} FROM ELECCION WHERE id_eleccion=?".format(columns),
                    (id_eleccion,))
        for eleccion in cur.fetchall():
            elecciones_list.append(row_to_dict(eleccion))
        cur.execute("SELECT {} FROM hist_eleccion WHERE id_eleccion=?".format(columns),
                    (id_eleccion,))
        # Historical versions are appended newest-first, as before.
        for row in reversed(cur.fetchall()):
            elecciones_list.append(row_to_dict(row))
        cur.close()
        return jsonify(elecciones_list)
################################################################
# ELECCION - PERIOD LOOKUP
################################################################
@app.route('/elecciones/periodos/', methods=['POST'])
def one_period():
    """Return elections whose business-time period matches the posted range.

    ``intervalo`` true  -> inclusive match (FOR BUSINESS_TIME BETWEEN ... AND ...)
    ``intervalo`` false -> exclusive match (FOR BUSINESS_TIME FROM ... TO ...)
    """
    cur = db.connection.cursor()
    dict_periodo = request.get_json()
    fecha_inicio = dict_periodo['fecha_inicio'][:10]
    fecha_final = dict_periodo['fecha_final'][:10]
    intervalo = dict_periodo['intervalo']
    base = ("SELECT id_eleccion, descripcion, tipo, "
            "ELECCION.fecha_eleccion_inicio, ELECCION.fecha_eleccion_final "
            "FROM ELECCION FOR BUSINESS_TIME ")
    # SECURITY FIX: parameter markers replace str.format of the posted dates
    # (SQL injection). NOTE(review): Db2 accepts parameter markers in period
    # specifications — confirm against the deployed Db2 version.
    if intervalo:
        period_query = base + "BETWEEN ? AND ?"
    else:
        period_query = base + "FROM ? TO ?"
    cur.execute(period_query, (fecha_inicio, fecha_final))
    periodos = cur.fetchall()
    periodos_list = [
        {"id": periodo[0],
         "descripcion": periodo[1],
         "tipo": periodo[2],
         "fecha_inicio": periodo[3],
         "fecha_final": periodo[4]}
        for periodo in periodos
    ]
    # FIX: the original leaked the cursor on this path.
    cur.close()
    return jsonify(periodos_list)
@app.route('/elecciones/fecha_ex/', methods=['POST'])
def one_date():
    """Return elections whose business-time period contains the posted date."""
    cur = db.connection.cursor()
    dict_periodo = request.get_json()
    fecha = dict_periodo['fecha'][:10]
    # SECURITY FIX: parameter marker replaces str.format of the posted date
    # (SQL injection). NOTE(review): confirm the deployed Db2 accepts a
    # parameter marker in FOR BUSINESS_TIME AS OF.
    date_query = ("SELECT id_eleccion, descripcion, tipo, "
                  "ELECCION.fecha_eleccion_inicio, ELECCION.fecha_eleccion_final "
                  "FROM ELECCION FOR BUSINESS_TIME AS OF ?")
    cur.execute(date_query, (fecha,))
    fechas = cur.fetchall()
    fechas_list = [
        {"id": row[0],
         "descripcion": row[1],
         "tipo": row[2],
         "fecha_inicio": row[3],
         "fecha_final": row[4]}
        for row in fechas
    ]
    # FIX: the original leaked the cursor on this path.
    cur.close()
    return jsonify(fechas_list)
################################################################
# COLEGIO
################################################################
@app.route('/colegios/', methods=["GET", "POST"])
def all_colegio():
    """List all colegios (GET) or create one bound to an election (POST)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_colegio = request.get_json()
        direccion = dict_new_colegio['direccion']
        id_eleccion = dict_new_colegio['id_eleccion']
        # The colegio inherits its business-time period from its election.
        cur.execute("SELECT fecha_eleccion_inicio, fecha_eleccion_final "
                    "FROM ELECCION WHERE id_eleccion=?", (id_eleccion,))
        eleccion = cur.fetchall()[0]
        fecha_colegio_inicio = eleccion[0]
        fecha_colegio_final = eleccion[1]
        # SECURITY FIX: parameter markers replace str.format interpolation of
        # request values (SQL injection).
        insert_command = ("INSERT INTO colegio (fecha_colegio_inicio, "
                          "fecha_colegio_final, id_colegio_eleccion, direccion) "
                          "VALUES (?, ?, ?, ?)")
        try:
            cur.execute(insert_command,
                        (fecha_colegio_inicio, fecha_colegio_final,
                         id_eleccion, direccion))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        cur.close()
        return res
    else:
        cur.execute("SELECT id_colegio, fecha_colegio_inicio, "
                    "fecha_colegio_final, direccion, id_colegio_eleccion "
                    "FROM COLEGIO")
        colegios = cur.fetchall()
        colegios_list = []
        for colegio in colegios:
            id_colegio_eleccion = colegio[4]
            # One lookup per row to resolve the election's description.
            cur.execute("SELECT descripcion FROM ELECCION WHERE id_eleccion=?",
                        (id_colegio_eleccion,))
            eleccion = cur.fetchall()[0]  # [first tuple][first element]
            colegios_list.append(
                {"id": colegio[0],
                 "fecha_inicio": colegio[1],
                 "fecha_final": colegio[2],
                 "direccion": colegio[3],
                 "descripcion_eleccion": eleccion[0],
                 "id_eleccion": id_colegio_eleccion})
        cur.close()
        return jsonify(colegios_list)
@app.route('/colegios/<int:id_colegio>/', methods=['GET', 'POST', 'DELETE'])
def one_colegio(id_colegio):
    """Update (POST), delete (DELETE) or fetch with history (GET) one colegio.

    NOTE(review): the UPDATE statement is built with str.format from request
    values — vulnerable to SQL injection; should use parameter markers.
    """
    cur = db.connection.cursor()
    if request.method == 'POST':
        # Agarras lo que te pasan del front
        # (grab the values posted by the frontend)
        dict_new_colegio = request.get_json()
        direccion = dict_new_colegio['direccion']
        id_eleccion = dict_new_colegio['id_eleccion']
        # Agarras la eleccion (fetch the owning election's period)
        get_eleccion_of_colegio_command = "SELECT fecha_eleccion_inicio, fecha_eleccion_final FROM ELECCION WHERE id_eleccion={}".format(id_eleccion)
        cur.execute(get_eleccion_of_colegio_command)
        eleccion = cur.fetchall()[0]
        # Obtienes las nuevas (no a la fuerza deben de ser nuevas, pueden ser
        # las mismas) fechas — the colegio period mirrors the election period.
        fecha_colegio_inicio = eleccion[0]
        fecha_colegio_final = eleccion[1]
        update_command = "UPDATE colegio SET fecha_colegio_inicio='{}', fecha_colegio_final='{}', id_colegio_eleccion={}, direccion='{}' WHERE id_colegio = {}".format(fecha_colegio_inicio,
                                                                                                                                                                      fecha_colegio_final,
                                                                                                                                                                      id_eleccion,
                                                                                                                                                                      direccion,
                                                                                                                                                                      id_colegio)
        try:
            cur.execute(update_command)
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except:
            # Any failure is reported as a 404 to the client.
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        cur.close()
        return res
    elif request.method == 'DELETE':
        delete_command = "DELETE FROM colegio WHERE id_colegio = {}".format(id_colegio)
        try:
            cur.execute(delete_command)
            res = make_response(jsonify({}), 204)
        except:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        cur.close()
        return res
    else:
        # GET: current row first, then historical versions (newest first).
        colegios_list = []
        show_command = "SELECT id_colegio, fecha_colegio_inicio, fecha_colegio_final, id_colegio_eleccion, direccion, sys_colegio_inicio, sys_colegio_final, trans_id_colegio FROM colegio WHERE id_colegio={}".format(id_colegio)
        show_command_hist = "SELECT id_colegio, fecha_colegio_inicio, fecha_colegio_final, id_colegio_eleccion, direccion, sys_colegio_inicio, sys_colegio_final, trans_id_colegio FROM hist_colegio WHERE id_colegio={}".format(id_colegio)
        cur.execute(show_command)
        colegios = cur.fetchall()
        for colegio in colegios:
            id_colegio_eleccion = colegio[3]
            # Resolve the owning election's description for the response.
            get_eleccion_of_colegio_command = "SELECT descripcion FROM ELECCION WHERE id_eleccion={}".format(id_colegio_eleccion)
            cur.execute(get_eleccion_of_colegio_command)
            eleccion = cur.fetchall()[0]
            colegios_list.append(
                {"id": colegio[0],
                 "fecha_inicio": colegio[1],
                 "fecha_final": colegio[2],
                 "direccion": colegio[4],
                 "sys_inicio": colegio[5],
                 "sys_final": colegio[6],
                 "trans_id": colegio[7],
                 "id_eleccion": id_colegio_eleccion,
                 "descripcion_eleccion": eleccion[0]
                 }
            )
        cur.execute(show_command_hist)
        colegios_hist = cur.fetchall()
        # Walk the history backwards so newest versions come first.
        for i in range(len(colegios_hist)-1, -1, -1):
            id_colegio_eleccion = colegios_hist[i][3]
            get_eleccion_of_colegio_command = "SELECT descripcion FROM ELECCION WHERE id_eleccion={}".format(id_colegio_eleccion)
            cur.execute(get_eleccion_of_colegio_command)
            eleccion = cur.fetchall()[0]
            colegios_list.append(
                {"id": colegios_hist[i][0],
                 "fecha_inicio": colegios_hist[i][1],
                 "fecha_final": colegios_hist[i][2],
                 "direccion": colegios_hist[i][4],
                 "sys_inicio": colegios_hist[i][5],
                 "sys_final": colegios_hist[i][6],
                 "trans_id": colegios_hist[i][7],
                 "id_eleccion": id_colegio_eleccion,
                 "descripcion_eleccion": eleccion[0]
                 }
            )
        # NOTE(review): the cursor is not closed on this GET path.
        return jsonify(colegios_list)
################################################################
# MESA
################################################################
@app.route('/mesas/', methods=['GET', 'POST'])
def all_mesas():
    """List all mesas with their colegio/election (GET) or create one (POST)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_colegio = request.get_json()
        letra = dict_new_colegio['letra']
        id_colegio = dict_new_colegio['id_colegio']
        # The mesa inherits its business-time period from its colegio.
        cur.execute("SELECT fecha_colegio_inicio, fecha_colegio_final "
                    "FROM colegio WHERE id_colegio=?", (id_colegio,))
        colegio = cur.fetchall()[0]
        fecha_mesa_inicio = colegio[0]
        fecha_mesa_final = colegio[1]
        # SECURITY FIX: parameter markers replace str.format interpolation of
        # request values (SQL injection).
        insert_command = ("INSERT INTO mesa (fecha_mesa_inicio, "
                          "fecha_mesa_final, id_mesa_colegio, letra) "
                          "VALUES (?, ?, ?, ?)")
        try:
            cur.execute(insert_command,
                        (fecha_mesa_inicio, fecha_mesa_final,
                         id_colegio, letra))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        # FIX: the original never closed the cursor on this path.
        cur.close()
        return res
    else:
        show_command = ("select id_mesa, fecha_mesa_inicio, fecha_mesa_final, "
                        "letra, id_colegio, descripcion from mesa "
                        "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
                        "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion")
        cur.execute(show_command)
        mesas = cur.fetchall()
        mesas_list = [
            {"id": mesa[0],
             "fecha_inicio": mesa[1],
             "fecha_final": mesa[2],
             "letra": mesa[3],
             "id_colegio": mesa[4],
             "descripcion_eleccion": mesa[5]}
            for mesa in mesas
        ]
        # FIX: the original never closed the cursor on this path.
        cur.close()
        return jsonify(mesas_list)
@app.route('/mesas/<int:id_mesa>/', methods=['GET', 'POST', 'DELETE'])
def one_mesa(id_mesa):
    """Update (POST), delete (DELETE) or fetch with history (GET) one mesa.

    NOTE(review): the UPDATE statement is built with str.format from request
    values — vulnerable to SQL injection; should use parameter markers. The
    cursor is also never closed in this view.
    """
    cur = db.connection.cursor()
    if request.method == 'POST':
        # Agarras lo que te pasan del front
        # (grab the values posted by the frontend)
        dict_new_mesa = request.get_json()
        letra = dict_new_mesa['letra']
        id_colegio = dict_new_mesa['id_colegio']
        # Agarras el colegio (fetch the owning colegio's period)
        get_colegio_of_mesa_command = "SELECT fecha_colegio_inicio, fecha_colegio_final FROM colegio WHERE id_colegio={}".format(id_colegio)
        cur.execute(get_colegio_of_mesa_command)
        colegio = cur.fetchall()[0]
        # Obtienes las nuevas (no a la fuerza deben de ser nuevas, pueden ser
        # las mismas) fechas — the mesa period mirrors the colegio period.
        fecha_mesa_inicio = colegio[0]
        fecha_mesa_final = colegio[1]
        update_command = "UPDATE mesa SET fecha_mesa_inicio='{}', fecha_mesa_final='{}', id_mesa_colegio='{}', letra='{}' WHERE id_mesa = '{}'".format(fecha_mesa_inicio,
                                                                                                                                                      fecha_mesa_final,
                                                                                                                                                      id_colegio,
                                                                                                                                                      letra,
                                                                                                                                                      id_mesa)
        try:
            cur.execute(update_command)
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except:
            # Any failure is reported as a 404 to the client.
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        delete_command = "DELETE FROM mesa WHERE id_mesa = {}".format(id_mesa)
        try:
            cur.execute(delete_command)
            res = make_response(jsonify({}), 204)
        except:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        # GET: current row first, then historical versions (newest first).
        mesas_list = []
        show_command = "select id_mesa, letra, fecha_mesa_inicio, fecha_mesa_final, sys_mesa_inicio, sys_mesa_final, trans_id_mesa, id_colegio, descripcion, id_eleccion from mesa inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion where mesa.id_mesa={}".format(id_mesa)
        show_command_hist = "select id_mesa, letra, fecha_mesa_inicio, fecha_mesa_final, sys_mesa_inicio, sys_mesa_final, trans_id_mesa, id_colegio, descripcion, id_eleccion from hist_mesa inner join colegio on hist_mesa.id_mesa_colegio=colegio.id_colegio inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion where hist_mesa.id_mesa={}".format(id_mesa)
        cur.execute(show_command)
        mesas = cur.fetchall()
        for mesa in mesas:
            mesas_list.append(
                {"id": mesa[0],
                 "letra": mesa[1],
                 "fecha_inicio": mesa[2],
                 "fecha_final": mesa[3],
                 "sys_inicio": mesa[4],
                 "sys_final": mesa[5],
                 "trans_id": mesa[6],
                 "id_colegio": mesa[7],
                 "descripcion_eleccion": mesa[8],
                 "id_eleccion": mesa[9]
                 }
            )
        cur.execute(show_command_hist)
        mesas_hist = cur.fetchall()
        # Walk the history backwards so newest versions come first.
        for i in range( len(mesas_hist)-1 ,-1,-1):
            mesas_list.append(
                {"id": mesas_hist[i][0],
                 "letra": mesas_hist[i][1],
                 "fecha_inicio": mesas_hist[i][2],
                 "fecha_final": mesas_hist[i][3],
                 "sys_inicio": mesas_hist[i][4],
                 "sys_final": mesas_hist[i][5],
                 "trans_id": mesas_hist[i][6],
                 "id_colegio": mesas_hist[i][7],
                 "descripcion_eleccion": mesas_hist[i][8],
                 "id_eleccion": mesas_hist[i][9]
                 }
            )
        return jsonify(mesas_list)
################################################################
# PARTIDO
################################################################
@app.route('/partidos/', methods=["GET", "POST"])
def all_partido():
    """List all parties (GET) or register one for a six-year term (POST)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_partido = request.get_json()
        siglas = dict_new_partido['siglas']
        nombre = dict_new_partido['nombre']
        presidente = dict_new_partido['presidente']
        fecha_inicio = dict_new_partido['fecha_inicio'][:10]
        # Se calcula la fecha final = fecha inicio + 6 anios.
        # NOTE: 52 weeks * 6 is only an approximation of six years (ignores
        # the extra days/leap days); kept for behavioural compatibility.
        fecha_final = datetime.strptime(fecha_inicio, '%Y-%m-%d') + timedelta(weeks=52*6)
        fecha_final = fecha_final.strftime('%Y-%m-%d')[:10]
        # SECURITY FIX: parameter markers replace str.format interpolation of
        # request values (SQL injection).
        insert_command = ("INSERT INTO PARTIDO (siglas, nombre, presidente, "
                          "fecha_partido_inicio, fecha_partido_final) "
                          "VALUES (?, ?, ?, ?, ?)")
        try:
            cur.execute(insert_command,
                        (siglas, nombre, presidente, fecha_inicio, fecha_final))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        # FIX: the original never closed the cursor on this path.
        cur.close()
        return res
    else:
        cur.execute("SELECT siglas, nombre, presidente, fecha_partido_inicio, "
                    "fecha_partido_final FROM PARTIDO")
        partidos = cur.fetchall()
        partidos_list = [
            {"siglas": partido[0],
             "nombre": partido[1],
             "presidente": partido[2],
             "fecha_inicio": partido[3],
             "fecha_final": partido[4]}
            for partido in partidos
        ]
        # FIX: the original never closed the cursor on this path.
        cur.close()
        return jsonify(partidos_list)
@app.route('/partidos/<siglas>/', methods=['GET', 'POST', 'DELETE'])
def one_partido(siglas):
    """Update (POST), delete (DELETE) or fetch current + history rows (GET)
    for the party identified by *siglas*."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        # Payload from the front end.
        dict_new_partido = request.get_json()
        new_siglas = dict_new_partido['siglas']
        nombre = dict_new_partido['nombre']
        presidente = dict_new_partido['presidente']
        fecha_ini = dict_new_partido['fecha_inicio'][:10]
        fecha_inicio = datetime.strptime(fecha_ini, '%Y-%m-%d')
        # Parameterized: both the JSON payload and the URL component are
        # untrusted input (SQL injection in the original .format() version).
        update_command = (
            "UPDATE partido SET siglas=%s, nombre=%s, presidente=%s "
            "WHERE siglas=%s AND fecha_partido_inicio=%s"
        )
        try:
            cur.execute(update_command,
                        (new_siglas, nombre, presidente, siglas, fecha_inicio))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        try:
            cur.execute("DELETE FROM partido WHERE siglas=%s", (siglas,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        # Same column set for the current table and its history table.
        select_cols = ("siglas, nombre, presidente, fecha_partido_inicio, "
                       "fecha_partido_final, sys_partido_inicio, "
                       "sys_partido_final, trans_id_partido")
        keys = ("siglas", "nombre", "presidente", "fecha_inicio", "fecha_final",
                "sys_inicio", "sys_final", "trans_id")
        cur.execute("SELECT " + select_cols + " from partido where siglas=%s",
                    (siglas,))
        partidos_list = [dict(zip(keys, row)) for row in cur.fetchall()]
        cur.execute("SELECT " + select_cols + " from hist_partido where siglas=%s",
                    (siglas,))
        # History rows are appended newest-first (matches the original
        # reverse-index loop).
        for row in reversed(cur.fetchall()):
            partidos_list.append(dict(zip(keys, row)))
        return jsonify(partidos_list)
################################################################
# APODERADOS LISTA
################################################################
@app.route('/apoderados/', methods=['GET', 'POST'])
def all_apod_lista():
    """Create a list proxy (apoderado de lista) (POST) or list them all (GET)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_apoderado = request.get_json()
        ife_pas = dict_new_apoderado['id']
        fecha_nac = dict_new_apoderado['fecha_nac'][:10]
        direccion = dict_new_apoderado['direccion']
        nombre = dict_new_apoderado['nombre']
        orden = dict_new_apoderado['orden']
        siglas = dict_new_apoderado['siglas']
        # The proxy's validity period mirrors its party's period.
        cur.execute("SELECT fecha_partido_inicio, fecha_partido_final "
                    "FROM PARTIDO WHERE siglas=%s", (siglas,))
        # NOTE(review): an unknown party raises IndexError (HTTP 500) here,
        # outside the try below — original behavior, consider a 404 instead.
        partido = cur.fetchall()[0]
        fecha_partido_inicio = partido[0]
        fecha_partido_final = partido[1]
        # Parameterized to eliminate SQL injection from the JSON payload.
        insert_command = (
            "INSERT INTO apod_lista (ife_pasaporte, fecha_nac, direccion, "
            "nombre, orden, fecha_apod_lista_inicio, fecha_apod_lista_final, "
            "siglas) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)"
        )
        try:
            cur.execute(insert_command,
                        (ife_pas, fecha_nac, direccion, nombre, orden,
                         fecha_partido_inicio, fecha_partido_final, siglas))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        cur.execute("SELECT ife_pasaporte, fecha_nac, direccion, nombre, orden, "
                    "fecha_apod_lista_inicio, fecha_apod_lista_final, siglas "
                    "FROM APOD_LISTA")
        keys = ("id", "fecha_nac", "direccion", "nombre", "orden",
                "fecha_inicio", "fecha_final", "siglas")
        return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
@app.route('/apoderados/<ife_pasaporte>/', methods=['GET', 'POST', 'DELETE'])
def one_apod_lista(ife_pasaporte):
    """Update (POST), delete (DELETE) or fetch current + history rows (GET)
    for one list proxy identified by *ife_pasaporte*."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_apod = request.get_json()
        new_ife_pas = dict_new_apod['id']
        fecha_nac = dict_new_apod['fecha_nac'][:10]
        direccion = dict_new_apod['direccion']
        nombre = dict_new_apod['nombre']
        orden = dict_new_apod['orden']
        siglas = dict_new_apod['siglas']
        # Validity period is copied from the party.
        cur.execute("SELECT fecha_partido_inicio, fecha_partido_final "
                    "FROM PARTIDO WHERE siglas=%s", (siglas,))
        partido = cur.fetchall()[0]
        fecha_apod_lista_inicio = partido[0]
        fecha_apod_lista_final = partido[1]
        # Parameterized: JSON payload and URL component are untrusted.
        update_command = (
            "UPDATE apod_lista SET ife_pasaporte=%s, fecha_nac=%s, "
            "direccion=%s, nombre=%s, orden=%s, fecha_apod_lista_inicio=%s, "
            "fecha_apod_lista_final=%s, siglas=%s WHERE ife_pasaporte=%s"
        )
        try:
            cur.execute(update_command,
                        (new_ife_pas, fecha_nac, direccion, nombre, orden,
                         fecha_apod_lista_inicio, fecha_apod_lista_final,
                         siglas, ife_pasaporte))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        try:
            cur.execute("DELETE FROM apod_lista WHERE ife_pasaporte=%s",
                        (ife_pasaporte,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        select_cols = ("ife_pasaporte, fecha_nac, direccion, nombre, orden, "
                       "fecha_apod_lista_inicio, fecha_apod_lista_final, siglas, "
                       "sys_apod_lista_inicio, sys_apod_lista_final, "
                       "trans_id_apod_lista")
        keys = ("id", "fecha_nac", "direccion", "nombre", "orden",
                "fecha_inicio", "fecha_final", "siglas",
                "sys_inicio", "sys_final", "trans_id")
        cur.execute("SELECT " + select_cols +
                    " FROM APOD_LISTA where ife_pasaporte=%s", (ife_pasaporte,))
        apoderados_list = [dict(zip(keys, row)) for row in cur.fetchall()]
        cur.execute("SELECT " + select_cols +
                    " FROM HIST_APOD_LISTA where ife_pasaporte=%s",
                    (ife_pasaporte,))
        # History appended newest-first (matches original reverse-index loop).
        for row in reversed(cur.fetchall()):
            apoderados_list.append(dict(zip(keys, row)))
        return jsonify(apoderados_list)
################################################################
# VOTOS FEDERALES
################################################################
@app.route('/votosfederales/', methods=["GET", "POST"])
def all_votosF():
    """Register a federal vote (POST) or list all federal votes (GET)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_voto_f = request.get_json()
        id_mesa = dict_new_voto_f['id_mesa']
        siglas = dict_new_voto_f['siglas']
        tipo_voto = dict_new_voto_f['tipo_voto']
        # Copy the table's and the party's validity periods into the vote row.
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        cur.execute("SELECT fecha_partido_inicio, fecha_partido_final "
                    "FROM PARTIDO WHERE siglas=%s", (siglas,))
        partido = cur.fetchall()[0]
        fecha_partido_inicio, fecha_partido_final = partido[0], partido[1]
        # Parameterized to eliminate SQL injection from the JSON payload.
        insert_command = (
            "INSERT INTO v_federal (id_mesa, fecha_mesa_inicio, "
            "fecha_mesa_final, fecha_partido_inicio, fecha_partido_final, "
            "siglas, tipo_voto) VALUES (%s, %s, %s, %s, %s, %s, %s)"
        )
        try:
            cur.execute(insert_command,
                        (id_mesa, fecha_mesa_inicio, fecha_mesa_final,
                         fecha_partido_inicio, fecha_partido_final,
                         siglas, tipo_voto))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        # (Removed the redundant second cursor the original opened here.)
        show_command = (
            "select id_v_federal, tipo_voto, fecha_hora_voto, letra, "
            "id_colegio, descripcion, v_federal.siglas from v_federal "
            "inner join mesa on v_federal.id_mesa=mesa.id_mesa "
            "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
            "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion "
            "inner join partido on v_federal.siglas=partido.siglas"
        )
        cur.execute(show_command)
        keys = ("id", "tipo_voto", "fecha_hora_voto", "letra",
                "id_colegio", "descripcion", "siglas")
        return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
###############################################################
# VOTOS FEDERALES - DATE LOOKUP
###############################################################
@app.route('/votosfederales/periodos/', methods=['POST'])
def one_period_vf():
    """List federal votes in a date range.

    Body: {"fecha_inicio", "fecha_final", "intervalo"}. With a truthy
    "intervalo" the range is inclusive (SQL BETWEEN); otherwise it is the
    half-open [inicio, final) range — preserving the original semantics.
    """
    cur = db.connection.cursor()
    dict_periodo = request.get_json()
    fecha_inicio = dict_periodo['fecha_inicio'][:10]
    fecha_final = dict_periodo['fecha_final'][:10]
    intervalo = dict_periodo['intervalo']
    # Shared base query (the original duplicated this in a ternary);
    # dates are bound as parameters to prevent SQL injection.
    base_query = (
        "SELECT id_v_federal, tipo_voto, fecha_hora_voto, letra, id_colegio, "
        "descripcion, v_federal.siglas from v_federal "
        "inner join mesa on v_federal.id_mesa=mesa.id_mesa "
        "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
        "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion "
        "inner join partido on v_federal.siglas=partido.siglas "
    )
    if intervalo:
        period_query = base_query + "WHERE (FECHA_HORA_VOTO BETWEEN %s AND %s)"
    else:
        period_query = base_query + "where fecha_hora_voto>=%s and fecha_hora_voto<%s"
    cur.execute(period_query, (fecha_inicio, fecha_final))
    keys = ("id", "tipo_voto", "fecha_hora_voto", "letra",
            "id_colegio", "descripcion", "siglas")
    return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
@app.route('/votosfederales/fecha_ex/', methods=['POST'])
def one_date_vf():
    """List federal votes cast on one calendar date (body: {"fecha"}).

    NOTE(review): BETWEEN is inclusive, so a vote at exactly midnight of the
    following day is also returned — original behavior, confirm intended.
    """
    cur = db.connection.cursor()
    dict_periodo = request.get_json()
    fecha_inicio = dict_periodo['fecha'][:10]
    # Upper bound = requested date + 1 day.
    fecha_final = datetime.strptime(fecha_inicio, '%Y-%m-%d') + timedelta(days=1)
    fecha_final = fecha_final.strftime('%Y-%m-%d')[:10]
    # Dates bound as parameters (SQL injection in the .format() original).
    date_query = (
        "SELECT id_v_federal, tipo_voto, fecha_hora_voto, letra, id_colegio, "
        "descripcion, v_federal.siglas from v_federal "
        "inner join mesa on v_federal.id_mesa=mesa.id_mesa "
        "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
        "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion "
        "inner join partido on v_federal.siglas=partido.siglas "
        "WHERE (FECHA_HORA_VOTO BETWEEN %s AND %s)"
    )
    cur.execute(date_query, (fecha_inicio, fecha_final))
    keys = ("id", "tipo_voto", "fecha_hora_voto", "letra",
            "id_colegio", "descripcion", "siglas")
    return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
################################################################
# VOTOS MUNICIPALES
################################################################
@app.route('/votosmunicipales/', methods=["GET", "POST"])
def all_votosM():
    """Register a municipal vote (POST) or list all municipal votes (GET)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_voto_m = request.get_json()
        id_mesa = dict_new_voto_m['id_mesa']
        siglas = dict_new_voto_m['siglas']
        tipo_voto = dict_new_voto_m['tipo_voto']
        # Copy the table's and the party's validity periods into the vote row.
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        cur.execute("SELECT fecha_partido_inicio, fecha_partido_final "
                    "FROM PARTIDO WHERE siglas=%s", (siglas,))
        partido = cur.fetchall()[0]
        fecha_partido_inicio, fecha_partido_final = partido[0], partido[1]
        # Parameterized to eliminate SQL injection from the JSON payload.
        insert_command = (
            "INSERT INTO v_municipal (id_mesa, fecha_mesa_inicio, "
            "fecha_mesa_final, fecha_partido_inicio, fecha_partido_final, "
            "siglas, tipo_voto) VALUES (%s, %s, %s, %s, %s, %s, %s)"
        )
        try:
            cur.execute(insert_command,
                        (id_mesa, fecha_mesa_inicio, fecha_mesa_final,
                         fecha_partido_inicio, fecha_partido_final,
                         siglas, tipo_voto))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        # (Removed the redundant second cursor and renamed the misleading
        # `v_federales` locals from the original.)
        show_command = (
            "select id_v_municipal, tipo_voto, fecha_hora_voto, letra, "
            "id_colegio, descripcion, v_municipal.siglas from v_municipal "
            "inner join mesa on v_municipal.id_mesa=mesa.id_mesa "
            "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
            "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion "
            "inner join partido on v_municipal.siglas=partido.siglas"
        )
        cur.execute(show_command)
        keys = ("id", "tipo_voto", "fecha_hora_voto", "letra",
                "id_colegio", "descripcion", "siglas")
        return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
###############################################################
# VOTOS MUNICIPALES - DATE LOOKUP
###############################################################
@app.route('/votosmunicipales/periodos/', methods=['POST'])
def one_period_vm():
    """List municipal votes in a date range.

    Body: {"fecha_inicio", "fecha_final", "intervalo"}. Truthy "intervalo"
    means an inclusive BETWEEN range; otherwise the half-open
    [inicio, final) range — preserving the original semantics.
    """
    cur = db.connection.cursor()
    dict_periodo = request.get_json()
    fecha_inicio = dict_periodo['fecha_inicio'][:10]
    fecha_final = dict_periodo['fecha_final'][:10]
    intervalo = dict_periodo['intervalo']
    # Shared base query (the original duplicated it in a ternary);
    # dates are bound as parameters to prevent SQL injection.
    base_query = (
        "SELECT id_v_municipal, tipo_voto, fecha_hora_voto, letra, id_colegio, "
        "descripcion, v_municipal.siglas from v_municipal "
        "inner join mesa on v_municipal.id_mesa=mesa.id_mesa "
        "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
        "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion "
        "inner join partido on v_municipal.siglas=partido.siglas "
    )
    if intervalo:
        period_query = base_query + "WHERE (FECHA_HORA_VOTO BETWEEN %s AND %s)"
    else:
        period_query = base_query + "where fecha_hora_voto>=%s and fecha_hora_voto<%s"
    cur.execute(period_query, (fecha_inicio, fecha_final))
    keys = ("id", "tipo_voto", "fecha_hora_voto", "letra",
            "id_colegio", "descripcion", "siglas")
    return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
@app.route('/votosmunicipales/fecha_ex/', methods=['POST'])
def one_date_vm():
    """List municipal votes cast on one calendar date (body: {"fecha"}).

    NOTE(review): BETWEEN is inclusive, so a vote at exactly midnight of the
    following day is also returned — original behavior, confirm intended.
    """
    cur = db.connection.cursor()
    dict_periodo = request.get_json()
    fecha_inicio = dict_periodo['fecha'][:10]
    # Upper bound = requested date + 1 day.
    fecha_final = datetime.strptime(fecha_inicio, '%Y-%m-%d') + timedelta(days=1)
    fecha_final = fecha_final.strftime('%Y-%m-%d')[:10]
    # Dates bound as parameters (SQL injection in the .format() original).
    date_query = (
        "SELECT id_v_municipal, tipo_voto, fecha_hora_voto, letra, id_colegio, "
        "descripcion, v_municipal.siglas from v_municipal "
        "inner join mesa on v_municipal.id_mesa=mesa.id_mesa "
        "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
        "inner join eleccion on colegio.id_colegio_eleccion=eleccion.id_eleccion "
        "inner join partido on v_municipal.siglas=partido.siglas "
        "WHERE (FECHA_HORA_VOTO BETWEEN %s AND %s)"
    )
    cur.execute(date_query, (fecha_inicio, fecha_final))
    keys = ("id", "tipo_voto", "fecha_hora_voto", "letra",
            "id_colegio", "descripcion", "siglas")
    return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
################################################################
# VOTANTE (NO_MEX, MEX)
################################################################
@app.route('/votantes/', methods=["GET", "POST"])
def all_votante():
    """Create a voter of tipo 0/1 (POST) or list all such voters (GET)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_votante = request.get_json()
        ife_pas = dict_new_votante['id']
        fecha_nac = dict_new_votante['fecha_nac'][:10]
        direccion = dict_new_votante['direccion']
        nombre = dict_new_votante['nombre']
        fecha_inicio_votante = dict_new_votante['fecha_inicio'][:10]
        # End date = start + 10 "years" approximated as 52-week years
        # (preserves original arithmetic).
        fecha_final = datetime.strptime(fecha_inicio_votante, '%Y-%m-%d') + timedelta(weeks=52 * 10)
        fecha_final_votante = fecha_final.strftime('%Y-%m-%d')[:10]
        tipo = dict_new_votante['es_extranjero']
        # NOTE(review): mapping looks inverted (truthy es_extranjero -> 0);
        # kept as-is to preserve behavior — confirm the tipo encoding.
        es_extranjero = 0 if tipo else 1
        id_mesa = dict_new_votante['id_mesa']
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE MESA.id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        # Parameterized to eliminate SQL injection from the JSON payload.
        insert_command = (
            "INSERT INTO VOTANTE (ife_pasaporte, fecha_nac, direccion, nombre, "
            "fecha_votante_inicio, fecha_votante_final, id_mesa, "
            "fecha_mesa_inicio, fecha_mesa_final, tipo) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        try:
            cur.execute(insert_command,
                        (ife_pas, fecha_nac, direccion, nombre,
                         fecha_inicio_votante, fecha_final_votante, id_mesa,
                         fecha_mesa_inicio, fecha_mesa_final, es_extranjero))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:  # GET
        cur.execute("SELECT ife_pasaporte, nombre, letra, V.id_mesa, tipo "
                    "FROM VOTANTE AS V INNER JOIN MESA ON V.id_mesa=MESA.id_mesa "
                    "WHERE tipo in (0, 1)")
        keys = ("id", "nombre", "letra", "id_mesa", "tipo")
        return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
@app.route('/votantes/<ife_pasaporte>/', methods=["GET", "POST", "DELETE"])
def one_votante(ife_pasaporte):
    """Update (POST), delete (DELETE) or fetch current + history rows (GET)
    for one voter of tipo 0/1 identified by *ife_pasaporte*."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_votante = request.get_json()
        new_ife_pas = dict_new_votante['id']
        fecha_nac = dict_new_votante['fecha_nac'][:10]
        direccion = dict_new_votante['direccion']
        nombre = dict_new_votante['nombre']
        fecha_votante_inicio = dict_new_votante['fecha_inicio'][:10]
        # End date = start + 10 "years" approximated as 52-week years.
        fecha_votante_final = datetime.strptime(fecha_votante_inicio, '%Y-%m-%d') + timedelta(weeks=52 * 10)
        fecha_votante_final = fecha_votante_final.strftime('%Y-%m-%d')[:10]
        id_mesa = dict_new_votante['id_mesa']
        tipo = dict_new_votante['es_extranjero']
        # NOTE(review): mapping looks inverted (truthy es_extranjero -> 0);
        # kept as-is to preserve behavior — confirm the tipo encoding.
        es_extranjero = 0 if tipo else 1
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        # Parameterized: JSON payload and URL component are untrusted.
        update_command = (
            "UPDATE votante SET ife_pasaporte=%s, fecha_nac=%s, direccion=%s, "
            "nombre=%s, fecha_votante_inicio=%s, fecha_votante_final=%s, "
            "id_mesa=%s, fecha_mesa_inicio=%s, fecha_mesa_final=%s, tipo=%s "
            "WHERE ife_pasaporte=%s"
        )
        try:
            cur.execute(update_command,
                        (new_ife_pas, fecha_nac, direccion, nombre,
                         fecha_votante_inicio, fecha_votante_final, id_mesa,
                         fecha_mesa_inicio, fecha_mesa_final, es_extranjero,
                         ife_pasaporte))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        try:
            cur.execute("DELETE FROM VOTANTE WHERE ife_pasaporte=%s",
                        (ife_pasaporte,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:  # GET
        # FIX: history rows now use the same "fecha_inicio"/"fecha_final"
        # keys as current rows (the original emitted "fecha_votante_inicio"
        # only for history, inconsistent with this handler's current rows
        # and with one_presidente/one_vocal).
        keys = ("id", "fecha_nac", "direccion", "nombre", "letra",
                "fecha_inicio", "fecha_final", "id_mesa",
                "fecha_mesa_inicio", "fecha_mesa_final", "id_colegio",
                "id_eleccion", "descripcion", "tipo",
                "sys_inicio", "sys_final", "trans_id")
        show_command = (
            "SELECT ife_pasaporte, fecha_nac, VOTANTE.direccion, nombre, letra, "
            "fecha_votante_inicio, fecha_votante_final, VOTANTE.id_mesa, "
            "VOTANTE.fecha_mesa_inicio, VOTANTE.fecha_mesa_final, id_colegio, "
            "id_eleccion, descripcion, VOTANTE.tipo, sys_votante_inicio, "
            "sys_votante_final, trans_id_votante FROM VOTANTE "
            "INNER JOIN MESA ON VOTANTE.id_mesa=MESA.id_mesa "
            "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
            "INNER JOIN ELECCION ON COLEGIO.id_colegio_eleccion=eleccion.id_eleccion "
            "WHERE VOTANTE.tipo in (0, 1) AND ife_pasaporte=%s"
        )
        show_command_hist = (
            "SELECT ife_pasaporte, fecha_nac, HIST_VOTANTE.direccion, nombre, "
            "letra, fecha_votante_inicio, fecha_votante_final, "
            "HIST_VOTANTE.id_mesa, HIST_VOTANTE.fecha_mesa_inicio, "
            "HIST_VOTANTE.fecha_mesa_final, id_colegio, id_eleccion, "
            "descripcion, HIST_VOTANTE.tipo, sys_votante_inicio, "
            "sys_votante_final, trans_id_votante FROM HIST_VOTANTE "
            "INNER JOIN MESA on HIST_VOTANTE.id_mesa=MESA.id_mesa "
            "INNER JOIN COLEGIO on MESA.id_mesa_colegio=COLEGIO.id_colegio "
            "INNER JOIN ELECCION on COLEGIO.id_colegio_eleccion=eleccion.id_eleccion "
            "WHERE HIST_VOTANTE.tipo in (0, 1) AND ife_pasaporte=%s"
        )
        cur.execute(show_command, (ife_pasaporte,))
        votantes_list = [dict(zip(keys, row)) for row in cur.fetchall()]
        cur.execute(show_command_hist, (ife_pasaporte,))
        # History appended newest-first (matches original reverse-index loop).
        for row in reversed(cur.fetchall()):
            votantes_list.append(dict(zip(keys, row)))
        return jsonify(votantes_list)
####################################################################
# VOTANTE (PRESIDENTES)
#####################################################################
@app.route('/presidentes/', methods=['GET', 'POST'])
def all_presidentes():
    """Create a table president (VOTANTE tipo=2) (POST) or list them (GET)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_presi = request.get_json()
        ife_pas = dict_new_presi['id']
        fecha_nac = dict_new_presi['fecha_nac'][:10]
        direccion = dict_new_presi['direccion']
        nombre = dict_new_presi['nombre']
        fecha_inicio_presi = dict_new_presi['fecha_inicio'][:10]
        # End date = start + 10 "years" approximated as 52-week years.
        fecha_final_presi = datetime.strptime(fecha_inicio_presi, '%Y-%m-%d') + timedelta(weeks=52 * 10)
        fecha_final_presi = fecha_final_presi.strftime('%Y-%m-%d')[:10]
        # (Removed the duplicate id_mesa assignment from the original.)
        id_mesa = dict_new_presi['id_mesa']
        tipo = 2  # tipo 2 == table president
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        # Parameterized to eliminate SQL injection from the JSON payload.
        insert_command = (
            "INSERT INTO VOTANTE (ife_pasaporte, fecha_nac, direccion, nombre, "
            "fecha_votante_inicio, fecha_votante_final, id_mesa, "
            "fecha_mesa_inicio, fecha_mesa_final, tipo) "
            "values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        try:
            cur.execute(insert_command,
                        (ife_pas, fecha_nac, direccion, nombre,
                         fecha_inicio_presi, fecha_final_presi, id_mesa,
                         fecha_mesa_inicio, fecha_mesa_final, tipo))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        cur.execute("select ife_pasaporte, nombre, letra, VOTANTE.id_mesa, "
                    "tipo, fecha_votante_inicio, fecha_votante_final "
                    "from votante inner join mesa on votante.id_mesa=mesa.id_mesa "
                    "where tipo=2")
        keys = ("id", "nombre", "letra", "id_mesa", "tipo",
                "fecha_inicio", "fecha_final")
        return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
@app.route('/presidentes/<ife_pasaporte>/', methods=['GET', 'POST', 'DELETE'])
def one_presidente(ife_pasaporte):
    """Update (POST), delete (DELETE) or fetch current + history rows (GET)
    for one table president (VOTANTE tipo=2) identified by *ife_pasaporte*."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_presi = request.get_json()
        new_ife_pas = dict_new_presi['id']
        fecha_nac = dict_new_presi['fecha_nac'][:10]
        direccion = dict_new_presi['direccion']
        nombre = dict_new_presi['nombre']
        fecha_presi_inicio = dict_new_presi['fecha_inicio'][:10]
        # End date = start + 10 "years" approximated as 52-week years.
        fecha_presi_final = datetime.strptime(fecha_presi_inicio, '%Y-%m-%d') + timedelta(weeks=52 * 10)
        fecha_presi_final = fecha_presi_final.strftime('%Y-%m-%d')[:10]
        id_mesa = dict_new_presi['id_mesa']
        tipo = 2  # tipo 2 == table president
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        # Parameterized: JSON payload and URL component are untrusted.
        update_command = (
            "UPDATE VOTANTE SET ife_pasaporte=%s, fecha_nac=%s, direccion=%s, "
            "nombre=%s, fecha_votante_inicio=%s, fecha_votante_final=%s, "
            "id_mesa=%s, fecha_mesa_inicio=%s, fecha_mesa_final=%s, tipo=%s "
            "WHERE ife_pasaporte=%s"
        )
        try:
            cur.execute(update_command,
                        (new_ife_pas, fecha_nac, direccion, nombre,
                         fecha_presi_inicio, fecha_presi_final, id_mesa,
                         fecha_mesa_inicio, fecha_mesa_final, tipo,
                         ife_pasaporte))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        try:
            cur.execute("DELETE FROM VOTANTE WHERE ife_pasaporte=%s",
                        (ife_pasaporte,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:  # GET
        keys = ("id", "fecha_nac", "direccion", "nombre", "letra",
                "fecha_inicio", "fecha_final", "id_mesa",
                "fecha_mesa_inicio", "fecha_mesa_final", "id_colegio",
                "id_eleccion", "descripcion", "tipo",
                "sys_inicio", "sys_final", "trans_id")
        show_command = (
            "SELECT ife_pasaporte, fecha_nac, VOTANTE.direccion, nombre, letra, "
            "fecha_votante_inicio, fecha_votante_final, VOTANTE.id_mesa, "
            "VOTANTE.fecha_mesa_inicio, VOTANTE.fecha_mesa_final, id_colegio, "
            "id_eleccion, descripcion, VOTANTE.tipo, sys_votante_inicio, "
            "sys_votante_final, trans_id_votante FROM VOTANTE "
            "INNER JOIN MESA ON VOTANTE.id_mesa=MESA.id_mesa "
            "inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio "
            "INNER JOIN ELECCION ON COLEGIO.id_colegio_eleccion=eleccion.id_eleccion "
            "WHERE VOTANTE.tipo=2 AND ife_pasaporte=%s"
        )
        show_command_hist = (
            "SELECT ife_pasaporte, fecha_nac, HIST_VOTANTE.direccion, nombre, "
            "letra, fecha_votante_inicio, fecha_votante_final, "
            "HIST_VOTANTE.id_mesa, HIST_VOTANTE.fecha_mesa_inicio, "
            "HIST_VOTANTE.fecha_mesa_final, id_colegio, id_eleccion, "
            "descripcion, HIST_VOTANTE.tipo, sys_votante_inicio, "
            "sys_votante_final, trans_id_votante FROM HIST_VOTANTE "
            "INNER JOIN MESA on HIST_VOTANTE.id_mesa=MESA.id_mesa "
            "INNER JOIN COLEGIO on MESA.id_mesa_colegio=COLEGIO.id_colegio "
            "INNER JOIN ELECCION on COLEGIO.id_colegio_eleccion=eleccion.id_eleccion "
            "WHERE HIST_VOTANTE.tipo=2 AND ife_pasaporte=%s"
        )
        cur.execute(show_command, (ife_pasaporte,))
        presis_list = [dict(zip(keys, row)) for row in cur.fetchall()]
        cur.execute(show_command_hist, (ife_pasaporte,))
        # History appended newest-first (matches original reverse-index loop).
        for row in reversed(cur.fetchall()):
            presis_list.append(dict(zip(keys, row)))
        return jsonify(presis_list)
##########################################################################
# VOTANTE (VOCALES)
##########################################################################
@app.route('/vocales/', methods=['GET', 'POST'])
def all_vocales():
    """Create a table vocal (VOTANTE tipo=3) (POST) or list them (GET)."""
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_vocal = request.get_json()
        ife_pas = dict_new_vocal['id']
        fecha_nac = dict_new_vocal['fecha_nac'][:10]
        direccion = dict_new_vocal['direccion']
        nombre = dict_new_vocal['nombre']
        fecha_inicio_vocal = dict_new_vocal['fecha_inicio'][:10]
        # End date = start + 10 "years" approximated as 52-week years.
        fecha_final_vocal = datetime.strptime(fecha_inicio_vocal, '%Y-%m-%d') + timedelta(weeks=52 * 10)
        fecha_final_vocal = fecha_final_vocal.strftime('%Y-%m-%d')[:10]
        # (Removed the duplicate id_mesa assignment from the original.)
        id_mesa = dict_new_vocal['id_mesa']
        tipo = 3  # tipo 3 == vocal, by definition of this endpoint
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final "
                    "FROM MESA WHERE id_mesa=%s", (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio, fecha_mesa_final = mesa[0], mesa[1]
        # Parameterized to eliminate SQL injection from the JSON payload.
        insert_command = (
            "INSERT INTO VOTANTE (ife_pasaporte, fecha_nac, direccion, nombre, "
            "fecha_votante_inicio, fecha_votante_final, id_mesa, "
            "fecha_mesa_inicio, fecha_mesa_final, tipo) "
            "values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        try:
            cur.execute(insert_command,
                        (ife_pas, fecha_nac, direccion, nombre,
                         fecha_inicio_vocal, fecha_final_vocal, id_mesa,
                         fecha_mesa_inicio, fecha_mesa_final, tipo))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        cur.execute("select ife_pasaporte, nombre, letra, VOTANTE.id_mesa, "
                    "tipo, fecha_votante_inicio, fecha_votante_final "
                    "from votante inner join mesa on votante.id_mesa=mesa.id_mesa "
                    "where tipo=3")
        keys = ("id", "nombre", "letra", "id_mesa", "tipo",
                "fecha_inicio", "fecha_final")
        return jsonify([dict(zip(keys, row)) for row in cur.fetchall()])
@app.route('/vocales/<ife_pasaporte>/', methods=['GET', 'POST', 'DELETE'])
def one_vocal(ife_pasaporte):
    """Read, update or delete a single vocal (tipo=3) keyed by ife_pasaporte.

    POST   -> update the row from a JSON body (id, fecha_nac, direccion,
              nombre, fecha_inicio, id_mesa); the final date is 10 years
              (52*10 weeks) after fecha_inicio.
    DELETE -> remove the row.
    GET    -> current row plus its history (HIST_VOTANTE), joined with
              mesa/colegio/eleccion; history is appended newest-first.
    """
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_vocal = request.get_json()
        new_ife_pas = dict_new_vocal['id']
        fecha_nac = dict_new_vocal['fecha_nac'][:10]
        direccion = dict_new_vocal['direccion']
        nombre = dict_new_vocal['nombre']
        fecha_vocal_inicio = dict_new_vocal['fecha_inicio'][:10]
        # Term length: 10 years approximated as 520 weeks.
        fecha_vocal_final = (datetime.strptime(fecha_vocal_inicio, '%Y-%m-%d')
                             + timedelta(weeks=52 * 10)).strftime('%Y-%m-%d')
        id_mesa = dict_new_vocal['id_mesa']
        tipo = 3  # this endpoint only manages vocales
        # Parameterized queries: the previous string-formatted SQL was
        # injectable through any client-controlled value.
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final FROM MESA WHERE id_mesa=%s",
                    (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio = mesa[0]
        fecha_mesa_final = mesa[1]
        update_command = ("UPDATE VOTANTE SET ife_pasaporte=%s, fecha_nac=%s, direccion=%s, "
                          "nombre=%s, fecha_votante_inicio=%s, fecha_votante_final=%s, id_mesa=%s, "
                          "fecha_mesa_inicio=%s, fecha_mesa_final=%s, tipo=%s "
                          "WHERE ife_pasaporte=%s")
        try:
            cur.execute(update_command, (new_ife_pas, fecha_nac, direccion, nombre,
                                         fecha_vocal_inicio, fecha_vocal_final, id_mesa,
                                         fecha_mesa_inicio, fecha_mesa_final, tipo,
                                         ife_pasaporte))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:  # narrowed from bare except; DB errors still map to 404
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        try:
            cur.execute("DELETE FROM VOTANTE WHERE ife_pasaporte=%s", (ife_pasaporte,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:  # GET
        show_command = "SELECT ife_pasaporte, fecha_nac, VOTANTE.direccion, nombre, letra, fecha_votante_inicio, fecha_votante_final, VOTANTE.id_mesa, VOTANTE.fecha_mesa_inicio, VOTANTE.fecha_mesa_final, id_colegio, id_eleccion, descripcion, VOTANTE.tipo, sys_votante_inicio, sys_votante_final, trans_id_votante FROM VOTANTE INNER JOIN MESA ON VOTANTE.id_mesa=MESA.id_mesa inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio INNER JOIN ELECCION ON COLEGIO.id_colegio_eleccion=eleccion.id_eleccion WHERE VOTANTE.tipo=3 AND ife_pasaporte=%s"
        show_command_hist = "SELECT ife_pasaporte, fecha_nac, HIST_VOTANTE.direccion, nombre, letra, fecha_votante_inicio, fecha_votante_final, HIST_VOTANTE.id_mesa, HIST_VOTANTE.fecha_mesa_inicio, HIST_VOTANTE.fecha_mesa_final, id_colegio, id_eleccion, descripcion, HIST_VOTANTE.tipo, sys_votante_inicio, sys_votante_final, trans_id_votante FROM HIST_VOTANTE INNER JOIN MESA on HIST_VOTANTE.id_mesa=MESA.id_mesa INNER JOIN COLEGIO on MESA.id_mesa_colegio=COLEGIO.id_colegio INNER JOIN ELECCION on COLEGIO.id_colegio_eleccion=eleccion.id_eleccion WHERE HIST_VOTANTE.tipo=3 AND ife_pasaporte=%s"

        def _row_to_dict(row):
            # Current and historic rows share the same column order.
            return {"id": row[0],
                    "fecha_nac": row[1],
                    "direccion": row[2],
                    "nombre": row[3],
                    "letra": row[4],
                    "fecha_inicio": row[5],
                    "fecha_final": row[6],
                    "id_mesa": row[7],
                    "fecha_mesa_inicio": row[8],
                    "fecha_mesa_final": row[9],
                    "id_colegio": row[10],
                    "id_eleccion": row[11],
                    "descripcion": row[12],
                    "tipo": row[13],
                    "sys_inicio": row[14],
                    "sys_final": row[15],
                    "trans_id": row[16]}

        cur.execute(show_command, (ife_pasaporte,))
        vocales_list = [_row_to_dict(row) for row in cur.fetchall()]
        cur.execute(show_command_hist, (ife_pasaporte,))
        # History is appended in reverse (most recent first), as before.
        vocales_list.extend(_row_to_dict(row) for row in reversed(cur.fetchall()))
        return jsonify(vocales_list)
###########################################################################
# VOTANTE (SUPLENTES)
###########################################################################
@app.route('/suplentes/', methods=['GET', 'POST'])
def all_suplentes():
    """List all suplentes (tipo=4) or create a new one.

    GET  -> JSON list of suplentes joined with their mesa.
    POST -> JSON body: id, fecha_nac, direccion, nombre, superior,
            fecha_inicio, id_mesa. The final date is derived as 10 years
            (52*10 weeks) after fecha_inicio.
    """
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_suplente = request.get_json()
        ife_pas = dict_new_suplente['id']
        fecha_nac = dict_new_suplente['fecha_nac'][:10]
        direccion = dict_new_suplente['direccion']
        nombre = dict_new_suplente['nombre']
        id_superior = dict_new_suplente['superior']
        fecha_inicio_suplente = dict_new_suplente['fecha_inicio'][:10]
        # Term length: 10 years approximated as 520 weeks.
        fecha_final_suplente = (datetime.strptime(fecha_inicio_suplente, '%Y-%m-%d')
                                + timedelta(weeks=52 * 10)).strftime('%Y-%m-%d')
        id_mesa = dict_new_suplente['id_mesa']
        tipo = 4  # this endpoint only creates suplentes
        # Parameterized queries: the previous string-formatted SQL was
        # injectable and also quoted numeric columns (id_mesa, tipo) as
        # strings, unlike the matching vocales endpoint.
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final FROM MESA WHERE id_mesa=%s",
                    (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio = mesa[0]
        fecha_mesa_final = mesa[1]
        insert_command = ("INSERT INTO VOTANTE (ife_pasaporte, fecha_nac, direccion, nombre, "
                          "fecha_votante_inicio, fecha_votante_final, id_superior, id_mesa, "
                          "fecha_mesa_inicio, fecha_mesa_final, tipo) "
                          "values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        try:
            cur.execute(insert_command, (ife_pas, fecha_nac, direccion, nombre,
                                         fecha_inicio_suplente, fecha_final_suplente,
                                         id_superior, id_mesa,
                                         fecha_mesa_inicio, fecha_mesa_final, tipo))
            res = make_response(jsonify({"message": "Collection created"}), 201)
        except Exception:  # narrowed from bare except; DB errors still map to 404
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:
        # Fixed query, no user input involved.
        show_command = "SELECT ife_pasaporte, nombre, letra, VOTANTE.id_mesa, tipo, fecha_votante_inicio, fecha_votante_final, id_superior from votante inner join mesa on votante.id_mesa=mesa.id_mesa where tipo=4"
        cur.execute(show_command)
        suplentes_list = [
            {"id": suplente[0],
             "nombre": suplente[1],
             "letra": suplente[2],
             "id_mesa": suplente[3],
             "tipo": suplente[4],
             "fecha_inicio": suplente[5],
             "fecha_final": suplente[6],
             "id_superior": suplente[7]}
            for suplente in cur.fetchall()
        ]
        return jsonify(suplentes_list)
@app.route('/suplentes/<ife_pasaporte>/', methods=['GET', 'POST', 'DELETE'])
def one_suplente(ife_pasaporte):
    """Read, update or delete a single suplente (tipo=4) keyed by ife_pasaporte.

    POST   -> update the row from a JSON body (id, fecha_nac, direccion,
              nombre, superior, fecha_inicio, id_mesa); the final date is
              10 years (52*10 weeks) after fecha_inicio.
    DELETE -> remove the row.
    GET    -> current row plus its history (HIST_VOTANTE), joined with
              mesa/colegio/eleccion; history is appended newest-first.
    """
    cur = db.connection.cursor()
    if request.method == 'POST':
        dict_new_suplente = request.get_json()
        new_ife_pas = dict_new_suplente['id']
        fecha_nac = dict_new_suplente['fecha_nac'][:10]
        direccion = dict_new_suplente['direccion']
        nombre = dict_new_suplente['nombre']
        id_superior = dict_new_suplente['superior']
        fecha_suplente_inicio = dict_new_suplente['fecha_inicio'][:10]
        # Term length: 10 years approximated as 520 weeks.
        fecha_suplente_final = (datetime.strptime(fecha_suplente_inicio, '%Y-%m-%d')
                                + timedelta(weeks=52 * 10)).strftime('%Y-%m-%d')
        id_mesa = dict_new_suplente['id_mesa']
        tipo = 4  # this endpoint only manages suplentes
        # Parameterized queries: the previous string-formatted SQL was
        # injectable through any client-controlled value.
        cur.execute("SELECT fecha_mesa_inicio, fecha_mesa_final FROM MESA WHERE id_mesa=%s",
                    (id_mesa,))
        mesa = cur.fetchall()[0]
        fecha_mesa_inicio = mesa[0]
        fecha_mesa_final = mesa[1]
        update_command = ("UPDATE VOTANTE SET ife_pasaporte=%s, fecha_nac=%s, direccion=%s, "
                          "nombre=%s, fecha_votante_inicio=%s, fecha_votante_final=%s, "
                          "id_superior=%s, id_mesa=%s, fecha_mesa_inicio=%s, "
                          "fecha_mesa_final=%s, tipo=%s WHERE ife_pasaporte=%s")
        try:
            cur.execute(update_command, (new_ife_pas, fecha_nac, direccion, nombre,
                                         fecha_suplente_inicio, fecha_suplente_final,
                                         id_superior, id_mesa,
                                         fecha_mesa_inicio, fecha_mesa_final, tipo,
                                         ife_pasaporte))
            res = make_response(jsonify({"message": "Collection updated"}), 200)
        except Exception:  # narrowed from bare except; DB errors still map to 404
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    elif request.method == 'DELETE':
        try:
            cur.execute("DELETE FROM VOTANTE WHERE ife_pasaporte=%s", (ife_pasaporte,))
            res = make_response(jsonify({}), 204)
        except Exception:
            res = make_response(jsonify({"error": "Collection not found"}), 404)
        return res
    else:  # GET
        show_command = "SELECT ife_pasaporte, fecha_nac, VOTANTE.direccion, nombre, id_superior, letra, fecha_votante_inicio, fecha_votante_final, VOTANTE.id_mesa, VOTANTE.fecha_mesa_inicio, VOTANTE.fecha_mesa_final, id_colegio, id_eleccion, descripcion, VOTANTE.tipo, sys_votante_inicio, sys_votante_final, trans_id_votante FROM VOTANTE INNER JOIN MESA ON VOTANTE.id_mesa=MESA.id_mesa inner join colegio on mesa.id_mesa_colegio=colegio.id_colegio INNER JOIN ELECCION ON COLEGIO.id_colegio_eleccion=eleccion.id_eleccion WHERE VOTANTE.tipo=4 AND ife_pasaporte=%s"
        show_command_hist = "SELECT ife_pasaporte, fecha_nac, HIST_VOTANTE.direccion, nombre, id_superior, letra, fecha_votante_inicio, fecha_votante_final, HIST_VOTANTE.id_mesa, HIST_VOTANTE.fecha_mesa_inicio, HIST_VOTANTE.fecha_mesa_final, id_colegio, id_eleccion, descripcion, HIST_VOTANTE.tipo, sys_votante_inicio, sys_votante_final, trans_id_votante FROM HIST_VOTANTE INNER JOIN MESA on HIST_VOTANTE.id_mesa=MESA.id_mesa INNER JOIN COLEGIO on MESA.id_mesa_colegio=COLEGIO.id_colegio INNER JOIN ELECCION on COLEGIO.id_colegio_eleccion=eleccion.id_eleccion WHERE HIST_VOTANTE.tipo=4 AND ife_pasaporte=%s"

        def _row_to_dict(row):
            # Current and historic rows share the same column order.
            return {"id": row[0],
                    "fecha_nac": row[1],
                    "direccion": row[2],
                    "nombre": row[3],
                    "id_superior": row[4],
                    "letra": row[5],
                    "fecha_inicio": row[6],
                    "fecha_final": row[7],
                    "id_mesa": row[8],
                    "fecha_mesa_inicio": row[9],
                    "fecha_mesa_final": row[10],
                    "id_colegio": row[11],
                    "id_eleccion": row[12],
                    "descripcion": row[13],
                    "tipo": row[14],
                    "sys_inicio": row[15],
                    "sys_final": row[16],
                    "trans_id": row[17]}

        cur.execute(show_command, (ife_pasaporte,))
        suplentes_list = [_row_to_dict(row) for row in cur.fetchall()]
        cur.execute(show_command_hist, (ife_pasaporte,))
        # History is appended in reverse (most recent first), as before.
        suplentes_list.extend(_row_to_dict(row) for row in reversed(cur.fetchall()))
        return jsonify(suplentes_list)
########################################### MAIN ################################
if __name__ == "__main__":
    # Development alternative (externally reachable, debug auto-reload):
    # app.run(host='0.0.0.0', port=5000, debug=True)
    # Production-style run: local interface only, one thread per request.
    app.run(port=5000, threaded=True)
| 60.547921
| 929
| 0.422575
| 8,620
| 104,869
| 4.841995
| 0.032831
| 0.024294
| 0.02336
| 0.030572
| 0.786621
| 0.750683
| 0.725119
| 0.700129
| 0.672529
| 0.655302
| 0
| 0.01547
| 0.47484
| 104,869
| 1,731
| 930
| 60.5829
| 0.742396
| 0.015085
| 0
| 0.565649
| 0
| 0.032647
| 0.228302
| 0.054712
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020582
| false
| 0.00071
| 0.003549
| 0
| 0.066004
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1823eb67bdc2bddedc66af9552227161f0b35eee
| 247
|
py
|
Python
|
src/trates/__init__.py
|
Kobie-Kirven/TTSIM
|
afdf06888ae1af7a176848c1ffdf169471076082
|
[
"MIT"
] | 1
|
2021-03-25T21:51:36.000Z
|
2021-03-25T21:51:36.000Z
|
src/trates/__init__.py
|
Kobie-Kirven/trates
|
afdf06888ae1af7a176848c1ffdf169471076082
|
[
"MIT"
] | null | null | null |
src/trates/__init__.py
|
Kobie-Kirven/trates
|
afdf06888ae1af7a176848c1ffdf169471076082
|
[
"MIT"
] | null | null | null |
from .seqSlice import Slicer
from .structureBuilder import Structure
from .vmd_prep import PrepPSF, EditStructure, createConfigFile
from .rmsd import RMSD
from .smooth import Smooth
from .nativeContacts import NativeContacts
from .plot import Plot
| 35.285714
| 62
| 0.846154
| 31
| 247
| 6.709677
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117409
| 247
| 7
| 63
| 35.285714
| 0.954128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
18414a642e840ab8218a8ba9443432018dca04e1
| 13,899
|
py
|
Python
|
clifford/tools/g3c/model_matching.py
|
rotu/clifford
|
7a0a0e83be9e2b67da2681d66e8cb4ede912fa51
|
[
"BSD-3-Clause"
] | null | null | null |
clifford/tools/g3c/model_matching.py
|
rotu/clifford
|
7a0a0e83be9e2b67da2681d66e8cb4ede912fa51
|
[
"BSD-3-Clause"
] | null | null | null |
clifford/tools/g3c/model_matching.py
|
rotu/clifford
|
7a0a0e83be9e2b67da2681d66e8cb4ede912fa51
|
[
"BSD-3-Clause"
] | null | null | null |
from .object_clustering import assign_measurements_to_objects_matrix, compare_labels
from .rotor_estimation import estimate_rotor_objects, estimate_rotor_objects_subsample, \
sequential_object_rotor_estimation, \
estimate_rotor_objects_subsample_sequential
from .rotor_parameterisation import interpolate_TR_rotors
from .cost_functions import val_rotor_cost_sparse
from . import apply_rotor
from .cost_functions import object_set_cost_matrix, object_cost_function, check_p_cost
import numpy as np
from clifford.g3c import *
from clifford.tools.g3c.cuda import sequential_rotor_estimation_cuda_mvs
import clifford as cf
def fingerprint_function(obj, other_objects, nbins=10, max_cost=100.0, cost_func=check_p_cost):
    """
    Build a scene descriptor ("fingerprint") for *obj* by histogramming its
    cost against every object in *other_objects* over [0.0, max_cost].
    """
    costs = [cost_func(obj, candidate) for candidate in other_objects]
    hist, _edges = np.histogram(costs, bins=nbins, range=(0.0, max_cost))
    return hist
def match_by_fingerprint(reference_model, query_model, nbins=None, max_cost=None, cost_func=check_p_cost):
    """
    Match each object in *query_model* to an object in *reference_model* by
    comparing fingerprint histograms (L1 distance between binned cost profiles).

    Returns
    -------
    labels : list[int]
        Index into reference_model chosen for each query object.
    min_costs : list
        Fingerprint distance of the chosen match for each query object.
    """
    if nbins is None:
        nbins = len(query_model)
    if max_cost is None:
        # Largest absolute pairwise cost within either scene, so every cost
        # falls inside the histogram range. (A single np.max over the 2-D
        # array suffices; the old nested np.max(np.max(...)) was redundant.)
        a = np.max(np.abs(np.array([[cost_func(obj, o_obj) for o_obj in query_model]
                                    for obj in query_model])))
        b = np.max(np.abs(np.array([[cost_func(obj, o_obj) for o_obj in reference_model]
                                    for obj in reference_model])))
        max_cost = max(a, b)
    # Hoisted out of the query loop: reference fingerprints do not depend on
    # the query object, so compute them once instead of once per query object.
    ref_bins = [fingerprint_function(ref, reference_model,
                                     nbins=nbins, max_cost=max_cost, cost_func=cost_func)
                for ref in reference_model]
    labels = []
    min_costs = []
    for query_obj in query_model:
        query_bins = fingerprint_function(query_obj, query_model,
                                          nbins=nbins, max_cost=max_cost, cost_func=cost_func)
        min_cost_match = np.inf
        min_cost_ind = 0
        for i, ref_plane_bins in enumerate(ref_bins):
            cost_match = np.sum(np.abs(query_bins - ref_plane_bins))
            if cost_match < min_cost_match:
                min_cost_match = cost_match
                min_cost_ind = i
        labels.append(min_cost_ind)
        min_costs.append(min_cost_match)
    return labels, min_costs
def iterative_model_match_sequential(reference_model, query_model, iterations=100,
                                     object_type='generic', cuda=False, print_rotor=False,
                                     r_track=None, start_labels=None):
    """
    Iteratively match query objects to reference objects while accumulating a
    motor estimate between the scenes, using sequential rotor estimation.
    Multiple query objects may map to the same reference object. Terminates
    early once the label assignment stops changing.
    """
    # Starting assignment: either the supplied labels or a fresh matching.
    if start_labels is None:
        labels, costs = assign_measurements_to_objects_matrix(reference_model, query_model,
                                                              object_type=object_type, cuda=cuda)
    else:
        labels = [+l for l in start_labels]
    previous_labels = list(labels)
    transformed_query = list(query_model)
    r_est = 1.0 + 0.0*e1  # identity rotor to start
    assert iterations > 0, 'Must have at least 1 iteration'
    for iteration in range(iterations):
        # Pair each query object with its currently-matched reference object.
        matched_refs = [reference_model[k] for k in labels]
        r_est_update, exit_flag = sequential_object_rotor_estimation(matched_refs, transformed_query,
                                                                     random_sequence=True, n_iterations=10,
                                                                     object_type=object_type)
        # Fold the incremental rotor into the running estimate.
        r_est = (r_est_update*r_est).normal()
        # Re-map the query scene with the updated rotor and rematch.
        transformed_query = [apply_rotor(q, r_est).normal() for q in query_model]
        labels, costs = assign_measurements_to_objects_matrix(reference_model, transformed_query,
                                                              object_type=object_type, cuda=cuda)
        if r_track is not None:
            r_track.append(r_est)
        if print_rotor:
            print(r_est)
            print(iteration)
        # Converged: the assignment did not change this iteration.
        if compare_labels(previous_labels, labels):
            return labels, costs, r_est
        previous_labels = list(labels)
    return labels, costs, r_est
def iterative_model_match(reference_model, query_model, iterations=100,
                          object_type='generic', cuda=False, start_labels=None,
                          symmetric=False):
    """
    Iteratively match query objects to reference objects while accumulating a
    motor estimate between the scenes. Multiple query objects may map to the
    same reference object. Terminates early once the label assignment stops
    changing.
    """
    # Starting assignment: either the supplied labels or a fresh matching.
    if start_labels is None:
        labels, costs = assign_measurements_to_objects_matrix(reference_model, query_model,
                                                              object_type=object_type, cuda=cuda,
                                                              symmetric=symmetric)
    else:
        labels = [+l for l in start_labels]
    previous_labels = [+l for l in labels]
    transformed_query = [q for q in query_model]
    r_est = 1.0 + 0.0*e1  # identity rotor to start
    assert iterations > 0, 'Must have at least 1 iteration'
    for iteration in range(iterations):
        # Pair each query object with its currently-matched reference object.
        matched_refs = [reference_model[k] for k in labels]
        r_est_update, cost = estimate_rotor_objects(matched_refs, transformed_query,
                                                    object_type=object_type,
                                                    symmetric=symmetric)
        # Fold the incremental rotor into the running estimate.
        r_est = (r_est_update*r_est).normal()
        # Re-map the query scene with the updated rotor and rematch.
        transformed_query = [apply_rotor(q, r_est).normal() for q in query_model]
        labels, costs = assign_measurements_to_objects_matrix(reference_model, transformed_query,
                                                              object_type=object_type, cuda=cuda,
                                                              symmetric=symmetric)
        # Converged: the assignment did not change this iteration.
        if compare_labels(previous_labels, labels):
            return labels, costs, r_est
        previous_labels = [+l for l in labels]
        print(iteration)
    return labels, costs, r_est
def REFORM(reference_model, query_model, n_samples=100, objects_per_sample=5,
           iterations=100, covergence_threshold=0.00000001,
           pool_size=1, object_type='generic', cuda=False,
           print_rotor=False, start_labels=None, motor=True):
    """
    REFORM scene-to-scene registration: alternate between estimating a rotor
    from subsampled object pairs and rematching the query scene to the
    reference scene, tracking the best (lowest total cost) rotor seen.

    Returns (labels, costs, rotor). If the total matching cost drops below
    *covergence_threshold* the current rotor is returned immediately;
    otherwise the best rotor found over all iterations is used for the
    final matching.
    """
    # Get the starting labels
    if start_labels is None:
        labels, costs = assign_measurements_to_objects_matrix(reference_model, query_model,
                                                              object_type=object_type, cuda=cuda)
    else:
        labels = [+l for l in start_labels]
    min_global_cost = np.inf
    min_global_rotor = 1.0 + 0.0 * e1
    r_est = 1.0 + 0.0 * e1
    remapped_objects = [o for o in query_model]
    assert iterations > 0, 'Must have at least 1 iteration'
    for i in range(iterations):
        # Reorder and estimate the rotor
        reordered_list_a = [reference_model[k] for k in labels]
        r_est_update, cost = estimate_rotor_objects_subsample(reordered_list_a, remapped_objects,
                                                              n_samples,
                                                              objects_per_sample,
                                                              pool_size=pool_size,
                                                              object_type=object_type,
                                                              motor=motor)
        r_est = (r_est_update * r_est)
        r_est = r_est.normal()
        # Re-map with our new rotor and get the new matching
        remapped_objects = [apply_rotor(l, r_est).normal() for l in query_model]
        labels, costs = assign_measurements_to_objects_matrix(reference_model, remapped_objects,
                                                              object_type=object_type, cuda=cuda)
        current_cost = np.sum(costs)
        # Diagnostic output is now gated on the print_rotor flag so the flag
        # actually controls verbosity.
        if print_rotor:
            print(r_est)
            print(i, current_cost, covergence_threshold)
        if current_cost < min_global_cost:
            min_global_cost = current_cost
            min_global_rotor = +r_est
        if current_cost < covergence_threshold:
            return labels, costs, r_est
    # Fell out of the loop: rematch against the best rotor seen overall.
    remapped_objects = [apply_rotor(l, min_global_rotor).normal() for l in query_model]
    # Bug fix: this final matching previously dropped object_type and fell
    # back to 'generic', inconsistent with every other matching call here.
    labels, costs = assign_measurements_to_objects_matrix(reference_model, remapped_objects,
                                                          object_type=object_type, cuda=cuda)
    return labels, costs, min_global_rotor
def REFORM_sequential(reference_model, query_model, n_samples=100, objects_per_sample=5,
                      iterations=100, covergence_threshold=0.00000001,
                      pool_size=1, object_type='generic', cuda=False, start_labels=None):
    """
    REFORM variant using the sequential subsampled rotor estimator. Alternates
    rotor estimation and rematching, tracking the best rotor by total cost.

    Returns (labels, costs, rotor); see REFORM for the convergence contract.
    """
    # Get the starting labels.
    # Bug fix: this call previously dropped object_type (fell back to
    # 'generic'), inconsistent with the matching calls inside the loop.
    if start_labels is None:
        labels, costs = assign_measurements_to_objects_matrix(reference_model, query_model,
                                                              object_type=object_type,
                                                              cuda=cuda)
    else:
        labels = [+l for l in start_labels]
    min_global_cost = np.inf
    min_global_rotor = 1.0 + 0.0 * e1
    r_est = 1.0 + 0.0 * e1
    remapped_objects = [o for o in query_model]
    assert iterations > 0, 'Must have at least 1 iteration'
    for i in range(iterations):
        # Reorder and estimate the rotor
        reordered_list_a = [reference_model[k] for k in labels]
        r_est_update, cost = estimate_rotor_objects_subsample_sequential(reordered_list_a, remapped_objects,
                                                                         n_samples,
                                                                         objects_per_sample,
                                                                         pool_size=pool_size,
                                                                         object_type=object_type)
        r_est = (r_est_update * r_est)
        r_est = r_est.normal()
        # Re-map with our new rotor and get the new matching
        remapped_objects = [apply_rotor(l, r_est).normal() for l in query_model]
        labels, costs = assign_measurements_to_objects_matrix(reference_model, remapped_objects,
                                                              object_type=object_type, cuda=cuda)
        current_cost = np.sum(costs)
        print(i, current_cost, covergence_threshold)
        if current_cost < min_global_cost:
            min_global_cost = current_cost
            min_global_rotor = +r_est
        if current_cost < covergence_threshold:
            return labels, costs, r_est
    print('Finished iterations')
    # Re-map with the best rotor seen overall and rematch.
    remapped_objects = [apply_rotor(l, min_global_rotor).normal() for l in query_model]
    print('Rematching')
    # Bug fix: this final matching also previously dropped object_type.
    labels, costs = assign_measurements_to_objects_matrix(reference_model, remapped_objects,
                                                          object_type=object_type, cuda=cuda)
    print('REFORM complete')
    return labels, costs, min_global_rotor
def REFORM_cuda(reference_model, query_model, n_samples=100, objects_per_sample=5, iterations=100,
                covergence_threshold=0.00000001, mutation_probability=None, start_labels=None):
    """
    CUDA-accelerated REFORM: alternate between sequential rotor estimation on
    the GPU and rematching the query scene to the reference scene, keeping
    the rotor with the lowest total matching cost.

    Returns (labels, costs, rotor). Returns early with the current rotor if
    the total cost drops below covergence_threshold; otherwise the best rotor
    seen is used for the final matching.
    """
    # Get the starting labels
    if start_labels is None:
        labels, costs = assign_measurements_to_objects_matrix(reference_model, query_model,
                                                              cuda=True)
    else:
        labels = [+l for l in start_labels]
    min_global_cost = np.inf
    min_global_rotor = 1.0 + 0.0 * e1  # identity rotor
    r_est = 1.0 + 0.0 * e1
    remapped_objects = [o for o in query_model]
    assert iterations > 0, 'Must have at least 1 iteration'
    for i in range(iterations):
        # Reorder and estimate the rotor
        # NOTE(review): the comprehension variable shadows the loop index i.
        reordered_list_a = [reference_model[i] for i in labels]
        r_list, cost_array = sequential_rotor_estimation_cuda_mvs(reordered_list_a,
                                                                  remapped_objects,
                                                                  n_samples,
                                                                  objects_per_sample,
                                                                  mutation_probability=mutation_probability)
        # Keep the candidate rotor with the lowest estimation cost.
        min_cost_index = np.argmin(cost_array)
        min_cost = cost_array[min_cost_index]  # NOTE(review): computed but unused
        r_est_update = r_list[min_cost_index]
        r_est = (r_est_update * r_est)
        r_est = r_est.normal()
        # Re map with our new rotor
        remapped_objects = [apply_rotor(l,r_est).normal() for l in query_model]
        # Get the new matching
        labels, costs = assign_measurements_to_objects_matrix(reference_model, remapped_objects, cuda=True)
        current_cost = np.sum(costs)
        print(i, covergence_threshold, current_cost, min_global_cost)
        if current_cost < min_global_cost:
            min_global_cost = current_cost
            min_global_rotor = +r_est
        if current_cost < covergence_threshold:
            return labels, costs, r_est
    # Re map with our new rotor
    remapped_objects = [apply_rotor(l, min_global_rotor).normal() for l in query_model]
    # Get the new matching
    labels, costs = assign_measurements_to_objects_matrix(reference_model, remapped_objects, cuda=True)
    return labels, costs, min_global_rotor
| 48.093426
| 128
| 0.608029
| 1,738
| 13,899
| 4.563867
| 0.098389
| 0.026727
| 0.012859
| 0.015129
| 0.790721
| 0.743696
| 0.729324
| 0.722895
| 0.696546
| 0.69352
| 0
| 0.012757
| 0.328873
| 13,899
| 288
| 129
| 48.260417
| 0.837586
| 0.097921
| 0
| 0.666667
| 0
| 0
| 0.017864
| 0
| 0
| 0
| 0
| 0
| 0.023474
| 1
| 0.032864
| false
| 0
| 0.046948
| 0
| 0.13615
| 0.084507
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
187cc0c1865ec7d6574cb073437374fce5c0ebfe
| 206
|
py
|
Python
|
mknotebooks/extra_args_execute_preprocessor.py
|
lgeiger/mknotebooks
|
2a177167b231a8373a7305d4fc86c9f0034c3ba8
|
[
"MIT"
] | 105
|
2018-10-30T15:55:12.000Z
|
2022-03-31T07:50:30.000Z
|
mknotebooks/extra_args_execute_preprocessor.py
|
lgeiger/mknotebooks
|
2a177167b231a8373a7305d4fc86c9f0034c3ba8
|
[
"MIT"
] | 659
|
2020-09-15T10:30:10.000Z
|
2022-03-31T17:38:48.000Z
|
mknotebooks/extra_args_execute_preprocessor.py
|
lgeiger/mknotebooks
|
2a177167b231a8373a7305d4fc86c9f0034c3ba8
|
[
"MIT"
] | 20
|
2019-03-26T18:12:50.000Z
|
2021-12-15T22:29:13.000Z
|
from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import List, Unicode
class ExtraArgsExecutePreprocessor(ExecutePreprocessor):
    """ExecutePreprocessor subclass that adds a configurable ``extra_arguments`` trait.

    ``extra_arguments`` is a traitlets List of Unicode strings; ``.tag(config=True)``
    exposes it through the traitlets configuration system. Presumably the values
    are forwarded to the kernel launch by the nbconvert base class — confirm
    against ExecutePreprocessor's documentation.
    """

    extra_arguments = List(Unicode()).tag(config=True)
| 29.428571
| 56
| 0.834951
| 20
| 206
| 8.55
| 0.75
| 0.128655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097087
| 206
| 6
| 57
| 34.333333
| 0.919355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
43ee4e18c5c692c105a2d5e5517703c451b585ec
| 127
|
py
|
Python
|
C_Tut/contact/urls.py
|
jaydeep11/C-tutorial-web-application
|
3ec0225efda834fe93a678d887044906124de59b
|
[
"MIT"
] | null | null | null |
C_Tut/contact/urls.py
|
jaydeep11/C-tutorial-web-application
|
3ec0225efda834fe93a678d887044906124de59b
|
[
"MIT"
] | null | null | null |
C_Tut/contact/urls.py
|
jaydeep11/C-tutorial-web-application
|
3ec0225efda834fe93a678d887044906124de59b
|
[
"MIT"
] | null | null | null |
from django.urls import path

from . import views

# URL namespace so templates can reverse e.g. ``{% url 'contact:contact' %}``.
app_name='contact'

# Route the app root ('') to the contact view.
urlpatterns=[
    path('',views.contact,name='contact'),
]
| 18.142857
| 42
| 0.724409
| 17
| 127
| 5.352941
| 0.588235
| 0.241758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125984
| 127
| 7
| 43
| 18.142857
| 0.81982
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a1014904cadf6ccdc196457c2c7ab52e13ff7ccd
| 55
|
py
|
Python
|
test_func.py
|
DrewRust/lambdata-drewrust
|
7543b8a0a3937091646c43ee3648b2cec3669222
|
[
"MIT"
] | null | null | null |
test_func.py
|
DrewRust/lambdata-drewrust
|
7543b8a0a3937091646c43ee3648b2cec3669222
|
[
"MIT"
] | null | null | null |
test_func.py
|
DrewRust/lambdata-drewrust
|
7543b8a0a3937091646c43ee3648b2cec3669222
|
[
"MIT"
] | null | null | null |
from ds_util import enlarge

# Smoke test: print a sample value next to its enlarged result.
y = 5
print(y, enlarge(y))
| 13.75
| 27
| 0.727273
| 11
| 55
| 3.545455
| 0.727273
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.163636
| 55
| 4
| 28
| 13.75
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a10aee7ff4388768aad60f84fb16e44d55540a4a
| 169
|
py
|
Python
|
lib/stats.py
|
mtfuller/CS4242-project
|
a77f03b8786c2e7d2913ff15b75e09364195b314
|
[
"MIT"
] | null | null | null |
lib/stats.py
|
mtfuller/CS4242-project
|
a77f03b8786c2e7d2913ff15b75e09364195b314
|
[
"MIT"
] | null | null | null |
lib/stats.py
|
mtfuller/CS4242-project
|
a77f03b8786c2e7d2913ff15b75e09364195b314
|
[
"MIT"
] | null | null | null |
import numpy as np
def calculate_stats(dataset):
    """Return (mean, standard deviation, minimum, maximum) of *dataset*."""
    mean_value = np.mean(dataset)
    std_value = np.std(dataset)
    min_value = np.min(dataset)
    max_value = np.max(dataset)
    return mean_value, std_value, min_value, max_value
| 16.9
| 29
| 0.56213
| 21
| 169
| 4.47619
| 0.619048
| 0.287234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.319527
| 169
| 9
| 30
| 18.777778
| 0.817391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0.125
| 0.375
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
a1536825fc17c0c6e6a27a117f25bf137018b909
| 274
|
py
|
Python
|
operations_api/exceptions.py
|
Mirantis/python-operations-api
|
65cc9bfe04037f2b70d272a33d9729219ecdc116
|
[
"Apache-2.0"
] | null | null | null |
operations_api/exceptions.py
|
Mirantis/python-operations-api
|
65cc9bfe04037f2b70d272a33d9729219ecdc116
|
[
"Apache-2.0"
] | null | null | null |
operations_api/exceptions.py
|
Mirantis/python-operations-api
|
65cc9bfe04037f2b70d272a33d9729219ecdc116
|
[
"Apache-2.0"
] | 1
|
2018-10-04T16:46:25.000Z
|
2018-10-04T16:46:25.000Z
|
class ImproperlyConfigured(Exception):
    """Signals that ``operations-api`` is missing or has invalid configuration."""
class HTTPError(Exception):
    """Signals that ``operations-api`` failed to fetch data from a remote server."""
| 24.909091
| 90
| 0.678832
| 34
| 274
| 5.470588
| 0.705882
| 0.16129
| 0.182796
| 0.215054
| 0.408602
| 0.408602
| 0.408602
| 0
| 0
| 0
| 0
| 0
| 0.218978
| 274
| 10
| 91
| 27.4
| 0.869159
| 0.580292
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
a158a279a173e44b42baaf8fc97d4c1e34724db5
| 4,540
|
py
|
Python
|
gwlfe/Output/AvAnimalNSum/AnimalN.py
|
mudkipmaster/gwlf-e
|
9e058445537dd32d1916f76c4b73ca64261771cd
|
[
"Apache-2.0"
] | null | null | null |
gwlfe/Output/AvAnimalNSum/AnimalN.py
|
mudkipmaster/gwlf-e
|
9e058445537dd32d1916f76c4b73ca64261771cd
|
[
"Apache-2.0"
] | 6
|
2018-07-24T22:46:28.000Z
|
2018-07-29T19:13:09.000Z
|
gwlfe/Output/AvAnimalNSum/AnimalN.py
|
mudkipmaster/gwlf-e
|
9e058445537dd32d1916f76c4b73ca64261771cd
|
[
"Apache-2.0"
] | 1
|
2018-07-24T18:22:01.000Z
|
2018-07-24T18:22:01.000Z
|
from numpy import repeat
from numpy import reshape
from numpy import zeros
from gwlfe.AFOS.GrazingAnimals.Losses.GRLossN import GRLossN
from gwlfe.AFOS.GrazingAnimals.Losses.GRLossN import GRLossN_f
from gwlfe.AFOS.GrazingAnimals.Losses.GRLostBarnN import GRLostBarnN
from gwlfe.AFOS.GrazingAnimals.Losses.GRLostBarnN import GRLostBarnN_f
from gwlfe.AFOS.GrazingAnimals.Losses.GRLostManN import GRLostManN
from gwlfe.AFOS.GrazingAnimals.Losses.GRLostManN import GRLostManN_f
from gwlfe.AFOS.GrazingAnimals.Losses.GRStreamN import GRStreamN
from gwlfe.AFOS.GrazingAnimals.Losses.GRStreamN import GRStreamN_f
from gwlfe.AFOS.nonGrazingAnimals.Losses.NGLostBarnN import NGLostBarnN
from gwlfe.AFOS.nonGrazingAnimals.Losses.NGLostBarnN import NGLostBarnN_f
from gwlfe.AFOS.nonGrazingAnimals.Losses.NGLostManN import NGLostManN
from gwlfe.AFOS.nonGrazingAnimals.Losses.NGLostManN import NGLostManN_f
from gwlfe.Memoization import memoize
def AnimalN(NYrs, NGPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, NGAppNRate, Prec, DaysMonth,
            NGPctSoilIncRate, GRPctManApp, GRAppNRate, GRPctSoilIncRate, NGBarnNRate, AWMSNgPct, NgAWMSCoeffN,
            RunContPct, RunConCoeffN, PctGrazing, GRBarnNRate, AWMSGrPct, GrAWMSCoeffN, PctStreams, GrazingNRate):
    """Return an (NYrs, 12) array of total animal nitrogen losses.

    Each year/month cell is the sum of five per-year/month components
    (non-grazing and grazing manure losses, non-grazing and grazing barn
    losses, grazing losses) plus a per-month stream component that is
    identical across years.
    """
    # Components indexed [year][month]; argument order must match each
    # helper's own signature (they differ between NG* and GR* variants).
    yearly_components = [
        NGLostManN(NYrs, NGPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, NGAppNRate,
                   Prec, DaysMonth, NGPctSoilIncRate),
        GRLostManN(NYrs, GRPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, GRAppNRate,
                   Prec, DaysMonth, GRPctSoilIncRate),
        NGLostBarnN(NYrs, NGPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, NGBarnNRate,
                    Prec, DaysMonth, AWMSNgPct, NgAWMSCoeffN, RunContPct, RunConCoeffN),
        GRLostBarnN(NYrs, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, GRPctManApp, PctGrazing,
                    GRBarnNRate, Prec, DaysMonth, AWMSGrPct, GrAWMSCoeffN, RunContPct, RunConCoeffN),
        GRLossN(NYrs, PctStreams, PctGrazing, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN,
                GrazingNRate, Prec, DaysMonth),
    ]
    # Indexed [month] only; the same monthly value is reused for every year.
    monthly_stream = GRStreamN(PctStreams, PctGrazing, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN)
    totals = zeros((NYrs, 12))
    for year in range(NYrs):
        for month in range(12):
            totals[year][month] = (sum(comp[year][month] for comp in yearly_components)
                                   + monthly_stream[month])
    return totals
@memoize
def AnimalN_f(NYrs, NGPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, NGAppNRate, Prec, DaysMonth,
              NGPctSoilIncRate, GRPctManApp, GRAppNRate, GRPctSoilIncRate, NGBarnNRate, AWMSNgPct, NgAWMSCoeffN,
              RunContPct, RunConCoeffN, PctGrazing, GRBarnNRate, AWMSGrPct, GrAWMSCoeffN, PctStreams, GrazingNRate):
    """Vectorized (memoized) variant of ``AnimalN``.

    Sums the five per-year/month loss components element-wise and adds the
    stream component broadcast across years, returning the same (NYrs, 12)
    total as the loop-based ``AnimalN``.
    """
    ng_lost_man_n = NGLostManN_f(NYrs, NGPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, NGAppNRate,
                                 Prec, DaysMonth, NGPctSoilIncRate)
    gr_lost_man_n = GRLostManN_f(NYrs, GRPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN, GRAppNRate,
                                 Prec, DaysMonth, GRPctSoilIncRate)
    ng_lost_barn_n = NGLostBarnN_f(NYrs, NGPctManApp, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN,
                                   NGBarnNRate, Prec, DaysMonth, AWMSNgPct, NgAWMSCoeffN, RunContPct, RunConCoeffN)
    # NOTE(review): GRLostBarnN_f takes (Prec, DaysMonth) up front, unlike the
    # loop version's GRLostBarnN which takes them later — confirm the _f
    # signature really differs rather than this being a transposition bug.
    gr_lost_barn_n = GRLostBarnN_f(NYrs, Prec, DaysMonth, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN,
                                   GRPctManApp, PctGrazing, GRBarnNRate, AWMSGrPct, GrAWMSCoeffN, RunContPct,
                                   RunConCoeffN)
    gr_loss_n = GRLossN_f(NYrs, PctStreams, PctGrazing, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN,
                          GrazingNRate, Prec,
                          DaysMonth)
    # NOTE(review): repeat(..., axis=0) + reshape replicates the monthly stream
    # values across years only if GRStreamN_f returns a (1, 12) row; if it
    # returns a flat (12,) vector this interleaves months instead (tile would
    # be needed) — confirm GRStreamN_f's return shape.
    gr_stream_n = reshape(
        repeat(GRStreamN_f(PctStreams, PctGrazing, GrazingAnimal_0, NumAnimals, AvgAnimalWt, AnimalDailyN),
               repeats=NYrs, axis=0), (NYrs, 12))
    return ng_lost_man_n + gr_lost_man_n + ng_lost_barn_n + gr_lost_barn_n + gr_loss_n + gr_stream_n
| 66.764706
| 120
| 0.700881
| 458
| 4,540
| 6.748908
| 0.141921
| 0.06341
| 0.108703
| 0.158525
| 0.881915
| 0.861857
| 0.851828
| 0.808476
| 0.489809
| 0.489809
| 0
| 0.006005
| 0.229736
| 4,540
| 67
| 121
| 67.761194
| 0.877895
| 0
| 0
| 0.16129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.258065
| 0
| 0.322581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a1a54133900a8abc30ecdffed4ba6706381c08fc
| 75
|
py
|
Python
|
main.py
|
s-gabor/star-wars
|
e54405b2fb1b253a1b796f87b5b7b607cbdd89d1
|
[
"MIT"
] | null | null | null |
main.py
|
s-gabor/star-wars
|
e54405b2fb1b253a1b796f87b5b7b607cbdd89d1
|
[
"MIT"
] | null | null | null |
main.py
|
s-gabor/star-wars
|
e54405b2fb1b253a1b796f87b5b7b607cbdd89d1
|
[
"MIT"
] | null | null | null |
import swapi

# Fetch a single person record from the Star Wars API and show the
# value plus its runtime type.
hero = swapi.get_person(1)
print(hero.name, type(hero.name))
| 15
| 33
| 0.746667
| 13
| 75
| 4.230769
| 0.692308
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.106667
| 75
| 4
| 34
| 18.75
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a1a81b56a8eedd60a256e419de3a6363aacd9f18
| 75
|
py
|
Python
|
transfer_learning/__init__.py
|
YifanQie/Deep_Learning_for_Manufacturing
|
9ba19e41f69c561b04b8573ab9c52c0969f45bfd
|
[
"MIT"
] | 27
|
2019-10-31T15:16:13.000Z
|
2022-03-29T03:56:57.000Z
|
transfer_learning/__init__.py
|
YifanQie/Deep_Learning_for_Manufacturing
|
9ba19e41f69c561b04b8573ab9c52c0969f45bfd
|
[
"MIT"
] | 4
|
2020-03-25T14:18:04.000Z
|
2022-02-10T00:34:58.000Z
|
transfer_learning/__init__.py
|
YifanQie/Deep_Learning_for_Manufacturing
|
9ba19e41f69c561b04b8573ab9c52c0969f45bfd
|
[
"MIT"
] | 7
|
2020-02-23T22:12:37.000Z
|
2021-12-08T20:14:41.000Z
|
"""
A Libraray to initilize the avaiable modules and data structures
"""
| 12.5
| 64
| 0.733333
| 10
| 75
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186667
| 75
| 5
| 65
| 15
| 0.901639
| 0.853333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a1ac6003d596821c2afa24b925ff530735092711
| 130
|
py
|
Python
|
src/router.py
|
seven48/hubot-mongo-bridge
|
5e8dcaf22c120cc03a6cf9bc150adf3153434775
|
[
"Apache-2.0"
] | null | null | null |
src/router.py
|
seven48/hubot-mongo-bridge
|
5e8dcaf22c120cc03a6cf9bc150adf3153434775
|
[
"Apache-2.0"
] | 8
|
2019-07-01T15:17:09.000Z
|
2019-07-04T08:13:04.000Z
|
src/router.py
|
seven48/hubot-mongo-bridge
|
5e8dcaf22c120cc03a6cf9bc150adf3153434775
|
[
"Apache-2.0"
] | 1
|
2019-08-26T18:49:00.000Z
|
2019-08-26T18:49:00.000Z
|
""" Module for routing """
from aiohttp import web
from src.views import get_locale
ROUTER = [web.get('/locale', get_locale), ]
| 18.571429
| 43
| 0.707692
| 19
| 130
| 4.736842
| 0.631579
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 130
| 6
| 44
| 21.666667
| 0.818182
| 0.138462
| 0
| 0
| 0
| 0
| 0.067308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a1ae22f1402fb69031314f96842f954c646ecc1a
| 932
|
py
|
Python
|
omcp/diagnoses/models.py
|
shutogeorgio/omcp-service
|
4d4c2943d3393c77019a780a0caa1457e14b7d8d
|
[
"MIT"
] | null | null | null |
omcp/diagnoses/models.py
|
shutogeorgio/omcp-service
|
4d4c2943d3393c77019a780a0caa1457e14b7d8d
|
[
"MIT"
] | 8
|
2020-12-04T16:22:41.000Z
|
2020-12-09T12:29:39.000Z
|
omcp/diagnoses/models.py
|
shutogeorgio/omcp-service
|
4d4c2943d3393c77019a780a0caa1457e14b7d8d
|
[
"MIT"
] | null | null | null |
from django.db import models
from .register_status import RegisterStatus
from .diagnosis_type import DiagnosisType
from users.doctor import Doctor
from users.patient import Patient
class Diagnosis(models.Model):
    """A single diagnosis record linking a doctor to an (optional) patient."""

    # Deleting the doctor or patient cascades to their diagnoses.
    doctor = models.ForeignKey(Doctor, on_delete=models.CASCADE)
    # Patient may be unset (default None, null/blank allowed) — e.g. a
    # diagnosis slot created before a patient registers; confirm workflow.
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE, default=None, blank=True, null=True)
    title = models.CharField(max_length=50, default="")
    description = models.CharField(max_length=1000, default="")
    # Link and password for the (video) consultation session.
    video_link = models.CharField(max_length=100, default="")
    video_password = models.CharField(max_length=50, default="")
    # NOTE: 'type' shadows the builtin; kept for DB-column compatibility.
    type = models.CharField(max_length=50, default=DiagnosisType.MENTAL)
    status = models.CharField(max_length=50, default=RegisterStatus.REGISTERED)
    date = models.DateField(blank=True, null=True)
    # Uploaded files land under MEDIA_ROOT/diagnoses/; placeholder image by default.
    image = models.FileField(upload_to='diagnoses/', blank=True, default='diagnoses/no-img.jpg')
| 46.6
| 103
| 0.770386
| 119
| 932
| 5.92437
| 0.403361
| 0.12766
| 0.153191
| 0.204255
| 0.187234
| 0.187234
| 0
| 0
| 0
| 0
| 0
| 0.018226
| 0.116953
| 932
| 19
| 104
| 49.052632
| 0.838396
| 0
| 0
| 0
| 0
| 0
| 0.032189
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.0625
| 0.3125
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
a1b46e78d42327eab52cca338140e1c29891ebae
| 403
|
py
|
Python
|
elements/helpers.py
|
philsupertramp/wik
|
0650ae181926a5ccad8af70b8ae9a572a423e6f6
|
[
"MIT"
] | null | null | null |
elements/helpers.py
|
philsupertramp/wik
|
0650ae181926a5ccad8af70b8ae9a572a423e6f6
|
[
"MIT"
] | 19
|
2021-02-09T18:01:05.000Z
|
2021-08-25T04:50:44.000Z
|
elements/helpers.py
|
philsupertramp/wiki
|
b30ee58d63e55588ced06af4f6588c8dd6baba7e
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import Group
from django.shortcuts import render
from elements.models import Tag
def render_with_tags(request, template, context):
    """Render *template* after injecting the full tag queryset into *context*.

    Note: mutates the caller's context dict in place (adds the 'tags' key).
    """
    context['tags'] = Tag.objects.all()
    return render(request, template, context=context)
def is_mod_user(user) -> bool:
    """Return True if *user* belongs to the 'Mods' group (created on demand)."""
    group, _created = Group.objects.get_or_create(name='Mods')
    return group in user.groups.all()
| 26.866667
| 59
| 0.751861
| 59
| 403
| 5
| 0.559322
| 0.067797
| 0.149153
| 0.19661
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002865
| 0.133995
| 403
| 14
| 60
| 28.785714
| 0.842407
| 0
| 0
| 0
| 0
| 0
| 0.019851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
a1cc356e824f61c68561ffb09ed2563925b9cfbb
| 43
|
py
|
Python
|
swapcase.py
|
RahulSinghazm/Python_Programs
|
98a7b78a1c274d006a9fa98fe0e8e8656a6d5892
|
[
"bzip2-1.0.6"
] | 1
|
2018-10-21T08:11:52.000Z
|
2018-10-21T08:11:52.000Z
|
swapcase.py
|
RahulSinghazm/Python_Programs
|
98a7b78a1c274d006a9fa98fe0e8e8656a6d5892
|
[
"bzip2-1.0.6"
] | null | null | null |
swapcase.py
|
RahulSinghazm/Python_Programs
|
98a7b78a1c274d006a9fa98fe0e8e8656a6d5892
|
[
"bzip2-1.0.6"
] | null | null | null |
# Demonstrate str.swapcase(): every letter's case is inverted.
# NOTE(review): the literal reads 'Hell World' — possibly a typo for
# 'Hello World'; preserved as-is since it is runtime output.
message = 'Hell World'
print(message.swapcase())
| 14.333333
| 23
| 0.744186
| 6
| 43
| 5.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 43
| 2
| 24
| 21.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
b80c3ecf598d72a6b607171267843920f546e175
| 245
|
py
|
Python
|
vdl/core/file_parser.py
|
christianwaldmann/vdl
|
4d10fba7223cc4356e20cc4e6db62db6d56a22df
|
[
"MIT"
] | null | null | null |
vdl/core/file_parser.py
|
christianwaldmann/vdl
|
4d10fba7223cc4356e20cc4e6db62db6d56a22df
|
[
"MIT"
] | null | null | null |
vdl/core/file_parser.py
|
christianwaldmann/vdl
|
4d10fba7223cc4356e20cc4e6db62db6d56a22df
|
[
"MIT"
] | null | null | null |
class FileParser:
    """Read a text file and expose its non-empty lines.

    A line is kept if it is not exactly "\n"; lines containing only other
    whitespace (e.g. "  \n") are intentionally preserved, matching the
    original filter.
    """

    def __init__(self, filepath):
        # Path is stored only; the file is opened lazily per call.
        self.filepath = filepath

    def GetNonEmptyLinesAsList(self):
        """Return every line of the file (newlines kept) except bare "\n" lines."""
        # Iterate the file object directly instead of f.readlines():
        # same lines, but streamed rather than materialized twice.
        with open(self.filepath, "r") as f:
            return [line for line in f if line and line != "\n"]
| 30.625
| 76
| 0.628571
| 31
| 245
| 4.83871
| 0.645161
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261224
| 245
| 7
| 77
| 35
| 0.828729
| 0
| 0
| 0
| 0
| 0
| 0.012245
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.