hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
14de46a159189620fb69184095362b44145d0760 | 566 | py | Python | tests/utils/test_check.py | Bloodielie/state_manager | 415e293c5fe85dd99648cacf727fd572c9d5df3f | [
"Apache-2.0"
] | 4 | 2020-07-13T15:07:11.000Z | 2021-10-30T17:11:44.000Z | tests/utils/test_check.py | Bloodielie/state_manager | 415e293c5fe85dd99648cacf727fd572c9d5df3f | [
"Apache-2.0"
] | null | null | null | tests/utils/test_check.py | Bloodielie/state_manager | 415e293c5fe85dd99648cacf727fd572c9d5df3f | [
"Apache-2.0"
] | null | null | null | import pytest
from state_manager.utils.check import is_coroutine_callable
from state_manager.utils.runers import check_function_and_run
async def coroutine_test():
return None
def func_test():
return None
class ClassTest:
pass
def test_is_coroutine_callable():
assert is_coroutine_callable(coroutine_test)
assert not is_coroutine_callable(ClassTest)
@pytest.mark.asyncio
async def test_check_function_and_run():
assert await check_function_and_run(coroutine_test) is None
assert await check_function_and_run(func_test) is None
| 20.214286 | 63 | 0.805654 | 82 | 566 | 5.207317 | 0.341463 | 0.103045 | 0.177986 | 0.177986 | 0.140515 | 0.140515 | 0 | 0 | 0 | 0 | 0 | 0 | 0.144876 | 566 | 27 | 64 | 20.962963 | 0.882231 | 0 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.125 | true | 0.0625 | 0.1875 | 0.0625 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
14df6a2f0343adf0dee8b7fa76c159d61448d3fc | 8,803 | py | Python | PLM/ui/base/SplashProperty.py | vtta2008/pipelineTool | 2431d2fc987e3b31f2a6a63427fee456fa0765a0 | [
"Apache-2.0"
] | 7 | 2018-09-09T01:14:34.000Z | 2020-01-26T10:06:08.000Z | PLM/ui/base/SplashProperty.py | vtta2008/pipelineTool | 2431d2fc987e3b31f2a6a63427fee456fa0765a0 | [
"Apache-2.0"
] | null | null | null | PLM/ui/base/SplashProperty.py | vtta2008/pipelineTool | 2431d2fc987e3b31f2a6a63427fee456fa0765a0 | [
"Apache-2.0"
] | 3 | 2019-03-11T21:54:52.000Z | 2019-11-25T11:23:17.000Z | # -*- coding: utf-8 -*-
"""
Script Name:
Author: Do Trinh/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
""" Import """
import sys
# PLM
from PLM.options import (FRAMELESS, SPLASHSCREEN, TRANSPARENT, TEXT_NORMAL, DAMG_LOGO_COLOR,
peacock, DARKBLUE, deep_blue)
from pyPLM.Widgets import SplashScreen, MessageBox
from pyPLM.Gui import Pixmap, Font, Palette
from PLM.configs import splashImagePth, configPropText
p = configPropText()
class SplashProperty(SplashScreen):
key = 'SplashProperty'
_count = 0
# the amount of solid circles will be drawed
_numOfitems = 15
# the radian of solid circle
_itemRadius = 25
_revolutionPerSec = 1.57079632679489661923
_num = float(_numOfitems)
_minOpacity = 31.4159265358979323846
_fadeRate = 25
_innerRadius = 70
_mainColor = deep_blue
_brushColor = None
_bufferH = 100
_bufferW = 200
progress = None
_currentP = 0
_fontFamily = 'UTM Avo'
_fontSize = 12.0
_fontAttr = TEXT_NORMAL
_currentFont = Font(_fontFamily, _fontSize, _fontAttr)
_textColor = peacock
_penColor = DARKBLUE
_textBrushColor = DAMG_LOGO_COLOR
_text = 'Running Configurations'
_pText = '0%'
_centerW = True
_bMargin = 10
_tMargin = 10
_lMargin = 10
_rMargin = 10
_running = False
def __init__(self, app=None):
SplashScreen.__init__(self)
# make sure there is an instance of application
if not app:
MessageBox(self, 'Application Error', 'critical', p['ERROR_APPLICATION'])
sys.exit()
self.app = app
# Query desktop resolution to define the center point
self.screenH = self.screen().size().height()
self.screenW = self.screen().size().width()
def applySetting(self):
""" setting layout """
# setting 100% transperiency background
palette = Palette(self.palette())
palette.setColor(palette.Background, TRANSPARENT)
self.setPalette(palette)
# make widget frameless like splash screen style
self.setWindowFlags(SPLASHSCREEN | FRAMELESS)
self.setEnabled(False)
# load splash image, this will remove the black background
self.splashPix = Pixmap(splashImagePth)
self.setPixmap(self.splashPix)
self.setMask(self.splashPix.mask())
# set new font
self.setFont(self.currentFont)
# Updates splash widget from the default font to the font has been set.
self.ensurePolished()
def updateSize(self):
""" Adjust size of the layout """
size = (self.innerR + self.itemR) * 3
self.setFixedSize(size, size)
def moveToCenter(self):
""" Move the splash screen to center of the monitor """
x = (self.screenW - self.width())/2
y = (self.screenH - self.height())/2
self.move(x, y)
@property
def fontFamily(self):
return self._fontFamily
@property
def fontSize(self):
return self._fontSize
@property
def fontAttr(self):
return self._fontAttr
@property
def currentFont(self):
return self._currentFont
@property
def text(self):
return self._text
@property
def textColor(self):
return self._textColor
@property
def textBrushColor(self):
return self._textBrushColor
@property
def penColor(self):
return self._penColor
@property
def centerW(self):
return self._centerW
@property
def bMargin(self):
return self._bMargin
@property
def tMargin(self):
return self._tMargin
@property
def lMargin(self):
return self._lMargin
@property
def rMargin(self):
return self._rMargin
@property
def pText(self):
return self._pText
@property
def numOfitems(self):
return self._numOfitems
@property
def itemR(self):
return self._itemRadius
@property
def mainColor(self):
return self._mainColor
@property
def minOpacity(self):
return self._minOpacity
@property
def fadeRate(self):
return self._fadeRate
@property
def innerR(self):
return self._innerRadius
@property
def brushColor(self):
return self._brushColor
@property
def count(self):
return self._count
@property
def num(self):
return self._num
@property
def revolutionPerSec(self):
return self._revolutionPerSec
@property
def bufferH(self):
return self._bufferH
@property
def bufferW(self):
return self._bufferW
@property
def currentP(self):
return self._currentP
@property
def running(self):
return self._running
@running.setter
def running(self, val):
self._running = val
@pText.setter
def pText(self, val):
self._pText = val
@rMargin.setter
def rMargin(self, val):
self._rMargin = val
@lMargin.setter
def lMargin(self, val):
self._lMargin = val
@tMargin.setter
def tMargin(self, val):
self._tMargin = val
@bMargin.setter
def bMargin(self, val):
self._bMargin = val
@centerW.setter
def centerW(self, val):
self._centerW = val
@text.setter
def text(self, val):
self._text = val
@currentFont.setter
def currentFont(self, val):
self._currentFont = val
@fontAttr.setter
def fontAttr(self, val):
self._fontAttr = val
@fontSize.setter
def fontSize(self, val):
self._fontSize = val
@fontFamily.setter
def fontFamily(self, val):
self._fontFamily = val
@textColor.setter
def textColor(self, val):
self._textColor = val
@textBrushColor.setter
def textBrushColor(self, val):
self._brushColor = val
@penColor.setter
def penColor(self, val):
self._penColor = val
@revolutionPerSec.setter
def revolutionPerSec(self, val):
self._revolutionPerSec = val
@num.setter
def num(self, val):
self._num = val
@count.setter
def count(self, val):
self._count = val
@brushColor.setter
def brushColor(self, val):
self._brushColor = val
@innerR.setter
def innerR(self, val):
self._innerRadius = val
@fadeRate.setter
def fadeRate(self, val):
self._fadeRate = val
@minOpacity.setter
def minOpacity(self, val):
self._minOpacity = val
@mainColor.setter
def mainColor(self, val):
self._mainColor = val
@itemR.setter
def itemR(self, val):
self._itemRadius = val
@numOfitems.setter
def numOfitems(self, val):
self._numOfitems = val
@currentP.setter
def currentP(self, val):
self._currentP = val
@bufferW.setter
def bufferW(self, val):
self._bufferW = val
@bufferH.setter
def bufferH(self, val):
self._bufferH = val
# -------------------------------------------------------------------------------------------------------------
# Created by Trinh Do on 5/6/2020 - 3:13 AM
# © 2017 - 2020 DAMGteam. All rights reserved
| 24.385042 | 115 | 0.5071 | 785 | 8,803 | 5.55414 | 0.240764 | 0.070642 | 0.089908 | 0.009633 | 0.011009 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017857 | 0.395661 | 8,803 | 360 | 116 | 24.452778 | 0.801504 | 0.10008 | 0 | 0.12931 | 0 | 0 | 0.011062 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.258621 | false | 0 | 0.021552 | 0.12069 | 0.534483 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
095211a504bbca40411aa6c61f9590c29eef96c2 | 470 | py | Python | swak/static/templates/tmpl_unittest.py | haje01/swak | 1f2de2dd285b29d6100bb89ae83ae8b05bafc05d | [
"MIT"
] | null | null | null | swak/static/templates/tmpl_unittest.py | haje01/swak | 1f2de2dd285b29d6100bb89ae83ae8b05bafc05d | [
"MIT"
] | 2 | 2018-03-05T00:42:30.000Z | 2021-06-01T22:26:37.000Z | swak/static/templates/tmpl_unittest.py | haje01/swak | 1f2de2dd285b29d6100bb89ae83ae8b05bafc05d | [
"MIT"
] | 1 | 2018-03-05T00:41:02.000Z | 2018-03-05T00:41:02.000Z | """Test {{type_names|join(', ')}} plugin{%if type_names|length > 1%}s{%endif%} of {{class_name}}."""
from swak import stdplugins as stp
{% if prefixes|length == 1 %}from .{{prefixes[0]}}_{{file_name}} import {{class_name}}
{% else %}{% for pr in prefixes %}from .{{prefixes[0]}}_{{file_name}} import {{class_name}} as {{pr}}_{{class_name}}
{% endfor %}{% endif %}
def test_{{class_name|lower}}_basic(router):
"""Test basic features of {{class_name}}."""
pass
| 39.166667 | 116 | 0.634043 | 66 | 470 | 4.287879 | 0.5 | 0.190813 | 0.077739 | 0.120141 | 0.254417 | 0.254417 | 0.254417 | 0.254417 | 0 | 0 | 0 | 0.00978 | 0.129787 | 470 | 11 | 117 | 42.727273 | 0.682152 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.166667 | 0.5 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 3 |
1179af505414f6ea6b98c4409b44ca54ecc54e28 | 694 | py | Python | employee_repository.py | rovout/birthdayGreetingsPy | 90ae5c5322ca78392cdbbe20e010776ea21f1e1f | [
"MIT"
] | null | null | null | employee_repository.py | rovout/birthdayGreetingsPy | 90ae5c5322ca78392cdbbe20e010776ea21f1e1f | [
"MIT"
] | null | null | null | employee_repository.py | rovout/birthdayGreetingsPy | 90ae5c5322ca78392cdbbe20e010776ea21f1e1f | [
"MIT"
] | null | null | null | class CSVEmployeeRepository:
def __init__(self, csv_intepreter):
self._anagraphic = csv_intepreter.employees()
def birthdayFor(self, month, day):
return self._anagraphic.bornOn(Birthday(month, day))
class Anagraphic:
def __init__(self, employees):
self._employees = employees
def bornOn(self, birthday):
return self._employees.get(birthday)
class Birthday:
def __init__(self, month, day):
self._month = month
self._day = day
def __key(self):
return (self._month, self._day)
def __eq__(self, other):
return self.__key() == other.__key()
def __hash__(self):
return hash(self.__key()) | 23.931034 | 60 | 0.649856 | 79 | 694 | 5.227848 | 0.253165 | 0.087167 | 0.079903 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.242075 | 694 | 29 | 61 | 23.931034 | 0.785171 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.25 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
117af30031673bb04e9b0306a9e420caafceb233 | 304 | py | Python | bangoo/media/models.py | slapec/bangoo | 34facf122f15943a4368d5c2f45fe178ff01edaa | [
"MIT"
] | null | null | null | bangoo/media/models.py | slapec/bangoo | 34facf122f15943a4368d5c2f45fe178ff01edaa | [
"MIT"
] | null | null | null | bangoo/media/models.py | slapec/bangoo | 34facf122f15943a4368d5c2f45fe178ff01edaa | [
"MIT"
] | null | null | null | from django.db import models
from easy_thumbnails.fields import ThumbnailerImageField
from taggit.managers import TaggableManager
class Image(models.Model):
file = ThumbnailerImageField(upload_to='media/%Y/%m')
tags = TaggableManager()
def __unicode__(self):
return self.file.name
| 25.333333 | 57 | 0.763158 | 36 | 304 | 6.277778 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154605 | 304 | 11 | 58 | 27.636364 | 0.879377 | 0 | 0 | 0 | 0 | 0 | 0.036184 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.375 | 0.125 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 3 |
11ed75c5635ceea09035580447fece5399a185f1 | 84 | py | Python | test/login.py | y-y-d-w/qwe | e947240d58bf623d1fa272d3f831feb7f3584e6b | [
"MIT"
] | null | null | null | test/login.py | y-y-d-w/qwe | e947240d58bf623d1fa272d3f831feb7f3584e6b | [
"MIT"
] | null | null | null | test/login.py | y-y-d-w/qwe | e947240d58bf623d1fa272d3f831feb7f3584e6b | [
"MIT"
] | null | null | null | qwertyuio
[i for i in range(10)]
isinstance()
isdir.name
tensorboard --logdir logs
| 12 | 25 | 0.75 | 13 | 84 | 4.846154 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027778 | 0.142857 | 84 | 6 | 26 | 14 | 0.847222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
ee933044c4bf8020a8cbff7dddcd6d269dbe7423 | 258 | py | Python | CmixAPIClient/error.py | cmodzelewski-dynata/python-cmixapi-client | ba1a7b2b9c5d8664ac94ca8122c0a5259941d888 | [
"MIT"
] | 1 | 2020-01-21T14:42:06.000Z | 2020-01-21T14:42:06.000Z | CmixAPIClient/error.py | wogsland/python-cmixapi-client | b427bf3792ee897bb3d15cc4956d66b2684a56e9 | [
"MIT"
] | 102 | 2020-01-18T08:40:42.000Z | 2022-01-25T11:00:00.000Z | CmixAPIClient/error.py | wogsland/python-cmixapi-client | b427bf3792ee897bb3d15cc4956d66b2684a56e9 | [
"MIT"
] | 2 | 2020-01-15T12:47:37.000Z | 2020-07-10T22:06:45.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
class CmixError(Exception):
'''
This base error will help determine when CMIX returns a bad response or
otherwise raises an exception while using the API.
'''
pass
| 23.454545 | 79 | 0.674419 | 33 | 258 | 5.121212 | 0.969697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005181 | 0.251938 | 258 | 10 | 80 | 25.8 | 0.870466 | 0.562016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 3 |
eea128116968302e65e505671d58bb0a18d65c8c | 1,956 | py | Python | appengine/predator/analysis/crash_match.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | [
"BSD-3-Clause"
] | 2 | 2021-04-13T21:22:18.000Z | 2021-09-07T02:11:57.000Z | appengine/predator/analysis/crash_match.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | [
"BSD-3-Clause"
] | 21 | 2020-09-06T02:41:05.000Z | 2022-03-02T04:40:01.000Z | appengine/predator/analysis/crash_match.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from collections import defaultdict
from collections import namedtuple
class CrashedGroup(namedtuple('CrashedGroup', ['value'])):
"""Represents a crashed group.
Properties:
value (str): The content of crashed group, for example, 'crashed_file_name',
'crashed_directory'.
name (str): The class name of the crashed group. It is mainly used by
sub classes to return the their class names, for example,
'CrashedComponent', 'CrashedDirectory'.
"""
__slots__ = ()
@property
def name(self): # pragma: no cover
return self.__class__.__name__
class CrashedFile(CrashedGroup):
"""Represents a crashed file in stacktrace."""
pass
class CrashedDirectory(CrashedGroup):
"""Represents a crashed directory, which has crashed files in stacktrace."""
pass
class CrashedComponent(CrashedGroup):
"""Represents a crashed component, for example, 'Blink>DOM'."""
pass
# TODO(wrengr): it's not clear why the ``priority`` is stored at all,
# given that every use in this file discards it. ``Result.file_to_stack_infos``
# should just store pointers directly to the frames themselves rather
# than needing this intermediate object.
# TODO(http://crbug.com/644476): this class needs a better name.
class FrameInfo(namedtuple('FrameInfo', ['frame', 'priority'])):
"""Represents a frame and information of the ``CallStack`` it belongs to."""
__slots__ = ()
class CrashMatch(namedtuple('CrashMatch',
['crashed_group', 'touched_files', 'frame_infos'])):
"""Represents a match between touched files with frames in stacktrace.
The ``touched_files`` and ``frame_infos`` are matched under the same
``crashed_group``, for example, CrashedFile('file.cc') or
CrashedDirectory('dir/').
"""
__slots__ = ()
| 31.047619 | 80 | 0.715746 | 249 | 1,956 | 5.493976 | 0.493976 | 0.048246 | 0.052632 | 0.065789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006215 | 0.177403 | 1,956 | 62 | 81 | 31.548387 | 0.844002 | 0.65184 | 0 | 0.315789 | 0 | 0 | 0.139159 | 0 | 0 | 0 | 0 | 0.016129 | 0 | 1 | 0.052632 | false | 0.157895 | 0.157895 | 0.052632 | 0.736842 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
eea8b86dcf6eff68bec87442c4840c58851062e0 | 715 | py | Python | artificial_neural_network/one_hot_encoder.py | mwesthelle/artificial-neural-network | 3ea2486da82d09fae38dfb28a0ddb72ad50715cb | [
"MIT"
] | null | null | null | artificial_neural_network/one_hot_encoder.py | mwesthelle/artificial-neural-network | 3ea2486da82d09fae38dfb28a0ddb72ad50715cb | [
"MIT"
] | 4 | 2020-11-13T22:34:39.000Z | 2020-11-18T00:55:51.000Z | artificial_neural_network/one_hot_encoder.py | mwesthelle/artificial-neural-network | 3ea2486da82d09fae38dfb28a0ddb72ad50715cb | [
"MIT"
] | null | null | null | import numpy as np
class OneHotEncoder:
def __init__(self):
self.int2label = dict()
self.label2int = dict()
self.one_hot_encoding = dict()
def encode(self, labels):
labels = [str(lab) for lab in labels]
self.int2label = {idx: label for idx, label in enumerate(labels)}
label2int = {label: idx for idx, label in enumerate(labels)}
for label in labels:
self.one_hot_encoding[label] = np.zeros(len(labels))
self.one_hot_encoding[label][label2int[label]] = 1
def label_to_decode(self, label):
return self.one_hot_encoding[label]
def decode(self, one_hot_array):
return self.int2label[one_hot_array[1]]
| 31.086957 | 73 | 0.641958 | 96 | 715 | 4.59375 | 0.322917 | 0.081633 | 0.113379 | 0.163265 | 0.310658 | 0.258503 | 0 | 0 | 0 | 0 | 0 | 0.015009 | 0.254545 | 715 | 22 | 74 | 32.5 | 0.812383 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.058824 | 0.117647 | 0.470588 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 3 |
eee84317de12119ddc1d50716f8222818770244a | 342 | py | Python | milestone_one/movie.py | farooq-teqniqly/pakt-complete-python-course | 01717bbe97181f70c38166b3dc82ba7b00098430 | [
"MIT"
] | null | null | null | milestone_one/movie.py | farooq-teqniqly/pakt-complete-python-course | 01717bbe97181f70c38166b3dc82ba7b00098430 | [
"MIT"
] | null | null | null | milestone_one/movie.py | farooq-teqniqly/pakt-complete-python-course | 01717bbe97181f70c38166b3dc82ba7b00098430 | [
"MIT"
] | null | null | null | from datetime import date
import jsonpickle
class Movie:
def __init__(self, title: str, director: str, release_date: date):
self.title = title
self.director = director
self.release_date = release_date
def json(self):
return jsonpickle.encode(self)
def __str__(self):
return self.json()
| 21.375 | 70 | 0.657895 | 42 | 342 | 5.095238 | 0.404762 | 0.154206 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.260234 | 342 | 15 | 71 | 22.8 | 0.84585 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0.181818 | 0.181818 | 0.727273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
eef61886fae83146def703569c73b1fdd01e2ca1 | 337 | py | Python | paqr/predictor.py | qrithm/paqr | f0f9f6e99d396deb09a750ae9f1777c043d2e3e9 | [
"Apache-2.0"
] | null | null | null | paqr/predictor.py | qrithm/paqr | f0f9f6e99d396deb09a750ae9f1777c043d2e3e9 | [
"Apache-2.0"
] | null | null | null | paqr/predictor.py | qrithm/paqr | f0f9f6e99d396deb09a750ae9f1777c043d2e3e9 | [
"Apache-2.0"
] | null | null | null | from abc import ABC, abstractmethod
class PredictorInterface(ABC):
def __init__(self):
pass
@abstractmethod
def predict(self):
"""
The main prediction method the model should implement to generate
a single prediction. The inputs to this method will vary so are left unspecified
"""
| 24.071429 | 88 | 0.667656 | 40 | 337 | 5.525 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.278932 | 337 | 13 | 89 | 25.923077 | 0.909465 | 0.433234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.166667 | 0.166667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
eef8e094c6ac88c3ac3a3190492e9bb8bd313269 | 161 | py | Python | profil_penyedia/apps.py | PBP-A07/pedulilindungi2.0 | d94d680cc04cedc363db2a889a1f6a1e10177b1e | [
"Unlicense"
] | 1 | 2021-12-18T04:14:43.000Z | 2021-12-18T04:14:43.000Z | profil_penyedia/apps.py | ridjkytgr/pedulilindungi2.0 | 7dcaf90b99d301800bf3c019fd373cedb1734678 | [
"Unlicense"
] | 2 | 2021-11-03T10:04:17.000Z | 2021-11-04T18:15:51.000Z | profil_penyedia/apps.py | ridjkytgr/pedulilindungi2.0 | 7dcaf90b99d301800bf3c019fd373cedb1734678 | [
"Unlicense"
] | 2 | 2021-10-13T06:42:47.000Z | 2021-11-07T11:43:42.000Z | from django.apps import AppConfig
class ProfilPenyediaConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'profil_penyedia'
| 23 | 56 | 0.782609 | 18 | 161 | 6.833333 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136646 | 161 | 6 | 57 | 26.833333 | 0.884892 | 0 | 0 | 0 | 0 | 0 | 0.273292 | 0.180124 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
e125a00e0aad9a1c97b8ff7ee77075c203d543f7 | 98 | py | Python | tests/logging/test_logging.py | Darless/GLockManager | efac37a7ef87e48a19d6aa89f1e48dd836ce6761 | [
"Apache-2.0"
] | 2 | 2017-03-02T08:50:43.000Z | 2017-10-30T15:38:58.000Z | tests/TEMPLATE_DIR/test.py | Darless/GLockManager | efac37a7ef87e48a19d6aa89f1e48dd836ce6761 | [
"Apache-2.0"
] | 3 | 2017-01-03T14:36:30.000Z | 2017-10-13T13:57:45.000Z | tests/read_write/test_rw.py | Darless/GLockManager | efac37a7ef87e48a19d6aa89f1e48dd836ce6761 | [
"Apache-2.0"
] | null | null | null | import os
import subprocess
import shlex
def test_main(utils):
utils.compile_and_run(__file__)
| 14 | 33 | 0.816327 | 15 | 98 | 4.866667 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.122449 | 98 | 6 | 34 | 16.333333 | 0.848837 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.6 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
e12baadb3fbaaaf30d70ad7fa5d7694a852e1e41 | 72 | py | Python | skecon/__init__.py | vansh-kamdar/skecon-prototype | 98d365e9707c01f33252693dbfe3b0077cb3bc09 | [
"MIT"
] | null | null | null | skecon/__init__.py | vansh-kamdar/skecon-prototype | 98d365e9707c01f33252693dbfe3b0077cb3bc09 | [
"MIT"
] | 1 | 2021-03-19T15:12:09.000Z | 2021-03-19T15:12:09.000Z | skecon/__init__.py | vansh-kamdar/skecon-prototype | 98d365e9707c01f33252693dbfe3b0077cb3bc09 | [
"MIT"
] | 3 | 2020-08-13T10:35:03.000Z | 2020-08-13T10:48:28.000Z | from .stats import *
from .data import *
__version__ = "0.0.1a1"
| 12 | 24 | 0.625 | 10 | 72 | 4.1 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.074074 | 0.25 | 72 | 5 | 25 | 14.4 | 0.685185 | 0 | 0 | 0 | 0 | 0 | 0.106061 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
010719164ec8565bfcf5efbf4f774a1026593755 | 1,752 | py | Python | pikuli/uia/adapter/helper_types.py | NVoronchev/pikuli | b67e33fa51a7bb7252c5ac11651e2f005542f955 | [
"MIT"
] | null | null | null | pikuli/uia/adapter/helper_types.py | NVoronchev/pikuli | b67e33fa51a7bb7252c5ac11651e2f005542f955 | [
"MIT"
] | 1 | 2021-05-11T13:40:52.000Z | 2021-05-13T19:42:26.000Z | pikuli/uia/adapter/helper_types.py | NVoronchev/pikuli | b67e33fa51a7bb7252c5ac11651e2f005542f955 | [
"MIT"
] | 2 | 2021-03-31T14:10:15.000Z | 2022-01-24T02:16:04.000Z | # -*- coding: utf-8 -*-
from enum import Enum, EnumMeta
class ApiEnumAutoval(Enum):
def __new__(cls, default_val):
obj = object.__new__(cls)
obj._value_ = len(cls.__members__)
return obj
@property
def _c_name(self):
return self.name
class ApiEnumExplicit(int, Enum):
@property
def _c_name(self):
"""
Python disallow to use some identifiers as enum field (`None` for example).
This method translate pythonic names to C-style API.
In case of API's name `None` it means the `None_` as pythonic editon.
"""
name = self.name if self.name != 'None_' else 'None'
return name
class Enums(object):
def _add(self, enum):
if not self.is_enum(enum):
raise Exception('{} is not Enum'.format(enum))
setattr(self, enum.__name__, enum)
def get_collection(self):
return {n: e for n, e in self.__dict__.items() if Enums.is_enum(e)}
@classmethod
def is_enum(cls, obj):
return isinstance(obj, EnumMeta)
def __str__(self):
return str(self.get_collection().keys())
class IdNameMap(object):
    """Bidirectional name <-> id mapping built by a user-supplied builder.

    ``map_builder(names)`` must return a ``{name: id}`` dict; the reverse
    ``{id: name}`` map is derived from it.
    """

    def __init__(self, map_builder, names):
        self._name2id = map_builder(names)
        self._id2name = dict((id_, name) for name, id_ in self._name2id.items())

    def name2id(self, name):
        """Return the id for *name*; raises KeyError when unknown."""
        return self._name2id[name]

    def try_name2id(self, name):
        """Return the id for *name*, or None when unknown."""
        return self._name2id.get(name, None)

    def try_id2name(self, id_):
        """Return the name for *id_*, or None when unknown."""
        return self._id2name.get(id_, None)

    def items(self):
        """Yield (name, id) pairs."""
        yield from self._name2id.items()

    def names(self):
        return self._name2id.keys()

    def ids(self):
        return self._name2id.values()
| 24 | 83 | 0.614726 | 235 | 1,752 | 4.32766 | 0.361702 | 0.075713 | 0.066863 | 0.031465 | 0.102262 | 0.06293 | 0 | 0 | 0 | 0 | 0 | 0.010212 | 0.273402 | 1,752 | 72 | 84 | 24.333333 | 0.788688 | 0.126142 | 0 | 0.093023 | 0 | 0 | 0.015426 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.325581 | false | 0 | 0.023256 | 0.209302 | 0.697674 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
01223fc447fec8ddc92ea1dcf6b2b860b66db7c3 | 3,534 | py | Python | tests/_responses.py | adriancaruana/refman | b66688e86ea670d484c941630867b83dda2eed2d | [
"MIT"
] | null | null | null | tests/_responses.py | adriancaruana/refman | b66688e86ea670d484c941630867b83dda2eed2d | [
"MIT"
] | null | null | null | tests/_responses.py | adriancaruana/refman | b66688e86ea670d484c941630867b83dda2eed2d | [
"MIT"
] | 1 | 2021-11-03T23:44:42.000Z | 2021-11-03T23:44:42.000Z | from pathlib import Path
from refman._constants import (
EDITOR,
REFMAN_DIR,
PAPER_DIR,
BIB_DB,
BIB_REF,
META_NAME,
BIBTEX_NAME,
CROSSREF_URL,
ARXIV_BIBTEX_URL,
ARXIV_PDF_URL,
FMT_BIBTEX,
FMT_CITEPROC,
)
DOI = "10.1146/annurev-statistics-031017-100045"
# Canned Crossref responses keyed by the request URL they stub out:
# one citeproc-JSON payload and one BibTeX payload for the same DOI.
DOI_RESPONSES = {
    CROSSREF_URL.format(
        doi=DOI, fmt=FMT_CITEPROC
    ): b'{"indexed":{"date-parts":[[2021,5,14]],"date-time":"2021-05-14T07:47:03Z","timestamp":1620978423643},"reference-count":139,"publisher":"Annual Reviews","issue":"1","content-domain":{"domain":[],"crossmark-restriction":false},"published-print":{"date-parts":[[2018,3,7]]},"DOI":"10.1146\\/annurev-statistics-031017-100045","type":"article-journal","created":{"date-parts":[[2017,12,14]],"date-time":"2017-12-14T00:09:23Z","timestamp":1513210163000},"page":"501-532","source":"Crossref","is-referenced-by-count":93,"title":"Topological Data Analysis","prefix":"10.1146","volume":"5","author":[{"given":"Larry","family":"Wasserman","sequence":"first","affiliation":[{"name":"Department of Statistics and Data Science, Carnegie Mellon University, Pittsburgh, Pennsylvania 15217, USA;"}]}],"member":"22","container-title":"Annual Review of Statistics and Its Application","original-title":[],"language":"en","link":[{"URL":"http:\\/\\/www.annualreviews.org\\/doi\\/pdf\\/10.1146\\/annurev-statistics-031017-100045","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2019,10,8]],"date-time":"2019-10-08T00:22:44Z","timestamp":1570494164000},"score":1.0,"subtitle":[],"short-title":[],"issued":{"date-parts":[[2018,3,7]]},"references-count":139,"journal-issue":{"published-print":{"date-parts":[[2018,3,7]]},"issue":"1"},"alternative-id":["10.1146\\/annurev-statistics-031017-100045"],"URL":"http:\\/\\/dx.doi.org\\/10.1146\\/annurev-statistics-031017-100045","relation":{},"ISSN":["2326-8298","2326-831X"],"subject":["Statistics, Probability and Uncertainty","Statistics and Probability"],"container-title-short":"Annu. Rev. Stat. Appl."}',
    CROSSREF_URL.format(
        doi=DOI, fmt=FMT_BIBTEX
    ): b"@article{Wasserman_2018,\n\tdoi = {10.1146/annurev-statistics-031017-100045},\n\turl = {https://doi.org/10.1146%2Fannurev-statistics-031017-100045},\n\tyear = 2018,\n\tmonth = {mar},\n\tpublisher = {Annual Reviews},\n\tvolume = {5},\n\tnumber = {1},\n\tpages = {501--532},\n\tauthor = {Larry Wasserman},\n\ttitle = {Topological Data Analysis},\n\tjournal = {Annual Review of Statistics and Its Application}\n}",
}
ARXIV = "2104.13478"
# Canned arXiv responses keyed by the request URL they stub out: a BibTeX
# payload plus the raw bytes of a small local PDF standing in for the paper.
ARXIV_RESPONSES = {
    ARXIV_BIBTEX_URL.format(
        arxiv=ARXIV
    ): b"@misc{bronstein2021geometric,\n title={Geometric Deep Learning: Grids, Groups, Graphs, Geodesics, and Gauges}, \n author={Michael M. Bronstein and Joan Bruna and Taco Cohen and Petar Veli\xc4\x8dkovi\xc4\x87},\n year={2021},\n eprint={2104.13478},\n archivePrefix={arXiv},\n primaryClass={cs.LG}\n}",
    # read_bytes() opens and closes the file itself; the previous
    # open(...).read() left the file handle unclosed.
    ARXIV_PDF_URL.format(arxiv=ARXIV): (Path(__file__).parent / "test.pdf").read_bytes(),
}
# Plain-text BibTeX fixture matching the FMT_BIBTEX Crossref payload above,
# for tests that parse BibTeX directly rather than via HTTP.
BIBTEX = """
@article{Wasserman_2018,
doi = {10.1146/annurev-statistics-031017-100045},
url = {https://doi.org/10.1146%2Fannurev-statistics-031017-100045},
year = 2018,
month = {mar},
publisher = {Annual Reviews},
volume = {5},
number = {1},
pages = {501--532},
author = {Larry Wasserman},
title = {Topological Data Analysis},
journal = {Annual Review of Statistics and Its Application}
}
"""
| 64.254545 | 1,720 | 0.683079 | 466 | 3,534 | 5.11588 | 0.444206 | 0.025168 | 0.083054 | 0.067534 | 0.255872 | 0.249581 | 0.205537 | 0.040268 | 0.040268 | 0 | 0 | 0.124097 | 0.099321 | 3,534 | 54 | 1,721 | 65.444444 | 0.624882 | 0 | 0 | 0.041667 | 0 | 0.083333 | 0.818902 | 0.536503 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.041667 | 0 | 0.041667 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
015b4df28dae5dd25ded6e5f1f9ef738275e312e | 3,864 | py | Python | sciquence/postprocessing/binarizer.py | krzjoa/sciquence | 6a5f758c757200fffeb0fdc9206462f1f89e2444 | [
"MIT"
] | 8 | 2017-10-23T17:59:35.000Z | 2021-05-10T03:01:30.000Z | sciquence/postprocessing/binarizer.py | krzjoa/sciquence | 6a5f758c757200fffeb0fdc9206462f1f89e2444 | [
"MIT"
] | 2 | 2019-08-25T19:24:12.000Z | 2019-09-05T12:16:10.000Z | sciquence/postprocessing/binarizer.py | krzjoa/sciquence | 6a5f758c757200fffeb0fdc9206462f1f89e2444 | [
"MIT"
] | 2 | 2018-02-28T09:47:53.000Z | 2019-08-25T19:24:16.000Z | # -*- coding: utf-8 -*-
# Krzysztof Joachimiak 2018
# sciquence: Time series & sequences in Pythonn
#
# Binarizers
# Author: Krzysztof Joachimiak
#
# License: MIT
import sys
sys.path.append("..")
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
import copy
#from sciquence.utils.docstring import inherit_docstring
#@inherit_docstring
class ClasswiseBinarizer(BaseEstimator, TransformerMixin):
    '''Binarize each column of a probability matrix against its own threshold.

    Useful in tagging tasks where every class (column) needs an
    independently tuned decision threshold.

    Parameters
    ----------
    thresholds: list of float or numpy.ndarray
        Binarization thresholds, one per class (column).
    '''

    def __init__(self, thresholds):
        # TODO: axis?
        self.thresholds = thresholds

    def fit(self, X, y=None):
        '''Does nothing; present for scikit-learn API compatibility.'''
        return self

    def transform(self, X, y=None, copy=False):
        '''
        Perform classwise binarization, i.e. every column has
        its own specific binarization threshold.

        Parameters
        ----------
        X: numpy.ndarray
            Matrix of probabilities, shape (n_samples, n_classes).
        y: None
            Nothing, argument for API compatibility
        copy: bool
            Ignored; a new array is always returned.

        Returns
        -------
        binarized_X: numpy.ndarray
            Matrix of 0.0/1.0 values, same shape as ``X``.

        Examples
        --------
        >>> import numpy as np
        >>> X = np.array([[0.1, 0.5, 0.9],
        ...               [0.6, 0.3, 0.2]])
        >>> ClasswiseBinarizer(thresholds=[.5, .4, .3]).transform(X)
        array([[0., 1., 1.],
               [1., 0., 0.]])
        '''
        # A value exactly equal to its threshold counts as positive.
        return (X >= self.thresholds).astype(float)
def binarize_classwise(X, thresholds):
    '''
    Binarization performed classwise: every column of ``X`` is compared
    against its own threshold.

    Parameters
    ----------
    X: numpy.ndarray
        Matrix of probabilities, shape (n_samples, n_classes).
    thresholds: list of float or numpy.ndarray
        Binarization thresholds for all the classes

    Returns
    -------
    numpy.ndarray
        Matrix of 0.0/1.0 values, same shape as ``X``.

    Examples
    --------
    >>> import numpy as np
    >>> X = np.array([[0.1, 0.5, 0.9],
    ...               [0.6, 0.3, 0.2]])
    >>> binarize_classwise(X, [.5, .4, .3])
    array([[0., 1., 1.],
           [1., 0., 0.]])
    '''
    # A value exactly equal to its threshold counts as positive.
    return (X >= thresholds).astype(float)
## TODO: ClasswiseMeanBinarizer

if __name__ == '__main__':
    # Smoke test on random data.  Note: the original `print X` statements
    # were Python-2 syntax and raise SyntaxError under Python 3.
    X = np.random.rand(10, 3)
    print(X)
    # Binarizing
    bX = ClasswiseBinarizer(thresholds=[.5, .4, .3]).transform(X)
    print(bX)
| 25.932886 | 82 | 0.528727 | 428 | 3,864 | 4.726636 | 0.331776 | 0.015818 | 0.014829 | 0.015818 | 0.533861 | 0.533861 | 0.492338 | 0.492338 | 0.4696 | 0.4696 | 0 | 0.228561 | 0.305901 | 3,864 | 148 | 83 | 26.108108 | 0.525727 | 0.072981 | 0 | 0 | 0 | 0 | 0.01368 | 0 | 0 | 0 | 0 | 0.013514 | 0 | 0 | null | null | 0 | 0.210526 | null | null | 0.105263 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
01713c38bfffe6e5ab50055e74c5915aed6e76f7 | 528 | py | Python | test/view_helpers/Test_Google_Charts_Js.py | clowdi/OSBot-browser | 615dea146f0ab2d2badcfa552efb9b6045d3ca69 | [
"Apache-2.0"
] | null | null | null | test/view_helpers/Test_Google_Charts_Js.py | clowdi/OSBot-browser | 615dea146f0ab2d2badcfa552efb9b6045d3ca69 | [
"Apache-2.0"
] | null | null | null | test/view_helpers/Test_Google_Charts_Js.py | clowdi/OSBot-browser | 615dea146f0ab2d2badcfa552efb9b6045d3ca69 | [
"Apache-2.0"
] | 1 | 2019-11-08T10:36:45.000Z | 2019-11-08T10:36:45.000Z | from unittest import TestCase
from osbot_browser.view_helpers.Google_Charts_Js import Google_Charts_Js
class Test_Google_Charts_Js(TestCase):
    """Smoke tests for the Google_Charts_Js view helper (calls must not raise)."""
    def setUp(self):
        # png_data is populated by tests that capture screenshots (see tearDown).
        self.png_data = None
        self.google_charts = Google_Charts_Js()
    # def tearDown(self):
    # if self.png_data:
    # Browser_Lamdba_Helper().save_png_data(self.png_data)
    def test_load_page(self):
        """Loading the charts page with reload=True completes without error."""
        self.google_charts.load_page(True)
    def test_create_data_table(self):
        """Creating a data table completes without error."""
        self.google_charts.create_data_table()
| 25.142857 | 72 | 0.714015 | 74 | 528 | 4.702703 | 0.391892 | 0.241379 | 0.16092 | 0.114943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.206439 | 528 | 20 | 73 | 26.4 | 0.830549 | 0.193182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3 | false | 0 | 0.2 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
0174e2b71c273005b9e08abca4d2536f96a4e7d9 | 2,240 | py | Python | recordwhat/records/scanparm.py | mrakitin/recordwhat | c68b8fca69836bdba0075726e829325f2c8918a8 | [
"BSD-3-Clause"
] | 1 | 2016-06-08T15:14:15.000Z | 2016-06-08T15:14:15.000Z | recordwhat/records/scanparm.py | mrakitin/recordwhat | c68b8fca69836bdba0075726e829325f2c8918a8 | [
"BSD-3-Clause"
] | 12 | 2016-02-11T15:01:05.000Z | 2019-09-23T17:28:32.000Z | recordwhat/records/scanparm.py | mrakitin/recordwhat | c68b8fca69836bdba0075726e829325f2c8918a8 | [
"BSD-3-Clause"
] | 4 | 2016-06-08T15:03:07.000Z | 2019-09-23T17:05:38.000Z | from ophyd import (EpicsSignal, EpicsSignalRO)
from .. import (RecordBase, _register_record_type,
FieldComponent as Cpt)
@_register_record_type('scanparm')
class ScanparmRecord(RecordBase):
    """Ophyd mapping for the EPICS 'scanparm' record type.

    Each attribute exposes one record field (the '.XXX' PV suffix) as an
    EPICS signal: EpicsSignalRO for read-only fields, EpicsSignal for
    writable ones.  Fields whose suffix ends in '$' are read with
    string=True (presumably the long-string/char-array form — confirm
    against the EPICS record reference).
    """
    # Read-only scan state / metadata fields.
    alarm_status = Cpt(EpicsSignalRO, '.STAT')
    code_version = Cpt(EpicsSignalRO, '.VERS')
    last_stepsize = Cpt(EpicsSignalRO, '.LSTP')
    maxpts = Cpt(EpicsSignalRO, '.MP')
    scanactive = Cpt(EpicsSignalRO, '.ACT')
    stepsize = Cpt(EpicsSignalRO, '.STEP')
    # - common
    after_outlink = Cpt(EpicsSignalRO, '.OAFT$', string=True)
    acquire_time_outlink = Cpt(EpicsSignalRO, '.OAQT$', string=True)
    ar_outlink = Cpt(EpicsSignalRO, '.OAR$', string=True)
    after = Cpt(EpicsSignal, '.AFT')
    d1pv_outlink = Cpt(EpicsSignalRO, '.ODPV$', string=True)
    detpvname = Cpt(EpicsSignal, '.DPV$', string=True)
    ep_outlink = Cpt(EpicsSignalRO, '.OEP$', string=True)
    go_outlink = Cpt(EpicsSignalRO, '.OGO$', string=True)
    inlink = Cpt(EpicsSignalRO, '.IACT$', string=True)
    load_outlink = Cpt(EpicsSignalRO, '.OLOAD$', string=True)
    mp_inlink = Cpt(EpicsSignalRO, '.IMP$', string=True)
    np_outlink = Cpt(EpicsSignalRO, '.ONP$', string=True)
    p1pv_outlink = Cpt(EpicsSignalRO, '.OPPV$', string=True)
    pre_write_outlink = Cpt(EpicsSignalRO, '.OPRE$', string=True)
    positionerpvname = Cpt(EpicsSignal, '.PPV$', string=True)
    r1pv_outlink = Cpt(EpicsSignalRO, '.ORPV$', string=True)
    readbackpvname = Cpt(EpicsSignal, '.RPV$', string=True)
    sc_outlink = Cpt(EpicsSignalRO, '.OSC$', string=True)
    sm_outlink = Cpt(EpicsSignalRO, '.OSM$', string=True)
    sp_outlink = Cpt(EpicsSignalRO, '.OSP$', string=True)
    stepmode = Cpt(EpicsSignal, '.SM')
    t1pv_outlink = Cpt(EpicsSignalRO, '.OTPV$', string=True)
    trigpvname = Cpt(EpicsSignal, '.TPV$', string=True)
    absrel = Cpt(EpicsSignal, '.AR')
    # - display
    acquire_time = Cpt(EpicsSignal, '.AQT')
    display_precision = Cpt(EpicsSignal, '.PREC')
    endpos = Cpt(EpicsSignal, '.EP')
    go = Cpt(EpicsSignal, '.GO')
    load = Cpt(EpicsSignal, '.LOAD')
    pre_write_command = Cpt(EpicsSignal, '.PRE')
    startcmd = Cpt(EpicsSignal, '.SC')
    startpos = Cpt(EpicsSignal, '.SP')
    npts = Cpt(EpicsSignal, '.NP')
| 43.076923 | 68 | 0.669196 | 246 | 2,240 | 5.96748 | 0.357724 | 0.250681 | 0.235014 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002155 | 0.171429 | 2,240 | 51 | 69 | 43.921569 | 0.788793 | 0.008036 | 0 | 0 | 0 | 0 | 0.086976 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.045455 | 0 | 0.954545 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
018e28998be47fdec211cf54b655847fb43b866d | 600 | py | Python | src/politics.py | getcityhub/politics | 410ac79071c77bd52a8438bb9d9923ee096917b2 | [
"MIT"
] | null | null | null | src/politics.py | getcityhub/politics | 410ac79071c77bd52a8438bb9d9923ee096917b2 | [
"MIT"
] | null | null | null | src/politics.py | getcityhub/politics | 410ac79071c77bd52a8438bb9d9923ee096917b2 | [
"MIT"
] | null | null | null | from consts import *
from credentials import get_credential
from politicians import get_politicians
import boto3
import mysql.connector
import sys
# Build an S3 client from stored credentials.
s3 = boto3.client(
    "s3",
    aws_access_key_id = get_credential("AWS_ACCESS_KEY_ID"),
    aws_secret_access_key = get_credential("AWS_SECRET_ACCESS_KEY")
)
api_key = get_credential("GOOGLE_API_KEY")
conn = mysql.connector.connect(user='root', password='cityhub', host='localhost', database='cityhub')
try:
    # Fetch and store politician data for every NYC zipcode.
    for zipcode in NYC_ZIPCODES:
        get_politicians(api_key, conn, s3, zipcode)
        # NOTE: the original Python-2 `print "..."` statement is a
        # SyntaxError under Python 3.
        print("Retrieving politicians from %d" % zipcode)
finally:
    # Always release the DB connection, even when a zipcode fails.
    conn.close()
| 26.086957 | 101 | 0.771667 | 83 | 600 | 5.301205 | 0.457831 | 0.118182 | 0.054545 | 0.063636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009597 | 0.131667 | 600 | 22 | 102 | 27.272727 | 0.834933 | 0 | 0 | 0 | 0 | 0 | 0.185 | 0.035 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.058824 | 0.352941 | null | null | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 3 |
0192ce739b66113b88f3edecf2a02fd15abeb5f1 | 243 | py | Python | Exercicios/ex031.py | mauriciozago/CursoPython3 | cbcff9ebfd4d5f5e3a32a369dac8521c6758bfe5 | [
"MIT"
] | null | null | null | Exercicios/ex031.py | mauriciozago/CursoPython3 | cbcff9ebfd4d5f5e3a32a369dac8521c6758bfe5 | [
"MIT"
] | null | null | null | Exercicios/ex031.py | mauriciozago/CursoPython3 | cbcff9ebfd4d5f5e3a32a369dac8521c6758bfe5 | [
"MIT"
] | null | null | null | distancia = float(input('Entre com a distancia da sua viagem, em Km: '))
if distancia > 200:
print('O valor da sua passagem é R${:.2f}!'.format(distancia*0.45))
else:
print('O valor da sua passagem é R${:.2f}!'.format(distancia*0.5))
| 34.714286 | 72 | 0.662551 | 42 | 243 | 3.833333 | 0.595238 | 0.093168 | 0.136646 | 0.161491 | 0.546584 | 0.546584 | 0.546584 | 0.546584 | 0.546584 | 0.546584 | 0 | 0.049261 | 0.164609 | 243 | 6 | 73 | 40.5 | 0.743842 | 0 | 0 | 0 | 0 | 0 | 0.469136 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.4 | 0 | 0 | 0 | 0.4 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
019575ece53612bbf7ec56943861d9ed23ddd82a | 131 | py | Python | gym_ds3/__init__.py | anonymous1958342/DS3Gym | 71fbff5ea92ae9349ad440e2c25497d1d363e97b | [
"MIT"
] | 1 | 2022-01-12T14:44:51.000Z | 2022-01-12T14:44:51.000Z | gym_ds3/__init__.py | anonymous1958342/DS3Gym | 71fbff5ea92ae9349ad440e2c25497d1d363e97b | [
"MIT"
] | null | null | null | gym_ds3/__init__.py | anonymous1958342/DS3Gym | 71fbff5ea92ae9349ad440e2c25497d1d363e97b | [
"MIT"
] | null | null | null | from gym.envs.registration import register
# Register the DS3 environment with Gym's registry so that
# gym.make('Ds3gym-v0') resolves to DS3GymEnv.
register(
    id='Ds3gym-v0',
    entry_point='gym_ds3.envs.core.ds3_env:DS3GymEnv',
)
| 18.714286 | 54 | 0.740458 | 19 | 131 | 4.947368 | 0.789474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04386 | 0.129771 | 131 | 6 | 55 | 21.833333 | 0.780702 | 0 | 0 | 0 | 0 | 0 | 0.335878 | 0.267176 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6d68b154dcc307ce898a121f75ca95110c7fde4f | 167 | py | Python | django webapp/sih/forms.py | ishan-chaudhary/anomalousAIS | 7fb709edb754031c750030e394c659919d527ce0 | [
"MIT"
] | 5 | 2020-05-26T02:59:39.000Z | 2021-09-06T18:50:52.000Z | django webapp/sih/forms.py | Team-Storm/anomalousAIS | 7fb709edb754031c750030e394c659919d527ce0 | [
"MIT"
] | null | null | null | django webapp/sih/forms.py | Team-Storm/anomalousAIS | 7fb709edb754031c750030e394c659919d527ce0 | [
"MIT"
] | 5 | 2018-03-26T14:57:45.000Z | 2021-12-14T08:49:33.000Z | #-*- coding: utf-8 -*-
from django import forms
class DataForm(forms.Form):
    """Upload form: a required name plus an optional file attachment."""
    # Free-text identifier for the submission, capped at 100 characters.
    name = forms.CharField(max_length = 100)
    # Optional attachment; the form validates without it.
    file = forms.FileField(required=False)
6d75c544791780af86b39806c0d1c840de08ae1f | 23 | py | Python | tests/utils/__init__.py | mh393/cb-threatconnect-connector | acda2a1dbdcfa4b9ff086b2d0c2da46b90ec4e2f | [
"MIT"
] | 22 | 2015-04-03T02:21:05.000Z | 2021-07-23T03:57:31.000Z | tests/__init__.py | carbonblack/cb-fireeye-connector | f01519e19ee4591042d25bf1fdeb33a8e81db086 | [
"MIT"
] | 17 | 2016-03-02T21:09:23.000Z | 2020-04-03T00:01:07.000Z | tests/__init__.py | carbonblack/cb-fireeye-connector | f01519e19ee4591042d25bf1fdeb33a8e81db086 | [
"MIT"
] | 12 | 2015-04-09T17:37:14.000Z | 2021-07-23T03:57:42.000Z | __author__ = 'jgarman'
| 11.5 | 22 | 0.73913 | 2 | 23 | 6.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 23 | 1 | 23 | 23 | 0.65 | 0 | 0 | 0 | 0 | 0 | 0.304348 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6d798bf49bc8058940d3180503b6d79ab91ec2a2 | 898 | py | Python | yo_fluq/_queries/query_class.py | okulovsky/yo_ds | 9e1fa2e7a1b9746c3982afc152c024169fec45ca | [
"MIT"
] | 16 | 2019-09-26T09:05:42.000Z | 2021-02-04T01:39:09.000Z | yo_fluq/_queries/query_class.py | okulovsky/yo_ds | 9e1fa2e7a1b9746c3982afc152c024169fec45ca | [
"MIT"
] | 2 | 2019-10-23T19:01:23.000Z | 2020-06-11T09:08:45.000Z | yo_fluq/_queries/query_class.py | okulovsky/yo_ds | 9e1fa2e7a1b9746c3982afc152c024169fec45ca | [
"MIT"
] | 2 | 2019-09-26T09:05:50.000Z | 2019-10-23T18:46:11.000Z | from .queryable import Queryable
from .._push_queries import PushQuery
from typing import *
from collections import Sized
from .._common import *
from .helpers import loop_maker
class QueryClass:
    """Entry points that wrap various inputs into Queryable objects."""

    def en(self, en: Iterable) -> Queryable:
        """Wrap an iterable; its length is captured when it is Sized."""
        # NOTE(review): `Sized` is imported from `collections` at the top of
        # this file; on Python >= 3.10 it must come from `collections.abc`.
        length = len(en) if isinstance(en, Sized) else None
        return FlupFactory.QueryableFactory(en, length)

    def args(self, *args) -> Queryable:
        """Wrap the positional arguments themselves as the sequence."""
        return FlupFactory.QueryableFactory(args, len(args))

    def dict(self, dictionary: Dict) -> Queryable:
        """Wrap a dict as a sequence of KeyValuePair items."""
        pairs = FlupFactory.QueryableFactory(dictionary.items(), len(dictionary))
        return pairs.select(lambda kv: KeyValuePair(kv[0], kv[1]))

    def push(self) -> PushQuery:
        """Start an empty push-style query."""
        return PushQuery()

    def loop(self, begin: Any, delta: Any, end: Any = None, endtype=LoopEndType.NotEqual):
        """Wrap an arithmetic progression from *begin* stepping by *delta*."""
        maker = loop_maker(begin, delta, end, endtype)
        return Queryable(maker.make())
| 30.965517 | 121 | 0.682628 | 108 | 898 | 5.62963 | 0.425926 | 0.083882 | 0.162829 | 0.138158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002809 | 0.207127 | 898 | 28 | 122 | 32.071429 | 0.851124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.238095 | false | 0 | 0.285714 | 0.142857 | 0.809524 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
6d80e55d9b60e64f2eb86fd92d692b5c76754819 | 199 | py | Python | Python/Dot and Cross/solution.py | arpitran/HackerRank_solutions | a3a77c858edd3955ea38530916db9051b1aa93f9 | [
"MIT"
] | null | null | null | Python/Dot and Cross/solution.py | arpitran/HackerRank_solutions | a3a77c858edd3955ea38530916db9051b1aa93f9 | [
"MIT"
] | null | null | null | Python/Dot and Cross/solution.py | arpitran/HackerRank_solutions | a3a77c858edd3955ea38530916db9051b1aa93f9 | [
"MIT"
] | null | null | null | import numpy as np
a = int(input())
arr1 = np.array([list(map(int,input().split())) for _ in range(a)])
arr2 = np.array([list(map(int,input().split())) for _ in range(a)])
print(np.dot(arr1,arr2)) | 24.875 | 67 | 0.643216 | 36 | 199 | 3.5 | 0.5 | 0.190476 | 0.174603 | 0.222222 | 0.603175 | 0.603175 | 0.603175 | 0.603175 | 0.603175 | 0.603175 | 0 | 0.022857 | 0.120603 | 199 | 8 | 68 | 24.875 | 0.697143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6db2ae39f97539acefa99a6f4b622fc2b63243d8 | 160 | py | Python | curso-em-video/aula_13/ex51.py | talysonxx/python | 520b108731e28c7dc1fca3523b925be506fd8340 | [
"MIT"
] | null | null | null | curso-em-video/aula_13/ex51.py | talysonxx/python | 520b108731e28c7dc1fca3523b925be506fd8340 | [
"MIT"
] | null | null | null | curso-em-video/aula_13/ex51.py | talysonxx/python | 520b108731e28c7dc1fca3523b925be506fd8340 | [
"MIT"
] | null | null | null | a1 = int(input('Digite o primeiro termo: '))
razão = int(input('Digite a razão: '))
for c in range(1, 11):
print(f'a{c} = {a1}', end=' | ')
a1 += razão
| 26.666667 | 44 | 0.55625 | 27 | 160 | 3.296296 | 0.666667 | 0.179775 | 0.314607 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.048 | 0.21875 | 160 | 5 | 45 | 32 | 0.664 | 0 | 0 | 0 | 0 | 0 | 0.34375 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.2 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6dc6e4780c9d85a698de998dd63c250567e54764 | 71 | py | Python | lets-warm-up/scripts/solve.py | L1ghtDream/picoctf | fa1832497b01c92c9acc26af9314c61b1d4f4d67 | [
"MIT"
] | null | null | null | lets-warm-up/scripts/solve.py | L1ghtDream/picoctf | fa1832497b01c92c9acc26af9314c61b1d4f4d67 | [
"MIT"
] | null | null | null | lets-warm-up/scripts/solve.py | L1ghtDream/picoctf | fa1832497b01c92c9acc26af9314c61b1d4f4d67 | [
"MIT"
] | null | null | null | code = input("> ")
print(bytearray.fromhex(code).decode())
print("\n") | 17.75 | 39 | 0.647887 | 9 | 71 | 5.111111 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084507 | 71 | 4 | 40 | 17.75 | 0.707692 | 0 | 0 | 0 | 0 | 0 | 0.055556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
6ddbacceb3dd559ea238c461873a0fb566164deb | 12,444 | py | Python | data_structures/trees/tests/test_binary_search_tree.py | vinta/fuck-coding-interviews | 915ff55963430e81134a35f65f511e5684c52f11 | [
"MIT"
] | 590 | 2020-06-17T08:26:47.000Z | 2022-03-30T18:47:32.000Z | data_structures/trees/tests/test_binary_search_tree.py | parvathirajan/fuck-coding-interviews | 915ff55963430e81134a35f65f511e5684c52f11 | [
"MIT"
] | 12 | 2020-07-14T09:24:32.000Z | 2020-11-02T03:43:47.000Z | data_structures/trees/tests/test_binary_search_tree.py | parvathirajan/fuck-coding-interviews | 915ff55963430e81134a35f65f511e5684c52f11 | [
"MIT"
] | 75 | 2020-07-29T06:50:13.000Z | 2022-03-13T16:14:57.000Z | # coding: utf-8
import unittest
import random
import binarytree
import pythonds3
from data_structures.trees.binary_search_tree import BinarySearchTree
from data_structures.trees.binary_search_tree import TreeNode
class TreeNodeTest(unittest.TestCase):
    """Unit tests for TreeNode string representations and equality."""
    def setUp(self):
        self.node = TreeNode(2)
    def test__repr__(self):
        """repr() returns a non-empty string."""
        self.assertTrue(repr(self.node))
    def test__str__(self):
        """str() returns a non-empty string."""
        self.assertTrue(str(self.node))
    def test__eq__(self):
        """Nodes compare equal on value (and recursively on children)."""
        other_node = TreeNode(2)
        self.assertEqual(self.node, other_node)
        node_a = TreeNode(2, left=TreeNode(1), right=TreeNode(3))
        node_b = TreeNode(2, left=TreeNode(1), right=TreeNode(3))
        self.assertEqual(node_a, node_b)
        other_node = TreeNode(22)
        self.assertNotEqual(self.node, other_node)
class BinarySearchTreeTest(unittest.TestCase):
    def setUp(self):
        """Build fixtures: an empty tree, a single-node tree, the 9-node
        sample tree drawn below, and a randomly generated perfect BST."""
        self.empty_bst = BinarySearchTree()
        self.one_node_bst = BinarySearchTree()
        self.one_node_bst.insert(1)
        # ______8
        # / \
        # 3__ 10___
        # / \ \
        # 1 6 _14
        # / \ /
        # 4 7 13
        self.insert_items = [8, 3, 10, 1, 6, 14, 4, 7, 13]
        self.bst = BinarySearchTree()
        for i in self.insert_items:
            self.bst.insert(i)
        array = binarytree.bst(is_perfect=True).values
        self.perfect_bst = BinarySearchTree.from_array_representation(array)
    def test__eq__(self):
        """Trees with identical insert sequences compare equal; different
        content or an empty tree compares unequal."""
        tree_1 = BinarySearchTree()
        for i in self.insert_items:
            tree_1.insert(i)
        self.assertEqual(self.bst, tree_1)
        tree_2 = BinarySearchTree()
        for i in [1, 2, 3]:
            tree_2.insert(i)
        self.assertNotEqual(self.bst, tree_2)
        self.assertNotEqual(self.bst, self.empty_bst)
    def test__len__(self):
        """len() equals the number of inserted values (0 for empty)."""
        self.assertEqual(len(self.empty_bst), 0)
        self.assertEqual(len(self.bst), len(self.insert_items))
    def test__iter__(self):
        """Iteration yields the sample tree's values level by level."""
        items = list(self.bst)
        expected = [8, 3, 10, 1, 6, 14, 4, 7, 13]
        self.assertEqual(items, expected)
    def test__contains__(self):
        """`in` is True for inserted values and False otherwise."""
        self.assertIn(random.choice(self.insert_items), self.bst)
        self.assertNotIn(100, self.bst)
    def test_is_valid(self):
        """The BST invariant holds for all three standard fixtures."""
        self.assertEqual(self.empty_bst.is_valid(), True)
        self.assertEqual(self.one_node_bst.is_valid(), True)
        self.assertEqual(self.bst.is_valid(), True)
    def test_is_full(self):
        """is_full: the sample tree has a one-child node, so it is not full;
        empty, single-node, and perfect trees are."""
        self.assertEqual(self.empty_bst.is_full(), True)
        self.assertEqual(self.one_node_bst.is_full(), True)
        self.assertEqual(self.bst.is_full(), False)
        self.assertEqual(self.perfect_bst.is_full(), True)
    def test_is_complate(self):
        """Completeness check; the sample tree is not complete."""
        # NOTE: 'complate' mirrors the (misspelled) method name on
        # BinarySearchTree and cannot be fixed here alone.
        self.assertEqual(self.empty_bst.is_complate(), True)
        self.assertEqual(self.one_node_bst.is_complate(), True)
        self.assertEqual(self.bst.is_complate(), False)
        self.assertEqual(self.perfect_bst.is_complate(), True)
    def test_is_balanced(self):
        """Balance check; the sample tree is unbalanced."""
        self.assertEqual(self.empty_bst.is_balanced(), True)
        self.assertEqual(self.one_node_bst.is_balanced(), True)
        self.assertEqual(self.bst.is_balanced(), False)
        self.assertEqual(self.perfect_bst.is_balanced(), True)
    def test_is_perfect(self):
        """Perfectness check; only the generated perfect fixture (and the
        trivial trees) qualify."""
        self.assertEqual(self.empty_bst.is_perfect(), True)
        self.assertEqual(self.one_node_bst.is_perfect(), True)
        self.assertEqual(self.bst.is_perfect(), False)
        self.assertEqual(self.perfect_bst.is_perfect(), True)
    def test_is_root(self):
        """Only the root node is reported as root."""
        self.assertEqual(self.bst.is_root(self.bst.root), True)
        self.assertEqual(self.bst.is_root(self.bst.root.left), False)
        self.assertEqual(self.bst.is_root(self.bst.root.right), False)
    def test_children(self):
        """children() yields existing child nodes only (no None entries)."""
        self.assertEqual(list(self.bst.children(self.bst.root)), [self.bst.root.left, self.bst.root.right])
        self.assertEqual(list(self.bst.children(self.bst.root.left)), [self.bst.root.left.left, self.bst.root.left.right])
        self.assertEqual(list(self.bst.children(self.bst.root.right)), [self.bst.root.right.right, ])
        self.assertEqual(list(self.bst.children(self.bst.root.left.left)), [])
    def test_num_children(self):
        """num_children() counts 0, 1, or 2 existing children."""
        self.assertEqual(self.bst.num_children(self.bst.root), 2)
        self.assertEqual(self.bst.num_children(self.bst.root.left), 2)
        self.assertEqual(self.bst.num_children(self.bst.root.right), 1)
        self.assertEqual(self.bst.num_children(self.bst.root.left.left), 0)
    def test_is_leaf(self):
        """Leaves (1, 4, 7, 13) are leaves; internal nodes are not."""
        self.assertEqual(self.bst.is_leaf(self.bst.root.left.left), True)
        self.assertEqual(self.bst.is_leaf(self.bst.root.left.right.left), True)
        self.assertEqual(self.bst.is_leaf(self.bst.root.left.right.right), True)
        self.assertEqual(self.bst.is_leaf(self.bst.root.right.right.left), True)
        self.assertEqual(self.bst.is_leaf(self.bst.root), False)
        self.assertEqual(self.bst.is_leaf(self.bst.root.left), False)
        self.assertEqual(self.bst.is_leaf(self.bst.root.right), False)
    def test_is_external(self):
        """is_external matches is_leaf for every sampled node."""
        self.assertEqual(self.bst.is_external(self.bst.root.left.left), True)
        self.assertEqual(self.bst.is_external(self.bst.root.left.right.left), True)
        self.assertEqual(self.bst.is_external(self.bst.root.left.right.right), True)
        self.assertEqual(self.bst.is_external(self.bst.root.right.right.left), True)
        self.assertEqual(self.bst.is_external(self.bst.root), False)
        self.assertEqual(self.bst.is_external(self.bst.root.left), False)
        self.assertEqual(self.bst.is_external(self.bst.root.right), False)
    def test_is_internal(self):
        """is_internal is the complement of is_leaf for every sampled node."""
        self.assertEqual(self.bst.is_internal(self.bst.root.left.left), False)
        self.assertEqual(self.bst.is_internal(self.bst.root.left.right.left), False)
        self.assertEqual(self.bst.is_internal(self.bst.root.left.right.right), False)
        self.assertEqual(self.bst.is_internal(self.bst.root.right.right.left), False)
        self.assertEqual(self.bst.is_internal(self.bst.root), True)
        self.assertEqual(self.bst.is_internal(self.bst.root.left), True)
        self.assertEqual(self.bst.is_internal(self.bst.root.right), True)
    def test_height(self):
        """Height is -1 for empty, 0 for one node, and grows with inserts
        (adding 15 then 30 deepens the right spine to height 4)."""
        self.assertEqual(self.empty_bst.height(), -1)
        self.assertEqual(self.one_node_bst.height(), 0)
        self.assertEqual(self.bst.height(), 3)
        self.assertEqual(self.bst.height(self.bst.root.left.left), 0)
        self.assertEqual(self.bst.height(self.bst.root.left.right), 1)
        self.assertEqual(self.bst.height(self.bst.root.right), 2)
        self.bst.insert(15)
        self.bst.insert(30)
        self.assertEqual(self.bst.height(), 4)
    def test_depth(self):
        """Depth counts edges from the root (root depth is 0)."""
        self.assertEqual(self.bst.depth(self.bst.root), 0)
        self.assertEqual(self.bst.depth(self.bst.root.right), 1)
        self.assertEqual(self.bst.depth(self.bst.root.right.right), 2)
        self.assertEqual(self.bst.depth(self.bst.root.left.right.left), 3)
        self.bst.insert(15)
        self.bst.insert(30)
        self.assertEqual(self.bst.depth(self.bst.root.right.right.right.right), 4)
    def test_level(self):
        """Level is 1-based: level == depth + 1 for every sampled node."""
        self.assertEqual(self.bst.level(self.bst.root), 1)
        self.assertEqual(self.bst.level(self.bst.root.right), 2)
        self.assertEqual(self.bst.level(self.bst.root.right.right), 3)
        self.assertEqual(self.bst.level(self.bst.root.left.right.left), 4)
        self.bst.insert(15)
        self.bst.insert(30)
        self.assertEqual(self.bst.level(self.bst.root.right.right.right.right), 5)
    def test_num_edges(self):
        """A tree with n nodes has n - 1 edges (0 for empty)."""
        self.assertEqual(self.empty_bst.num_edges(), 0)
        self.assertEqual(self.bst.num_edges(), len(self.bst) - 1)
    def test_insert(self):
        """Inserting a duplicate value raises ValueError."""
        with self.assertRaises(ValueError):
            self.bst.insert(random.choice(self.insert_items))
def test_search(self):
    """search() returns the node holding the key, or None when absent."""
    root = self.bst.root
    hits = {
        8: root,
        1: root.left.left,
        13: root.right.right.left,
    }
    for key, node in hits.items():
        self.assertEqual(self.bst.search(key), node)
    # A key that was never inserted yields no node.
    self.assertEqual(self.bst.search(100), None)
def test_get_min_node(self):
    """The minimum node's value must equal the smallest inserted item."""
    self.assertEqual(self.bst.get_min_node().value, min(self.insert_items))
def test_get_max_node(self):
    """The maximum node's value must equal the largest inserted item."""
    self.assertEqual(self.bst.get_max_node().value, max(self.insert_items))
def test_get_k_th_value(self):
    """The k-th smallest (1-based) must agree with plain sorting."""
    sorted_values = sorted(self.insert_items)
    for k in range(1, len(self.insert_items) + 1):
        with self.subTest(k=k):
            self.assertEqual(self.bst.get_k_th_smallest(k), sorted_values[k - 1])
def test_delete(self):
    """Deletion keeps the tree valid and in sync with a reference BST.

    After each delete the tree is compared (size and in-order sequence)
    against pythonds3's BinarySearchTree holding the same keys, and
    is_valid() re-checks the BST ordering property.
    """
    # Deleting a key that is not present must fail loudly.
    with self.assertRaises(ValueError):
        self.bst.delete(100)
    # Reference implementation seeded with the same items.
    bst2 = pythonds3.BinarySearchTree()
    for i in self.insert_items:
        bst2.put(key=i, value=i)
    # Expected shape after deleting leaf 1:
    # ______8
    # / \
    # 3__ 10___
    # \ \
    # 6 _14
    # / \ /
    # 4 7 13
    self.bst.delete(1)
    bst2.delete(1)
    self.assertEqual(len(self.bst), len(bst2))
    self.assertEqual(list(self.bst.traverse()), list(bst2))
    self.assertEqual(self.bst.is_valid(), True)
    # Expected shape after deleting the root (8):
    # ______10___
    # / \
    # 3__ _14
    # \ /
    # 6 13
    # / \
    # 4 7
    self.bst.delete(8)
    bst2.delete(8)
    self.assertEqual(len(self.bst), len(bst2))
    self.assertEqual(list(self.bst.traverse()), list(bst2))
    self.assertEqual(self.bst.is_valid(), True)
    # Expected shape after deleting the one-child node 6:
    # ____10___
    # / \
    # 3__ _14
    # \ /
    # 7 13
    # /
    # 4
    self.bst.delete(6)
    bst2.delete(6)
    self.assertEqual(len(self.bst), len(bst2))
    self.assertEqual(list(self.bst.traverse()), list(bst2))
    self.assertEqual(self.bst.is_valid(), True)
    # Drain the remaining nodes; the tree must end up empty yet still valid.
    self.bst.delete(10)
    self.bst.delete(4)
    self.bst.delete(7)
    self.bst.delete(14)
    self.bst.delete(3)
    self.bst.delete(13)
    self.assertEqual(len(self.bst), 0)
    self.assertFalse(self.bst)
    self.assertEqual(self.bst.is_valid(), True)
def test_inorder_traverse(self):
    """In-order traversal of a BST yields the keys in ascending order."""
    items = list(self.bst.inorder_traverse())
    expected = [1, 3, 4, 6, 7, 8, 10, 13, 14]
    self.assertEqual(items, expected)
    # Traversing from an empty child slot yields nothing.
    # NOTE(review): root.right.left is presumably an empty/None position
    # in the fixture tree — confirm against setUp.
    items = list(self.bst.inorder_traverse(self.bst.root.right.left))
    expected = []
    self.assertEqual(items, expected)
def test_preorder_traverse(self):
    """Pre-order visits a node before either of its subtrees."""
    expected = [8, 3, 1, 6, 4, 7, 10, 14, 13]
    self.assertEqual(list(self.bst.preorder_traverse()), expected)
def test_postorder_traverse(self):
    """Post-order visits both subtrees before the node itself."""
    expected = [1, 4, 7, 6, 3, 13, 14, 10, 8]
    self.assertEqual(list(self.bst.postorder_traverse()), expected)
def test_levelorder_traverse(self):
    """Level-order (BFS) visits nodes top-to-bottom, left-to-right."""
    expected = [8, 3, 10, 1, 6, 14, 4, 7, 13]
    self.assertEqual(list(self.bst.levelorder_traverse()), expected)
def test_traverse(self):
    """An unknown traversal-order name must be rejected."""
    with self.assertRaises(ValueError):
        self.bst.traverse('NOT EXIST')
def test_invert(self):
    """invert() mirrors the tree, which breaks the BST ordering property."""
    # Inverting an empty tree is a no-op that returns None.
    self.assertEqual(self.empty_bst.invert(), None)
    self.assertEqual(self.bst.is_valid(), True)
    self.bst.invert()
    # After mirroring, each level appears reversed in level order.
    items = list(self.bst.levelorder_traverse())
    expected = [8, 10, 3, 14, 6, 1, 13, 7, 4]
    self.assertEqual(items, expected)
    # The mirrored tree is no longer a valid binary SEARCH tree.
    self.assertEqual(self.bst.is_valid(), False)
def test_to_array_representation(self):
    """The array form must round-trip through the binarytree package."""
    # Empty tree -> empty array.
    array = self.empty_bst.to_array_representation()
    self.assertEqual(array, [])
    # Building a binarytree from our array must reproduce the same values.
    array = self.bst.to_array_representation()
    root = binarytree.build(array)
    self.assertEqual(array, root.values)
def test_from_array_representation(self):
    """Constructing from an array must invert to_array_representation."""
    # Empty array -> empty tree.
    bst = BinarySearchTree.from_array_representation([])
    self.assertEqual(bst, self.empty_bst)
    # Property check: a random valid BST array must round-trip exactly.
    array = binarytree.bst(height=random.randint(0, 9), is_perfect=random.choice([True, False])).values
    bst = BinarySearchTree.from_array_representation(array)
    self.assertEqual(array, bst.to_array_representation())
if __name__ == '__main__':
    # Run the whole test module when executed directly.
    unittest.main()
| 38.055046 | 122 | 0.637014 | 1,680 | 12,444 | 4.548214 | 0.073214 | 0.151158 | 0.2039 | 0.18139 | 0.720194 | 0.674519 | 0.556995 | 0.439602 | 0.35676 | 0.29381 | 0 | 0.023946 | 0.228142 | 12,444 | 326 | 123 | 38.171779 | 0.771577 | 0.032064 | 0 | 0.166667 | 0 | 0 | 0.001415 | 0 | 0 | 0 | 0 | 0 | 0.504274 | 1 | 0.162393 | false | 0 | 0.025641 | 0 | 0.196581 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6dde14ee6ed8902107fa73767442f625515a8816 | 157 | py | Python | fahrenheitToCelsius.py | CarvalhoBarberino/Python | 4f06cac2c4e9de52081356b77e7040b916b13ad9 | [
"MIT"
] | null | null | null | fahrenheitToCelsius.py | CarvalhoBarberino/Python | 4f06cac2c4e9de52081356b77e7040b916b13ad9 | [
"MIT"
] | null | null | null | fahrenheitToCelsius.py | CarvalhoBarberino/Python | 4f06cac2c4e9de52081356b77e7040b916b13ad9 | [
"MIT"
] | null | null | null | print("----\n")
# Interactive Fahrenheit -> Celsius converter (prompts are in Portuguese).
f = float(input("digite a temperatura em fahrenheit"))
# Standard conversion formula: C = (F - 32) * 5 / 9.
c = (f - 32) * 5 / 9
print("a temperatura é de ", c, "graus celcius")
print("----\n")
| 26.166667 | 54 | 0.585987 | 25 | 157 | 3.68 | 0.72 | 0.130435 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030534 | 0.165605 | 157 | 5 | 55 | 31.4 | 0.671756 | 0 | 0 | 0.4 | 0 | 0 | 0.496815 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.6 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
6de39b7136616ba2787cf28863899c7801acba28 | 405 | py | Python | backend/src/baserow/api/applications/errors.py | cjh0613/baserow | 62871f5bf53c9d25446976031aacb706c0abe584 | [
"MIT"
] | 839 | 2020-07-20T13:29:34.000Z | 2022-03-31T21:09:16.000Z | backend/src/baserow/api/applications/errors.py | cjh0613/baserow | 62871f5bf53c9d25446976031aacb706c0abe584 | [
"MIT"
] | 28 | 2020-08-07T09:23:58.000Z | 2022-03-01T22:32:40.000Z | backend/src/baserow/api/applications/errors.py | cjh0613/baserow | 62871f5bf53c9d25446976031aacb706c0abe584 | [
"MIT"
] | 79 | 2020-08-04T01:48:01.000Z | 2022-03-27T13:30:54.000Z | from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
ERROR_APPLICATION_DOES_NOT_EXIST = (
"ERROR_APPLICATION_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
"The requested application does not exist.",
)
ERROR_APPLICATION_NOT_IN_GROUP = (
"ERROR_APPLICATION_NOT_IN_GROUP",
HTTP_400_BAD_REQUEST,
"The application id {e.application_id} does not belong to the group.",
)
| 28.928571 | 74 | 0.787654 | 59 | 405 | 4.898305 | 0.40678 | 0.221453 | 0.186851 | 0.238754 | 0.453287 | 0.269896 | 0 | 0 | 0 | 0 | 0 | 0.034682 | 0.145679 | 405 | 13 | 75 | 31.153846 | 0.800578 | 0 | 0 | 0 | 0 | 0 | 0.419753 | 0.153086 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6de9c841b9c986d86121c59c9d859f58cdde538f | 1,994 | py | Python | notario/tests/test_exceptions.py | alfredodeza/notario | 036bdc8435778c6f20f059d3789c8eb8242cff92 | [
"MIT"
] | 4 | 2015-08-20T20:14:55.000Z | 2018-06-01T14:39:29.000Z | notario/tests/test_exceptions.py | alfredodeza/notario | 036bdc8435778c6f20f059d3789c8eb8242cff92 | [
"MIT"
] | 9 | 2016-02-04T21:46:12.000Z | 2018-11-14T04:43:10.000Z | notario/tests/test_exceptions.py | alfredodeza/notario | 036bdc8435778c6f20f059d3789c8eb8242cff92 | [
"MIT"
] | 4 | 2015-04-29T20:40:12.000Z | 2018-11-14T04:08:20.000Z | from notario import exceptions
def foo():
    """Trivial always-true callable used as a validator stand-in below."""
    return True
class Object(object): pass  # bare placeholder; tests attach attributes ad hoc
class TestInvalid(object):
    """Tests for how notario's Invalid exception formats its path/message."""

    def test_include_the_key(self):
        # The offending key must appear in the rendered path.
        error = exceptions.Invalid('key', ['foo', 'bar', 'key'])
        assert 'key' in error._format_path()

    def test_include_the_path_in_str(self):
        error = exceptions.Invalid('key', ['path'])
        assert 'path' in error.__str__()

    def test_include_the_key_in_str(self):
        error = exceptions.Invalid('key', ['path'])
        assert 'key' in error.__str__()

    def test_multiple_keys_in_format_path(self):
        # Path elements are joined with "->" arrows.
        error = exceptions.Invalid('schema', ['key', 'subkey', 'bar'])
        assert '-> key -> subkey -> bar' in error._format_path()

    def test_full_message(self):
        # Default pair: the *key* failed to match a plain value.
        error = exceptions.Invalid('3', ['foo', 'bar', 'baz'])
        result = error.__str__()
        assert "-> foo -> bar -> baz key did not match '3'" == result

    def test_full_message_for_callable(self):
        # A callable schema item is reported by its name.
        error = exceptions.Invalid(foo, ['foo', 'bar', 'baz'])
        result = error.__str__()
        assert "-> foo -> bar -> baz key did not pass validation against callable: foo" == result

    def test_full_message_for_value(self):
        # pair='value' drops the word "key" from the message.
        error = exceptions.Invalid('3', ['foo', 'bar', 'baz'], pair='value')
        result = error.__str__()
        assert "-> foo -> bar -> baz did not match '3'" == result

    def test_full_message_for_callable_with_value(self):
        error = exceptions.Invalid(foo, ['foo', 'bar', 'baz'], pair='value')
        result = error.__str__()
        assert "-> foo -> bar -> baz did not pass validation against callable: foo" == result
class TestSchemaError(object):
    """Tests for exceptions.SchemaError's handling of the `reason` object."""

    def test_reason_has_no_args(self):
        # A reason with an empty args list must still be repr()-ed cleanly.
        class Foo(object):
            def __repr__(self):
                return "some reason"
        reason = Foo()
        reason.args = []
        error = exceptions.SchemaError(foo, ['foo'], reason=reason, pair='value')
        assert "some reason" == repr(error.reason)
| 33.79661 | 97 | 0.614343 | 247 | 1,994 | 4.676113 | 0.210526 | 0.054545 | 0.131602 | 0.180087 | 0.634632 | 0.542857 | 0.483117 | 0.483117 | 0.320346 | 0.244156 | 0 | 0.002637 | 0.239218 | 1,994 | 58 | 98 | 34.37931 | 0.758734 | 0 | 0 | 0.146341 | 0 | 0 | 0.186058 | 0 | 0 | 0 | 0 | 0 | 0.219512 | 1 | 0.268293 | false | 0.073171 | 0.02439 | 0.04878 | 0.414634 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
6df9234771c2ff87e68154f598ee911c0fdf6f7a | 174 | py | Python | tests/abstract.py | limiear/solarbot | 2596f0b2bddceacf9d04bae134e30cec5c6fa965 | [
"MIT"
] | null | null | null | tests/abstract.py | limiear/solarbot | 2596f0b2bddceacf9d04bae134e30cec5c6fa965 | [
"MIT"
] | 10 | 2015-04-13T04:13:54.000Z | 2022-01-13T00:48:59.000Z | tests/abstract.py | limiear/solarbot | 2596f0b2bddceacf9d04bae134e30cec5c6fa965 | [
"MIT"
] | null | null | null | import unittest
import os
import glob
class TestCase(unittest.TestCase):
    """Base test case with a filesystem cleanup helper."""

    def remove(self, filename):
        """Delete every file matching the glob pattern *filename*.

        BUGFIX: the original used ``map(os.remove, files)``; in Python 3
        ``map`` returns a lazy iterator that was never consumed, so the
        files were never actually removed.
        """
        for path in glob.glob(filename):
            os.remove(path)
| 15.818182 | 35 | 0.683908 | 22 | 174 | 5.409091 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.224138 | 174 | 10 | 36 | 17.4 | 0.881481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.428571 | 0 | 0.714286 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
6dfdcfb0c13e8e795ba890ebb114e9b23e3a0467 | 141 | py | Python | src/end_to_end.py | alleriali/medical-relation-extraction-from-pdf | c10f2b57a8c65c7ef9af054241cc715576d56229 | [
"Apache-2.0"
] | null | null | null | src/end_to_end.py | alleriali/medical-relation-extraction-from-pdf | c10f2b57a8c65c7ef9af054241cc715576d56229 | [
"Apache-2.0"
] | null | null | null | src/end_to_end.py | alleriali/medical-relation-extraction-from-pdf | c10f2b57a8c65c7ef9af054241cc715576d56229 | [
"Apache-2.0"
] | 1 | 2021-01-24T18:45:24.000Z | 2021-01-24T18:45:24.000Z |
from BioBERT_NER import ner_lib

# Smoke-test the BioBERT NER pipeline end-to-end on one hard-coded sentence
# and print the annotated result.
result = ner_lib.get_annotated_sents(sent='cancer can not be cancer treated by dienogest.')
print(result)
| 20.142857 | 91 | 0.801418 | 23 | 141 | 4.695652 | 0.782609 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12766 | 141 | 6 | 92 | 23.5 | 0.878049 | 0 | 0 | 0 | 0 | 0 | 0.330935 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
09aa77a6f52030fedd25991d3bfd5808027e1a02 | 350 | py | Python | seekret/apitest/pytest_plugin/newhooks.py | seek-ret/tavernrtl | 707bff027e778f662e62c25601835b8ef8d42938 | [
"MIT"
] | 4 | 2021-04-11T08:39:28.000Z | 2021-04-25T20:39:08.000Z | seekret/apitest/pytest_plugin/newhooks.py | seek-ret/tavernrtl | 707bff027e778f662e62c25601835b8ef8d42938 | [
"MIT"
] | null | null | null | seekret/apitest/pytest_plugin/newhooks.py | seek-ret/tavernrtl | 707bff027e778f662e62c25601835b8ef8d42938 | [
"MIT"
] | 1 | 2021-04-11T12:28:02.000Z | 2021-04-11T12:28:02.000Z | """
This module contains the hooks specifications to hooks added by the Seekret plugin.
"""
from seekret.apitest.context.session import Session
def pytest_seekret_session_initialized(session: Session):
    """Hook specification: called when a run profile is loaded.

    Can be used to register auth methods that rely on information from the
    run profile.  The empty body is intentional — pytest hook specs only
    declare the signature; implementations live in plugins.
    """
| 26.923077 | 87 | 0.751429 | 49 | 350 | 5.306122 | 0.734694 | 0.076923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.185714 | 350 | 12 | 88 | 29.166667 | 0.912281 | 0.582857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0.5 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
09e691cc94c8defca0984f95195b21621e347f05 | 1,352 | py | Python | python/testData/inspections/PyUnresolvedReferencesInspection/importExceptImportError.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | null | null | null | python/testData/inspections/PyUnresolvedReferencesInspection/importExceptImportError.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | 11 | 2017-02-27T22:35:32.000Z | 2021-12-24T08:07:40.000Z | python/testData/inspections/PyUnresolvedReferencesInspection/importExceptImportError.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | 1 | 2020-11-27T10:36:50.000Z | 2020-11-27T10:36:50.000Z | # PY-3639
def f(x):
from <error descr="Unresolved reference 'foo'">foo</error> import <error descr="Unresolved reference 'StringIO'">StringIO</error>
return StringIO(x)
def f(x):
try:
from <error descr="Unresolved reference 'foo'">foo</error> import <warning descr="Module 'StringIO' not found">StringIO</warning>
except Exception:
pass
return x
def f(x):
try:
from foo import <warning descr="'StringIO' in try block with 'except ImportError' should also be defined in except block">StringIO</warning>
except ImportError:
pass
return StringIO(x)
def f(x):
try:
from lib1 import StringIO
except ImportError:
StringIO = lambda x: x
return StringIO(x)
# PY-3675
try:
import foo as bar
except ImportError:
import <warning descr="Module 'bar' not found">bar</warning>
# PY-3678
def f():
try:
from foo import bar #pass
except ImportError:
import <warning descr="Module 'bar' not found">bar</warning> #fail
finally:
pass
# PY-3869
def f(x):
try:
from foo import bar #pass
except ImportError:
def bar(x):
return x
return bar(x)
# PY-3919
def f(x):
try:
from foo import Bar #pass
except ImportError:
class Bar(object):
pass
return Bar()
| 22.533333 | 148 | 0.621302 | 180 | 1,352 | 4.666667 | 0.233333 | 0.033333 | 0.035714 | 0.047619 | 0.486905 | 0.486905 | 0.485714 | 0.460714 | 0.37381 | 0.254762 | 0 | 0.021429 | 0.275148 | 1,352 | 59 | 149 | 22.915254 | 0.835714 | 0.04068 | 0 | 0.695652 | 0 | 0 | 0.18818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.086957 | 0.347826 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 3 |
61cc85475c35aae4dde87294a171615145d3d528 | 249 | py | Python | youtuber/admin.py | shhusband0810/youclear | 711a9c89e138bdbfc07f595565c9c708ed19897b | [
"MIT"
] | null | null | null | youtuber/admin.py | shhusband0810/youclear | 711a9c89e138bdbfc07f595565c9c708ed19897b | [
"MIT"
] | null | null | null | youtuber/admin.py | shhusband0810/youclear | 711a9c89e138bdbfc07f595565c9c708ed19897b | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import *
# Register your models here.
# Expose every Youtuber-related model in the Django admin with default
# ModelAdmin options.
admin.site.register(Youtuber)
admin.site.register(MyYoutuber)
admin.site.register(Video)
admin.site.register(YoutuberList)
admin.site.register(MyYoutuberList)
| 16.6 | 35 | 0.803213 | 32 | 249 | 6.25 | 0.46875 | 0.225 | 0.425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096386 | 249 | 14 | 36 | 17.785714 | 0.888889 | 0.104418 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
61d2d8f0b6801219211edd12653fb92d32184ae2 | 126 | py | Python | Suluoya/__init__.py | Su-luoya/Suluoya | a2b5abd08ca0e9b9b4a762567fd3759abf80363a | [
"MIT"
] | 1 | 2021-11-17T07:46:28.000Z | 2021-11-17T07:46:28.000Z | Suluoya/__init__.py | Su-luoya/Suluoya | a2b5abd08ca0e9b9b4a762567fd3759abf80363a | [
"MIT"
] | null | null | null | Suluoya/__init__.py | Su-luoya/Suluoya | a2b5abd08ca0e9b9b4a762567fd3759abf80363a | [
"MIT"
] | null | null | null | name = "Suluoya"
author = 'Suluoya'
author_email = '1931960436@qq.com'
qq = '1931960436'
__all__ = ['sly']
from .sly import * | 18 | 34 | 0.68254 | 16 | 126 | 5.0625 | 0.6875 | 0.320988 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.186916 | 0.150794 | 126 | 7 | 35 | 18 | 0.570093 | 0 | 0 | 0 | 0 | 0 | 0.346457 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
61e6e6e1a270b5d9793ba8fad823751066e41a21 | 283 | py | Python | optalg/line_search/fixed.py | ShkalikovOleh/OptAlg | 03399eee50203dcba834a4d9ab48751142a6de2b | [
"MIT"
] | null | null | null | optalg/line_search/fixed.py | ShkalikovOleh/OptAlg | 03399eee50203dcba834a4d9ab48751142a6de2b | [
"MIT"
] | 3 | 2021-01-31T09:34:50.000Z | 2021-02-21T09:01:42.000Z | optalg/line_search/fixed.py | ShkalikovOleh/OptAlg | 03399eee50203dcba834a4d9ab48751142a6de2b | [
"MIT"
] | 1 | 2022-03-31T14:02:20.000Z | 2022-03-31T14:02:20.000Z | import numpy as np
from typing import Callable
from .line_searcher import LineSearcher
class FixedStep(LineSearcher):
    """Line searcher that always yields a constant, pre-configured step size."""

    def __init__(self, step) -> None:
        # Name-mangled private attribute; only read back in optimize().
        self.__step = step

    def optimize(self, f: Callable, xk: np.ndarray, pk: np.ndarray):
        """Ignore the objective and direction; return the fixed step."""
        return self.__step
| 21.769231 | 68 | 0.70318 | 38 | 283 | 5 | 0.605263 | 0.126316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.212014 | 283 | 12 | 69 | 23.583333 | 0.852018 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.375 | 0.125 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 3 |
61ec238a840f0c5a646f72b720b57ef607e30190 | 359 | py | Python | optimizer/environment/spark/predictionsparkmodel.py | lgasyou/spark-scheduler-configuration-optimizer | 05c0ea9411db642c7c7e675a6949ffcc6814947a | [
"MIT"
] | 1 | 2019-11-26T08:10:52.000Z | 2019-11-26T08:10:52.000Z | optimizer/environment/spark/predictionsparkmodel.py | lgasyou/spark-scheduler-configuration-optimizer | 05c0ea9411db642c7c7e675a6949ffcc6814947a | [
"MIT"
] | 1 | 2019-12-24T06:40:07.000Z | 2019-12-24T06:40:07.000Z | optimizer/environment/spark/predictionsparkmodel.py | lgasyou/spark-scheduler-configuration-optimizer | 05c0ea9411db642c7c7e675a6949ffcc6814947a | [
"MIT"
] | 2 | 2019-08-28T15:10:10.000Z | 2019-11-26T08:10:36.000Z | import dataclasses
from typing import List
@dataclasses.dataclass
class Stage(object):
    # Stage name.
    name: str
    # NOTE(review): presumably the ratio of this stage's input size to some
    # reference size — confirm against the prediction model that consumes it.
    input_ratio: float
    block_size: int
@dataclasses.dataclass
class Application(object):
    # Application name.
    name: str
    # Per-action average processing rates (default_factory avoids the
    # shared-mutable-default pitfall).
    average_action_process_rates: dict = dataclasses.field(default_factory=dict)
    # Ordered stages of this application.
    stages: List[Stage] = dataclasses.field(default_factory=list)
| 21.117647 | 80 | 0.766017 | 44 | 359 | 6.090909 | 0.613636 | 0.149254 | 0.186567 | 0.223881 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.155989 | 359 | 16 | 81 | 22.4375 | 0.884488 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.166667 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
61fe4e8346d136b7752a8e1454c2a8abf6ab2d63 | 55 | py | Python | src/firebasil/auth/__init__.py | k2bd/firebased | dccecd9ac1781899792f0b2343ce4891b9116cb2 | [
"MIT"
] | 1 | 2022-03-17T20:52:41.000Z | 2022-03-17T20:52:41.000Z | src/firebasil/auth/__init__.py | k2bd/firebased | dccecd9ac1781899792f0b2343ce4891b9116cb2 | [
"MIT"
] | 15 | 2022-02-16T22:33:21.000Z | 2022-03-18T11:30:47.000Z | src/firebasil/auth/__init__.py | k2bd/firebased | dccecd9ac1781899792f0b2343ce4891b9116cb2 | [
"MIT"
] | null | null | null | from .auth import AuthClient
__all__ = ["AuthClient"]
| 13.75 | 28 | 0.745455 | 6 | 55 | 6.166667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145455 | 55 | 3 | 29 | 18.333333 | 0.787234 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
11150c5259d3d16eaf27e26db0d2ebee78e4da35 | 565 | py | Python | FITBGenerator/SequenceLabeling/lossfunction.py | shivammehta007/NLPinEnglishLearning | ae869d868e39df9b1787134ba6e964acd385dd2e | [
"Apache-2.0"
] | 1 | 2020-05-27T22:21:33.000Z | 2020-05-27T22:21:33.000Z | FITBGenerator/SequenceLabeling/lossfunction.py | shivammehta007/NLPinEnglishLearning | ae869d868e39df9b1787134ba6e964acd385dd2e | [
"Apache-2.0"
] | null | null | null | FITBGenerator/SequenceLabeling/lossfunction.py | shivammehta007/NLPinEnglishLearning | ae869d868e39df9b1787134ba6e964acd385dd2e | [
"Apache-2.0"
] | null | null | null | """
Custom Loss Function
This loss function is binary cross entropy with logit loss
But it also supports masking of weights
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
class BCEWithLogitLossWithMask(nn.Module):
def __init__(self):
super().__init__()
def forward(self, prediction, y, mask):
"""
@param prediction: Model Prediced values
@param y: Label Values
@param mask: Mask of input lengths
"""
return F.binary_cross_entropy_with_logits(prediction, y, weight=mask[0])
| 24.565217 | 80 | 0.684956 | 75 | 565 | 5 | 0.6 | 0.088 | 0.096 | 0.117333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002304 | 0.231858 | 565 | 22 | 81 | 25.681818 | 0.861751 | 0.387611 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.375 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
113828594bac5cb50d8af4b65671ff95e393e01e | 266 | py | Python | files/Entrada/p26.py | heltonricardo/estudo-python | e82eb8ebc15378175b03d367a6eeea66e8858cff | [
"MIT"
] | null | null | null | files/Entrada/p26.py | heltonricardo/estudo-python | e82eb8ebc15378175b03d367a6eeea66e8858cff | [
"MIT"
] | null | null | null | files/Entrada/p26.py | heltonricardo/estudo-python | e82eb8ebc15378175b03d367a6eeea66e8858cff | [
"MIT"
] | null | null | null | from random import choice
n1 = input('Entre o nome 1: ')
n2 = input('Entre o nome 2: ')
n3 = input('Entre o nome 3: ')
n4 = input('Entre o nome 4: ')
lista = [n1, n2, n3, n4]
escolha = choice(lista)
print()
print('Aluno(a) escolhido(a): {}'.format(escolha))
input()
| 24.181818 | 50 | 0.635338 | 45 | 266 | 3.755556 | 0.511111 | 0.236686 | 0.260355 | 0.35503 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054545 | 0.172932 | 266 | 10 | 51 | 26.6 | 0.713636 | 0 | 0 | 0 | 0 | 0 | 0.334586 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.1 | 0.2 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
1162801d7f17d3a41edc33435057e410f807fba6 | 203 | py | Python | bookorbooks/book/api/serializers/book_level_serializers.py | talhakoylu/SummerInternshipBackend | 4ecedf5c97f73e3d32d5a534769e86aac3e4b6d3 | [
"MIT"
] | 1 | 2021-08-10T22:24:17.000Z | 2021-08-10T22:24:17.000Z | bookorbooks/book/api/serializers/book_level_serializers.py | talhakoylu/SummerInternshipBackend | 4ecedf5c97f73e3d32d5a534769e86aac3e4b6d3 | [
"MIT"
] | null | null | null | bookorbooks/book/api/serializers/book_level_serializers.py | talhakoylu/SummerInternshipBackend | 4ecedf5c97f73e3d32d5a534769e86aac3e4b6d3 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from book.models import BookLevel
class BookLevelSerializer(serializers.ModelSerializer):
class Meta:
model = BookLevel
fields = "__all__"
| 16.916667 | 55 | 0.738916 | 20 | 203 | 7.25 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.211823 | 203 | 11 | 56 | 18.454545 | 0.90625 | 0 | 0 | 0 | 0 | 0 | 0.035 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
116985eca39bd912e7d599db22e3c5455baf7727 | 536 | py | Python | map_as_object/map_as_object.py | tom-010/map-as-object | f92322c11d4004b5e03e97984edad9ffecf539e6 | [
"Apache-2.0"
] | null | null | null | map_as_object/map_as_object.py | tom-010/map-as-object | f92322c11d4004b5e03e97984edad9ffecf539e6 | [
"Apache-2.0"
] | null | null | null | map_as_object/map_as_object.py | tom-010/map-as-object | f92322c11d4004b5e03e97984edad9ffecf539e6 | [
"Apache-2.0"
] | null | null | null | class MapAsObject:
def __init__(self, wrapped):
self.wrapped = wrapped
def __getattr__(self, key):
try:
return self.wrapped[key]
except KeyError:
raise AttributeError("%r object has no attribute %r" %
(self.__class__.__name__, key))
def get(self, *args, **kwargs):
return self.wrapped.get(*args, **kwargs)
def __str__(self):
return str(self.wrapped)
def as_object(wrapped_map):
return MapAsObject(wrapped_map)
| 25.52381 | 66 | 0.585821 | 59 | 536 | 4.932203 | 0.440678 | 0.189003 | 0.116838 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.311567 | 536 | 20 | 67 | 26.8 | 0.788618 | 0 | 0 | 0 | 0 | 0 | 0.054104 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.2 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
fec873607c5af7fb5230b6523d222bc452c7fbe3 | 196 | py | Python | project/lib/celery_app.py | yohayg/python-example-project | fc5f30ccf18fab6a3eb968c6de98c2fe44f48f45 | [
"MIT"
] | 1 | 2018-05-23T09:15:37.000Z | 2018-05-23T09:15:37.000Z | project/lib/celery_app.py | yohayg/python-generator | fc5f30ccf18fab6a3eb968c6de98c2fe44f48f45 | [
"MIT"
] | null | null | null | project/lib/celery_app.py | yohayg/python-generator | fc5f30ccf18fab6a3eb968c6de98c2fe44f48f45 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from celery import Celery
app = Celery('test_celery', broker='amqp://localhost:5672', backend='rpc://',
include=['project.lib.celery_tasks'])
| 28 | 77 | 0.709184 | 24 | 196 | 5.5 | 0.708333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023952 | 0.147959 | 196 | 6 | 78 | 32.666667 | 0.766467 | 0 | 0 | 0 | 0 | 0 | 0.316327 | 0.229592 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
fed574e19404258036dfb237286662280d11d5a8 | 42,236 | py | Python | ANBO/main.py | blademoon/DATA_ENGINEER_FINAL_WORK | 775ddc2bb5100bee21e1efe8fcaec7949922fd29 | [
"Apache-2.0"
] | null | null | null | ANBO/main.py | blademoon/DATA_ENGINEER_FINAL_WORK | 775ddc2bb5100bee21e1efe8fcaec7949922fd29 | [
"Apache-2.0"
] | null | null | null | ANBO/main.py | blademoon/DATA_ENGINEER_FINAL_WORK | 775ddc2bb5100bee21e1efe8fcaec7949922fd29 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import logging
import os
import re
import sys
import jaydebeapi
import pandas as pd
# Debug mode switch.
# True  - development mode: autocommit after every transaction
#         (autocommit = true) and extra diagnostic output to stdout.
# False - production mode: autocommit after each transaction is off and
#         nothing is printed; the transaction is committed only once, at the
#         end of a fully successful load.
# IMPORTANT: messages are written to the run journal ("main.log") in BOTH modes.
DEBUG = True
# Loads the first matching flat file from a directory into a pandas DataFrame.
def file_to_df(directory_full_path, file_template):
    """Find the first file in *directory_full_path* whose name matches
    *file_template*, load it into a pandas DataFrame, and move the file to
    the cwd-relative ``archive`` folder with a ``.backup`` suffix.

    Supported inputs:
      * ``*passport*.xlsx`` — passport blacklist, sheet "blacklist";
      * ``*terminals*.xlsx`` — terminal registry, sheet "terminals"; the
        8-digit date (DDMMYYYY) from the file name is added as column "date";
      * ``*.txt``           — ';'-separated text file with ',' decimals.

    Raises Exception when no file matches the template.
    """
    for filename in os.listdir(directory_full_path):
        if DEBUG:
            print("Current file in loop: {}".format(filename))
        if not re.match(file_template, filename):
            continue
        if DEBUG:
            print("File detected! {}".format(filename))
        # BUGFIX: read and rename via the full path instead of a bare,
        # cwd-relative filename, so the function also works when
        # directory_full_path is not the current working directory.
        full_path = os.path.join(directory_full_path, filename)
        lower_name = filename.lower()
        if lower_name.endswith(".xlsx") and ("passport" in filename):
            func_result = pd.read_excel(full_path, sheet_name="blacklist", header=0, index_col=None)
            _archive_file(full_path, filename)
            return func_result
        if lower_name.endswith(".xlsx") and ("terminals" in filename):
            func_result = pd.read_excel(full_path, sheet_name="terminals", header=0, index_col=None)
            # BUGFIX: raw string for the regex (avoids an invalid-escape
            # DeprecationWarning and keeps the pattern literal).
            temp_date = (re.search(r'\d{8}', filename)).group(0)
            # DDMMYYYY -> "DD.MM.YYYY".
            date_str = temp_date[0:2] + "." + temp_date[2:4] + "." + temp_date[4:8]
            # Attach the file date to every row of the frame.
            func_result["date"] = date_str
            _archive_file(full_path, filename)
            return func_result
        if lower_name.endswith(".txt"):
            func_result = pd.read_csv(full_path, sep=';', decimal=',', header=0)
            _archive_file(full_path, filename)
            return func_result
    # No file matched the template — fail loudly.
    raise Exception("Can't find a data flat file that matches the pattern \"{}\"".format(file_template))


def _archive_file(src_path, filename):
    """Move a processed flat file into the cwd-relative archive folder.

    The archive directory is created next to the script by the module body;
    the destination keeps the original name plus a ``.backup`` suffix.
    """
    os.rename(src_path, os.path.join(".", "archive", filename + ".backup"))
# Opens a connection to the DWH server.
def connect_to_dwh(username, password, server, port, ojdbc8_jar_file_path):
    """Open a JDBC connection to the Oracle DWH via the thin driver."""
    jdbc_url = 'jdbc:oracle:thin:{usr}/{passwd}@{serv}:{port}/deoracle'.format(
        usr=username, passwd=password, serv=server, port=port)
    # NOTE(review): credentials are embedded in the URL *and* passed in the
    # driver argument list; presumably the driver ignores one of the two.
    return jaydebeapi.connect('oracle.jdbc.driver.OracleDriver',
                              jdbc_url,
                              [username, password],
                              ojdbc8_jar_file_path)
# Clears the staging tables.
def clear_all_tables(stg_table_names, sql_curs) -> None:
    """Remove every row from each of the given staging tables.

    Table names come from an internal, trusted list, so plain string
    formatting is used for the DELETE statements.
    """
    for tbl in stg_table_names:
        sql_curs.execute("DELETE FROM {tbl_nm}".format(tbl_nm=tbl))
    return None
# Small exit handler that also closes the server connection.
# NOTE(review): "hadler" is a typo for "handler", kept because external
# callers may reference this name.
def exit_hadler(sql_server_curs, sql_server_connection):
    """Close DB resources and terminate the script.

    In production (DEBUG == False) any uncommitted changes are rolled back
    first, so an aborted run can never corrupt data already stored in the
    warehouse.
    """
    # Close the cursor.
    sql_server_curs.close()
    # Production safety net: when not debugging, roll back the changes made
    # to the warehouse, so only an explicit commit elsewhere persists data.
    if not DEBUG:
        # On an error exit — roll back the whole transaction.
        sql_server_connection.rollback()
    # Close the connection.
    sql_server_connection.close()
    # Exit the script.
    sys.exit()
# Bulk-inserts the rows of a pandas DataFrame into a staging SQL table.
def load_flat_file_to_stg(user_name, table_name, table_fields, sql_curs, flat_file_dataframe) -> None:
    """Insert every row of *flat_file_dataframe* into {user_name}.{table_name}.

    *table_fields* lists the target column names; their order must match the
    dataframe's column order, since the rows are bound positionally.

    Raises ValueError for an empty field list — the original code built a
    spurious single "?" placeholder in that case ("?, " * -1 + "?"), producing
    a malformed INSERT.
    """
    if not table_fields:
        raise ValueError("table_fields must contain at least one column name")
    # One "?" bind placeholder per target column.
    placeholders = ", ".join(["?"] * len(table_fields))
    sql_req1 = """insert into {usr}.{tbl} ( {fields} ) values ( {val_cnt} ) """.format(
        usr=user_name,
        tbl=table_name,
        fields=(', '.join(table_fields)),
        val_cnt=placeholders)
    # Debugging.
    if DEBUG:
        print(sql_req1)
    # Execute one batched insert for all rows.
    sql_curs.executemany(sql_req1, flat_file_dataframe.values.tolist())
    return None
# Current working directory (flat files and the archive folder live here).
cwd = os.getcwd()
# Derive the script name so the log file gets the same base name.
(script_name, ext) = os.path.splitext(os.path.basename(__file__))
try:
    logging.basicConfig(filename=(script_name + '.log'), filemode='a', level=logging.DEBUG, encoding='utf-8',
                        format='%(asctime)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
except Exception as exc:
    # Logging is unavailable, so report to stdout and stop.
    print("Can\'t create or open log file! Abnormal termination of script execution. \n{}".format(exc))
    exit()
# Report the successful start of the script.
logging.info('The script was launched successfully.')
# Verify that the archive directory (where processed flat files are kept) exists and is writable.
ArchiveSubDir = "archive"
ArchiveFullPath = os.path.join(cwd, ArchiveSubDir)
# Create the archive directory if it is missing; otherwise check access rights.
if not os.path.isdir(ArchiveFullPath):
    try:
        # Create the required directory.
        os.mkdir(ArchiveFullPath)
        # Report success and continue.
        logging.info("Archive directory \"{}\" created successfully.".format(ArchiveFullPath))
    except Exception as exc:
        # Report the exception and exit.
        logging.error("Can't create archive directory! {} Abnormal termination of script execution.".format(exc))
        exit()
else:
    if not os.access(ArchiveFullPath, os.W_OK):
        # No write access: log the error and quit.
        logging.error("Can't write to archive directory. Abnormal termination of script execution.")
        exit()
    else:
        logging.info("Archive directory \"{}\" is exist and available for writing.".format(ArchiveFullPath))
# Connect to the DWH using the parameters below.
# NOTE(review): credentials are hard-coded in the source; consider moving them
# to environment variables or a secured config file.
stg_user_name = "DEMIPT"
password = "gandalfthegrey"
server = "de-oracle.chronosavant.ru"
port = "1521"
path = "/home/demipt/anbo/ojdbc8.jar"
try:
    conn = connect_to_dwh(stg_user_name, password, server, port, path)
    # Outside debug mode, disable autocommit so the whole load is one transaction.
    if not DEBUG:
        conn.jconn.setAutoCommit(False)
    curs = conn.cursor()
    # Report the successful connection.
    logging.info("Connection to the server \"{}\" was established successfully.".format(server))
except Exception as exc:
    logging.error(
        "Can't connect to DWH server. Abnormal termination of script execution. \n Detailed information: {}".format(
            exc))
    exit()
# --------------------------------------------- 1. Clearing data from STG -----------------------------------------------
# SCD-2, step 1. Purge all staging tables before the new load.
staging_table_names = ["ANBO_STG_PSSPRT_BLCKLST",
                       "ANBO_STG_TRANSACTIONS",
                       "ANBO_STG_TERMINALS",
                       "ANBO_STG_TERMINALS_DRFT",
                       "ANBO_STG_BANK_ACCOUNTS",
                       "ANBO_STG_BANK_CARDS",
                       "ANBO_STG_BANK_CLIENTS",
                       "ANBO_STG_BANK_ACCOUNTS_DEL",
                       "ANBO_STG_BANK_CARDS_DEL",
                       "ANBO_STG_BANK_CLIENTS_DEL",
                       "ANBO_STG_TERMINALS_DEL"]
# Clear every staging table.
try:
    clear_all_tables(staging_table_names, curs)
    # Report success, continue. (Fix: dropped a stray no-op
    # ``.format(ArchiveFullPath)`` call — the message has no placeholder.)
    logging.info("SCD-2.1: All staging tables have been cleared successfully.")
except Exception as exc:
    # Report the exception and exit.
    logging.error(
        "SCD-2.1: Can't clear the staging tables. Abnormal termination of script execution. \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# ---------------------------------------- 2. Capture data from the source into STG -----------------------------------
# Load data from the flat files into staging.
flat_files_templates = ["passport_blacklist_\d{8}.xlsx", "transactions_\d{8}.txt", "terminals_\d{8}.xlsx"]
# List of dicts describing the staging table (name -> column list) for each flat-file template.
staging_tables_descriptions = [{"ANBO_STG_PSSPRT_BLCKLST": ['ENTRY_DT', 'PASSPORT_NUM']},
                               {"ANBO_STG_TRANSACTIONS": ['TRANSACTION_ID', 'TRANSACTION_DATE', 'AMOUNT', 'CARD_NUM',
                                                          'OPER_TYPE', 'OPER_RESULT', 'TERMINAL']},
                               {"ANBO_STG_TERMINALS_DRFT": ['TERMINAL_ID', 'TERMINAL_TYPE', 'TERMINAL_CITY',
                                                            'TERMINAL_ADDRESS', 'UPLOAD_DT']}]
# Walk both lists in lockstep: one file template per staging-table description.
for (file_name_template, current_table_desc) in zip(flat_files_templates, staging_tables_descriptions):
    # Read the flat file matching the current template into a dataframe.
    try:
        df = file_to_df(cwd, file_name_template)
        # For the passport blacklist dataframe, change the date column to string
        # so the timestamp type cannot break the insert later on.
        if "passport_blacklist_" in file_name_template:
            df['date'] = df['date'].astype(str)
        logging.info(
            "SCD-2.2: The file matching the pattern \"{}\" was read to dataframe successfully.".format(
                file_name_template))
    except Exception as exc:
        # Report the exception and exit.
        logging.error(
            "SCD-2.2: Can't load data from a flat file that matches the pattern \"{}\" into a data frame \n Detailed information: {}".format(
                file_name_template, exc))
        exit_hadler(curs, conn)
    # Unpack the single (table name, field list) pair from the description dict.
    [[table_name, table_fields]] = current_table_desc.items()
    # Insert the dataframe into the matching staging table.
    try:
        load_flat_file_to_stg("DEMIPT",
                              table_name,
                              table_fields,
                              curs,
                              df)
        logging.info(
            "SCD-2.2: The data from the flat file was successfully loaded into the \"{}\" table.".format(table_name))
    except Exception as exc:
        # Report the exception and exit.
        logging.error(
            "SCD-2.2: Can't load data into the staging table for a file with the \"{}\" template. \n Detailed information: {}".format(
                file_name_template, exc))
        exit_hadler(curs, conn)
# Load data from the SQL sources into the staging tables.
# Only rows changed since the last recorded load (ANBO_META_LOADING) are taken.
# BANK.ACCOUNTS -> ANBO_STG_BANK_ACCOUNTS
try:
    sql_req = """
        insert into DEMIPT.ANBO_STG_BANK_ACCOUNTS( ACCOUNT, VALID_TO, CLIENT, CREATE_DT, UPDATE_DT )
        select ACCOUNT, VALID_TO, CLIENT, CREATE_DT, UPDATE_DT
        from BANK.ACCOUNTS
        where COALESCE(UPDATE_DT,CREATE_DT) > (select LAST_UPDATE from DEMIPT.ANBO_META_LOADING where DBNAME = 'DEMIPT' and TABLENAME = 'ANBO_DWH_DIM_ACCOUNTS_HIST')
        """
    curs.execute(sql_req)
    logging.info("SCD-2.2: BANK.ACCOUNTS -> ANBO_STG_BANK_ACCOUNTS OK.")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.2: BANK.ACCOUNTS -> ANBO_STG_BANK_ACCOUNTS. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# BANK.CARDS -> ANBO_STG_BANK_CARDS
# TRIM the key column right away so trailing spaces cannot cause trouble later.
try:
    sql_req = """
        insert into DEMIPT.ANBO_STG_BANK_CARDS( CARD_NUM, ACCOUNT, CREATE_DT, UPDATE_DT )
        select TRIM(CARD_NUM), ACCOUNT, CREATE_DT, UPDATE_DT
        from BANK.CARDS
        where COALESCE(UPDATE_DT,CREATE_DT) > (
            select LAST_UPDATE from DEMIPT.ANBO_META_LOADING where DBNAME = 'DEMIPT' and TABLENAME = 'ANBO_DWH_DIM_CARDS_HIST')
        """
    curs.execute(sql_req)
    logging.info("SCD-2.2: BANK.CARDS -> ANBO_STG_BANK_CARDS.")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.2: BANK.CARDS -> ANBO_STG_BANK_CARDS. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# BANK.CLIENTS -> ANBO_STG_BANK_CLIENTS
# Take only client rows changed since the last recorded CLIENTS_HIST load.
try:
    sql_req = """
        insert into DEMIPT.ANBO_STG_BANK_CLIENTS ( CLIENT_ID, LAST_NAME, FIRST_NAME, PATRONYMIC, DATE_OF_BIRTH, PASSPORT_NUM, PASSPORT_VALID_TO, PHONE, CREATE_DT, UPDATE_DT )
        select CLIENT_ID, LAST_NAME, FIRST_NAME, PATRONYMIC, DATE_OF_BIRTH, PASSPORT_NUM, PASSPORT_VALID_TO, PHONE, CREATE_DT, UPDATE_DT
        from BANK.CLIENTS
        where COALESCE(UPDATE_DT,CREATE_DT) > ( select LAST_UPDATE from DEMIPT.ANBO_META_LOADING where DBNAME = 'DEMIPT' and TABLENAME = 'ANBO_DWH_DIM_CLIENTS_HIST')
        """
    curs.execute(sql_req)
    logging.info("SCD-2.2: BANK.CLIENTS -> ANBO_STG_BANK_CLIENTS OK.")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.2: BANK.CLIENTS -> ANBO_STG_BANK_CLIENTS. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS
# Changed rows: terminals already in the DWH whose attributes differ from
# the current draft upload.
# Fix: the WHERE clause compared t1.terminal_type != t2.terminal_type twice;
# the duplicate predicate was removed (OR of identical conditions — no
# behavior change).
# NOTE(review): MIN(...) OVER (PARTITION BY ... ORDER BY ...) is a running
# minimum, not the partition-wide minimum; confirm whether the ORDER BY was
# intended here.
try:
    sql_req = """
        INSERT INTO ANBO_STG_TERMINALS ( TERMINAL_ID, TERMINAL_TYPE, TERMINAL_CITY, TERMINAL_ADDRESS, CREATE_DT, UPDATE_DT )
        SELECT
            t1.TERMINAL_ID,
            t1.TERMINAL_TYPE,
            t1.TERMINAL_CITY,
            t1.TERMINAL_ADDRESS,
            MIN(t2.EFFECTIVE_FROM) OVER (PARTITION BY t2.TERMINAL_ID ORDER BY t2.EFFECTIVE_FROM) as CREATE_DT,
            TO_DATE(t1.UPLOAD_DT,'DD.MM.YYYY')
        FROM ANBO_STG_TERMINALS_DRFT t1
        LEFT JOIN ANBO_DWH_DIM_TERMINALS_HIST t2
            ON t1.terminal_id = t2.TERMINAL_ID
            AND t2.EFFECTIVE_TO = TO_DATE('31.12.2999','DD.MM.YYYY')
        WHERE
            t1.terminal_type != t2.terminal_type
            OR
            t1.terminal_city != t2.terminal_city
            OR
            t1.TERMINAL_ADDRESS != t2.terminal_address
        """
    curs.execute(sql_req)
    logging.info(
        "SCD-2.2: (CHANGED ROWS) ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS OK.")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.2: (CHANGED ROWS) ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS. \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS
# New rows: draft terminals that have no match in the DWH history table.
try:
    sql_req = """
        INSERT INTO ANBO_STG_TERMINALS ( TERMINAL_ID, TERMINAL_TYPE, TERMINAL_CITY, TERMINAL_ADDRESS, CREATE_DT, UPDATE_DT )
        SELECT
            t1.TERMINAL_ID,
            t1.TERMINAL_TYPE,
            t1.TERMINAL_CITY,
            t1.TERMINAL_ADDRESS,
            TO_DATE(t1.UPLOAD_DT, 'DD.MM.YYYY'),
            NULL
        FROM ANBO_STG_TERMINALS_DRFT t1
        LEFT JOIN ANBO_DWH_DIM_TERMINALS_HIST t2
            ON t1.terminal_id = t2.TERMINAL_ID
        WHERE
            t2.terminal_id IS NULL
        """
    curs.execute(sql_req)
    logging.info(
        "SCD-2.2: (NEW ROWS) ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS.")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.2: (NEW ROWS) ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS. \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# ------------------------------------ 3. Loading data from staging into the warehouse ---------------------------------
# (FACT) ANBO_STG_TRANSACTIONS -> ANBO_DWH_FACT_TRANSACTIONS (INSERT)
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_DWH_FACT_TRANSACTIONS( TRANS_ID, TRANS_DATE, CARD_NUM, OPER_TYPE, AMT, OPER_RESULT, TERMINAL)
        SELECT
            TRANSACTION_ID,
            TO_DATE(TRANSACTION_DATE,'YYYY-MM-DD HH24:MI:SS'),
            CARD_NUM,
            OPER_TYPE,
            AMOUNT,
            OPER_RESULT,
            TERMINAL
        FROM DEMIPT.ANBO_STG_TRANSACTIONS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (FACT) ANBO_STG_TRANSACTIONS -> ANBO_DWH_FACT_TRANSACTIONS.")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (FACT) ANBO_STG_TRANSACTIONS -> ANBO_DWH_FACT_TRANSACTIONS. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# (FACT) ANBO_STG_PSSPRT_BLCKLST -> ANBO_DWH_FACT_PSSPRT_BLCKLST (INSERT)
# Only entries newer than the last recorded load are inserted.
try:
    sql_req = """
        INSERT INTO ANBO_DWH_FACT_PSSPRT_BLCKLST( PASSPORT_NUM, ENTRY_DT )
        SELECT
            PASSPORT_NUM,
            TO_DATE(ENTRY_DT, 'YYYY-MM-DD')
        FROM ANBO_STG_PSSPRT_BLCKLST
        WHERE TO_DATE(ENTRY_DT, 'YYYY-MM-DD') > (
            SELECT LAST_UPDATE FROM ANBO_META_LOADING WHERE DBNAME = 'DEMIPT' AND TABLENAME = 'ANBO_DWH_FACT_PSSPRT_BLCKLST'
        )
        """
    curs.execute(sql_req)
    logging.info(
        "SCD-2.3: (FACT) ANBO_STG_PSSPRT_BLCKLST -> ANBO_DWH_FACT_PSSPRT_BLCKLST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (FACT) ANBO_STG_PSSPRT_BLCKLST -> ANBO_DWH_FACT_PSSPRT_BLCKLST. \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) ANBO_STG_TERMINALS -> ANBO_DWH_DIM_TERMINALS_HIST (INSERT)
# Insert the new/changed terminal versions as open records (EFFECTIVE_TO = 2999-12-31).
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_DWH_DIM_TERMINALS_HIST( TERMINAL_ID, TERMINAL_TYPE, TERMINAL_CITY, TERMINAL_ADDRESS, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            TERMINAL_ID,
            TERMINAL_TYPE,
            TERMINAL_CITY,
            TERMINAL_ADDRESS,
            COALESCE( UPDATE_DT, CREATE_DT ),
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'N'
        FROM DEMIPT.ANBO_STG_TERMINALS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM INSERT) ANBO_STG_TERMINALS -> ANBO_DWH_DIM_TERMINALS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM INSERT) ANBO_STG_TERMINALS -> ANBO_DWH_DIM_TERMINALS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) ANBO_STG_TERMINALS -> ANBO_DWH_DIM_TERMINALS_HIST (MERGE)
# Close the previously open record one second before the new version starts.
try:
    sql_req = """
        MERGE INTO DEMIPT.ANBO_DWH_DIM_TERMINALS_HIST tgt
        USING DEMIPT.ANBO_STG_TERMINALS src
        ON ( tgt.TERMINAL_ID = src.TERMINAL_ID and tgt.EFFECTIVE_FROM < COALESCE( src.UPDATE_DT, src.CREATE_DT ) )
        WHEN matched THEN UPDATE SET tgt.EFFECTIVE_TO = COALESCE( src.UPDATE_DT, src.CREATE_DT ) - interval '1' second
        WHERE tgt.EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM MERGE) ANBO_STG_TERMINALS -> ANBO_DWH_DIM_TERMINALS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM MERGE) ANBO_STG_TERMINALS -> ANBO_DWH_DIM_TERMINALS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) ANBO_STG_BANK_ACCOUNTS -> ANBO_DWH_DIM_ACCOUNTS_HIST (INSERT)
# Insert the new/changed account versions as open records.
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_DWH_DIM_ACCOUNTS_HIST( ACCOUNT_NUM, VALID_TO, CLIENT, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT ACCOUNT, VALID_TO, CLIENT, COALESCE( UPDATE_DT, CREATE_DT ), TO_DATE( '2999-12-31', 'YYYY-MM-DD' ), 'N'
        FROM DEMIPT.ANBO_STG_BANK_ACCOUNTS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM INSERT) ANBO_STG_BANK_ACCOUNTS -> ANBO_DWH_DIM_ACCOUNTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM INSERT) ANBO_STG_BANK_ACCOUNTS -> ANBO_DWH_DIM_ACCOUNTS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) ANBO_STG_BANK_ACCOUNTS -> ANBO_DWH_DIM_ACCOUNTS_HIST (MERGE)
# Close the previously open record one second before the new version starts.
try:
    sql_req = """
        MERGE INTO DEMIPT.ANBO_DWH_DIM_ACCOUNTS_HIST tgt
        USING DEMIPT.ANBO_STG_BANK_ACCOUNTS src
        ON ( tgt.ACCOUNT_NUM = src.ACCOUNT and tgt.EFFECTIVE_FROM < COALESCE( src.UPDATE_DT, src.CREATE_DT ) )
        WHEN matched THEN UPDATE SET tgt.EFFECTIVE_TO = COALESCE( src.UPDATE_DT, src.CREATE_DT ) - interval '1' second
        WHERE tgt.EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM MERGE) ANBO_STG_BANK_ACCOUNTS -> ANBO_DWH_DIM_ACCOUNTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM MERGE) ANBO_STG_BANK_ACCOUNTS -> ANBO_DWH_DIM_ACCOUNTS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) ANBO_STG_BANK_CARDS -> ANBO_DWH_DIM_CARDS_HIST (INSERT)
# TRIM strips stray spaces from the key column.
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_DWH_DIM_CARDS_HIST( CARD_NUM, ACCOUNT_NUM, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            TRIM(CARD_NUM),
            ACCOUNT,
            COALESCE( UPDATE_DT, CREATE_DT ),
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'N'
        FROM DEMIPT.ANBO_STG_BANK_CARDS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM INSERT TRIM) ANBO_STG_BANK_CARDS -> ANBO_DWH_DIM_CARDS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM INSERT) ANBO_STG_BANK_CARDS -> ANBO_DWH_DIM_CARDS_HIST \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# (DIM) ANBO_STG_BANK_CARDS -> ANBO_DWH_DIM_CARDS_HIST (MERGE)
# TRIM strips stray spaces from the key column; closes the previous open record.
try:
    sql_req = """
        MERGE INTO DEMIPT.ANBO_DWH_DIM_CARDS_HIST tgt
        USING DEMIPT.ANBO_STG_BANK_CARDS src
        ON ( tgt.CARD_NUM = TRIM(src.CARD_NUM) and tgt.EFFECTIVE_FROM < COALESCE( src.UPDATE_DT, src.CREATE_DT ) )
        WHEN matched THEN UPDATE SET tgt.EFFECTIVE_TO = COALESCE( src.UPDATE_DT, src.CREATE_DT ) - interval '1' second
        WHERE tgt.EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM MERGE TRIM) ANBO_STG_BANK_CARDS -> ANBO_DWH_DIM_CARDS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM MERGE TRIM) ANBO_STG_BANK_CARDS -> ANBO_DWH_DIM_CARDS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) DEMIPT.ANBO_STG_BANK_CLIENTS -> ANBO_DWH_DIM_CLIENTS_HIST (INSERT)
# Insert the new/changed client versions as open records.
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_DWH_DIM_CLIENTS_HIST( CLIENT_ID, LAST_NAME, FIRST_NAME, PATRONYMIC, DATE_OF_BIRTH, PASSPORT_NUM, PASSPORT_VALID_TO, PHONE, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            CLIENT_ID,
            LAST_NAME, FIRST_NAME, PATRONYMIC, DATE_OF_BIRTH, PASSPORT_NUM, PASSPORT_VALID_TO, PHONE,
            COALESCE( UPDATE_DT, CREATE_DT ),
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'N'
        FROM DEMIPT.ANBO_STG_BANK_CLIENTS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM INSERT) DEMIPT.ANBO_STG_BANK_CLIENTS -> ANBO_DWH_DIM_CLIENTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM INSERT) DEMIPT.ANBO_STG_BANK_CLIENTS -> ANBO_DWH_DIM_CLIENTS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# (DIM) DEMIPT.ANBO_STG_BANK_CLIENTS -> ANBO_DWH_DIM_CLIENTS_HIST (MERGE)
# Close the previously open record one second before the new version starts.
try:
    sql_req = """
        MERGE INTO DEMIPT.ANBO_DWH_DIM_CLIENTS_HIST tgt
        USING DEMIPT.ANBO_STG_BANK_CLIENTS src
        ON ( tgt.CLIENT_ID = src.CLIENT_ID and tgt.EFFECTIVE_FROM < COALESCE( src.UPDATE_DT, src.CREATE_DT ) )
        WHEN matched THEN UPDATE SET tgt.EFFECTIVE_TO = COALESCE( src.UPDATE_DT, src.CREATE_DT ) - interval '1' second
        WHERE tgt.EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.3: (DIM MERGE) ANBO_STG_BANK_CLIENTS -> ANBO_DWH_DIM_CLIENTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.3: (DIM MERGE) ANBO_STG_BANK_CLIENTS -> ANBO_DWH_DIM_CLIENTS_HIST \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# ------------------------------ 4. Capture the key sets for deletion detection (optional) -----------------------------
# ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS_DEL
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_STG_TERMINALS_DEL ( TERMINAL_ID )
        select TERMINAL_ID FROM DEMIPT.ANBO_STG_TERMINALS_DRFT
        """
    curs.execute(sql_req)
    logging.info("SCD-2.4: (DELETING, KEYS) ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS_DEL. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.4: (DELETING, KEYS) ANBO_STG_TERMINALS_DRFT -> ANBO_STG_TERMINALS_DEL. \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# BANK.ACCOUNTS -> ANBO_STG_BANK_ACCOUNTS_DEL
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_STG_BANK_ACCOUNTS_DEL ( ACCOUNT )
        select ACCOUNT FROM BANK.ACCOUNTS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.4: (DELETING, KEYS) BANK.ACCOUNTS -> ANBO_STG_BANK_ACCOUNTS_DEL. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.4: (DELETING, KEYS) BANK.ACCOUNTS -> ANBO_STG_BANK_ACCOUNTS_DEL. \n Detailed information: {}".format(
            exc))
    exit_hadler(curs, conn)
# BANK.CARDS -> ANBO_STG_BANK_CARDS_DEL
# TRIM strips stray spaces from the key column; the later deletion check
# LEFT JOINs against the DWH table where the key is already trimmed.
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_STG_BANK_CARDS_DEL ( CARD_NUM )
        select TRIM(CARD_NUM) FROM BANK.CARDS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.4: (DELETING, KEYS) BANK.CARDS -> ANBO_STG_BANK_CARDS_DEL. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.4: (DELETING, KEYS) BANK.CARDS -> ANBO_STG_BANK_CARDS_DEL. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# BANK.CLIENTS -> ANBO_STG_BANK_CLIENTS_DEL
# Capture the full key set of BANK.CLIENTS so deleted clients can be detected.
# Fix: both log messages previously referred to BANK.CARDS ->
# ANBO_STG_BANK_CARDS_DEL (copy-paste slip from the section above).
try:
    sql_req = """
        INSERT INTO DEMIPT.ANBO_STG_BANK_CLIENTS_DEL ( CLIENT_ID )
        select CLIENT_ID FROM BANK.CLIENTS
        """
    curs.execute(sql_req)
    logging.info("SCD-2.4: (DELETING, KEYS) BANK.CLIENTS -> ANBO_STG_BANK_CLIENTS_DEL. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.4: (DELETING, KEYS) BANK.CLIENTS -> ANBO_STG_BANK_CLIENTS_DEL. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ----------------------------------- 5. Mark deleted records in the target table --------------------------------------
# ANBO_DWH_DIM_TERMINALS_HIST INSERT: add a DELETED_FLG='Y' version for every
# open terminal record whose key is no longer present in the source key set.
try:
    sql_req = """
        INSERT INTO ANBO_DWH_DIM_TERMINALS_HIST( TERMINAL_ID, TERMINAL_TYPE, TERMINAL_CITY, TERMINAL_ADDRESS, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            t.TERMINAL_ID, t.TERMINAL_TYPE, t.TERMINAL_CITY, t.TERMINAL_ADDRESS,
            sysdate,
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'Y'
        FROM ANBO_DWH_DIM_TERMINALS_HIST t
        LEFT JOIN ANBO_STG_TERMINALS_DEL s
            ON t.TERMINAL_ID = s.TERMINAL_ID
            AND deleted_flg = 'N'
        WHERE s.TERMINAL_ID IS NULL
            AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_TERMINALS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_TERMINALS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_TERMINALS_HIST UPDATE: close the previous open record of each
# deleted terminal one second before "now".
try:
    sql_req = """
        UPDATE ANBO_DWH_DIM_TERMINALS_HIST
        SET EFFECTIVE_TO = sysdate - interval '1' second
        WHERE TERMINAL_ID in (
            SELECT
                t.TERMINAL_ID
            FROM ANBO_DWH_DIM_TERMINALS_HIST t
            LEFT JOIN ANBO_STG_TERMINALS_DEL s
                ON t.TERMINAL_ID = s.TERMINAL_ID
            WHERE s.TERMINAL_ID IS NULL
                AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
                AND deleted_flg = 'N')
        AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        AND EFFECTIVE_FROM < sysdate
        AND deleted_flg = 'N'
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_TERMINALS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_TERMINALS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_ACCOUNTS_HIST INSERT: add a DELETED_FLG='Y' version for every
# open account record whose key is no longer present in the source key set.
try:
    sql_req = """
        INSERT INTO ANBO_DWH_DIM_ACCOUNTS_HIST( ACCOUNT_NUM, VALID_TO, CLIENT, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            t.ACCOUNT_NUM, t.VALID_TO, t.CLIENT,
            sysdate,
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'Y'
        FROM ANBO_DWH_DIM_ACCOUNTS_HIST t
        LEFT JOIN ANBO_STG_BANK_ACCOUNTS_DEL s
            ON t.ACCOUNT_NUM = s.ACCOUNT
            AND deleted_flg = 'N'
        WHERE s.ACCOUNT IS NULL
            AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_ACCOUNTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_ACCOUNTS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_ACCOUNTS_HIST UPDATE: close the previous open record of each
# deleted account one second before "now".
try:
    sql_req = """
        UPDATE ANBO_DWH_DIM_ACCOUNTS_HIST
        SET EFFECTIVE_TO = sysdate - interval '1' second
        WHERE ACCOUNT_NUM in (
            SELECT t.ACCOUNT_NUM
            FROM ANBO_DWH_DIM_ACCOUNTS_HIST t
            LEFT JOIN ANBO_STG_BANK_ACCOUNTS_DEL s
                ON t.ACCOUNT_NUM = s.ACCOUNT
                AND deleted_flg = 'N'
            WHERE s.ACCOUNT IS NULL
                AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
                AND deleted_flg = 'N')
        AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        AND EFFECTIVE_FROM < sysdate
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_ACCOUNTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_ACCOUNTS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_CARDS_HIST INSERT: add a DELETED_FLG='Y' version for every open
# card record whose key is no longer present in the source key set.
try:
    sql_req = """
        INSERT INTO ANBO_DWH_DIM_CARDS_HIST( CARD_NUM, ACCOUNT_NUM, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            t.CARD_NUM, t.ACCOUNT_NUM,
            sysdate,
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'Y'
        FROM ANBO_DWH_DIM_CARDS_HIST t
        LEFT JOIN ANBO_STG_BANK_CARDS_DEL s
            ON t.CARD_NUM = s.CARD_NUM
            AND deleted_flg = 'N'
        WHERE s.CARD_NUM IS NULL
            AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_CARDS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_CARDS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_CARDS_HIST UPDATE: close the previous open record of each
# deleted card one second before "now".
try:
    sql_req = """
        UPDATE ANBO_DWH_DIM_CARDS_HIST
        SET EFFECTIVE_TO = sysdate - interval '1' second
        WHERE CARD_NUM in (
            SELECT t.CARD_NUM
            FROM ANBO_DWH_DIM_CARDS_HIST t
            LEFT JOIN ANBO_STG_BANK_CARDS_DEL s
                ON t.CARD_NUM = s.CARD_NUM
            WHERE s.CARD_NUM IS NULL
                AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
                AND deleted_flg = 'N')
        AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        AND EFFECTIVE_FROM < sysdate
        AND deleted_flg = 'N'
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_CARDS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_CARDS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_CLIENTS_HIST INSERT: add a DELETED_FLG='Y' version for every
# open client record whose key is no longer present in the source key set.
# Fix: the success log previously said ANBO_DWH_DIM_CARDS_HIST (copy-paste
# slip); it now names the table this section actually writes to.
try:
    sql_req = """
        INSERT INTO ANBO_DWH_DIM_CLIENTS_HIST( CLIENT_ID, LAST_NAME, FIRST_NAME, PATRONYMIC, DATE_OF_BIRTH, PASSPORT_NUM, PASSPORT_VALID_TO, PHONE, EFFECTIVE_FROM, EFFECTIVE_TO, DELETED_FLG )
        SELECT
            t.CLIENT_ID, t.LAST_NAME, t.FIRST_NAME, t.PATRONYMIC, t.DATE_OF_BIRTH, t.PASSPORT_NUM, t.PASSPORT_VALID_TO, t.PHONE,
            sysdate,
            TO_DATE( '2999-12-31', 'YYYY-MM-DD' ),
            'Y'
        FROM ANBO_DWH_DIM_CLIENTS_HIST t
        LEFT JOIN ANBO_STG_BANK_CLIENTS_DEL s
            ON t.CLIENT_ID = s.CLIENT_ID
            AND deleted_flg = 'N'
        WHERE s.CLIENT_ID IS NULL
            AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_CLIENTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error(
        "SCD-2.5: (DELETING, INSERT, FLAG = 'Y') ANBO_DWH_DIM_CLIENTS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_CLIENTS_HIST UPDATE: close the previous open record of each
# deleted client one second before "now".
# Fix: the success log previously said ANBO_DWH_DIM_CARDS_HIST (copy-paste
# slip); it now names the table this section actually updates.
try:
    sql_req = """
        UPDATE ANBO_DWH_DIM_CLIENTS_HIST
        SET EFFECTIVE_TO = sysdate - interval '1' second
        WHERE CLIENT_ID in (
            SELECT t.CLIENT_ID
            FROM ANBO_DWH_DIM_CLIENTS_HIST t
            LEFT JOIN ANBO_STG_BANK_CLIENTS_DEL s
                ON t.CLIENT_ID = s.CLIENT_ID
            WHERE s.CLIENT_ID IS NULL
                AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
                AND deleted_flg = 'N')
        AND EFFECTIVE_TO = TO_DATE( '2999-12-31', 'YYYY-MM-DD' )
        AND EFFECTIVE_FROM < sysdate
        AND deleted_flg = 'N'
        """
    curs.execute(sql_req)
    logging.info("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_CLIENTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.5: (DELETING, UPDATE) ANBO_DWH_DIM_CLIENTS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ------------------------------- 6. Update metadata - the maximum loaded date -----------------------------------------
# ANBO_DWH_DIM_TERMINALS_HIST: record the newest UPLOAD_DT seen in the draft,
# skipping the update when the staging table is empty (subquery IS NOT NULL guard).
try:
    sql_req = """
        UPDATE
            ANBO_META_LOADING
        SET
            last_update = ( SELECT
                                MAX( TO_DATE(UPLOAD_DT, 'DD.MM.YYYY') )
                            FROM
                                DEMIPT.ANBO_STG_TERMINALS_DRFT )
        WHERE
            dbname = 'DEMIPT'
            AND tablename = 'ANBO_DWH_DIM_TERMINALS_HIST'
            AND ( SELECT MAX( TO_DATE(UPLOAD_DT, 'DD.MM.YYYY') )
                  FROM DEMIPT.ANBO_STG_TERMINALS_DRFT ) IS NOT NULL
        """
    curs.execute(sql_req)
    logging.info("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_TERMINALS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_TERMINALS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_FACT_PSSPRT_BLCKLST: record the newest blacklist ENTRY_DT loaded.
try:
    sql_req = """
        UPDATE
            ANBO_META_LOADING
        SET
            last_update = ( SELECT
                                MAX( TO_DATE( ENTRY_DT, 'YYYY-MM-DD' ))
                            FROM
                                DEMIPT.ANBO_STG_PSSPRT_BLCKLST )
        WHERE
            dbname = 'DEMIPT'
            AND tablename = 'ANBO_DWH_FACT_PSSPRT_BLCKLST'
            AND (SELECT MAX( TO_DATE(ENTRY_DT, 'YYYY-MM-DD' ) )
                 FROM DEMIPT.ANBO_STG_PSSPRT_BLCKLST ) IS NOT NULL
        """
    curs.execute(sql_req)
    logging.info("SCD-2.6: (METADATA UPDATE) ANBO_DWH_FACT_PSSPRT_BLCKLST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.6: (METADATA UPDATE) ANBO_DWH_FACT_PSSPRT_BLCKLST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_ACCOUNTS_HIST: record the newest account change date loaded.
# Fix: unlike every sibling SCD-2.6 section, the except branch did not call
# exit_hadler, so a failure here logged an error but let the script continue
# (and later commit); it now terminates consistently.
try:
    sql_req = """
        UPDATE
            DEMIPT.ANBO_META_LOADING
        SET
            last_update = ( SELECT
                                MAX( COALESCE( UPDATE_DT, CREATE_DT ) )
                            FROM
                                DEMIPT.ANBO_STG_BANK_ACCOUNTS )
        WHERE
            dbname = 'DEMIPT'
            AND tablename = 'ANBO_DWH_DIM_ACCOUNTS_HIST'
            AND ( SELECT MAX( COALESCE( UPDATE_DT, CREATE_DT ) )
                  FROM DEMIPT.ANBO_STG_BANK_ACCOUNTS ) IS NOT NULL
        """
    curs.execute(sql_req)
    logging.info("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_ACCOUNTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_ACCOUNTS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_CLIENTS_HIST: record the newest client change date loaded,
# skipping the update when the staging table is empty.
try:
    sql_req = """
        UPDATE
            DEMIPT.ANBO_META_LOADING
        SET
            last_update = ( SELECT
                                MAX( COALESCE( UPDATE_DT, CREATE_DT ) )
                            FROM
                                DEMIPT.ANBO_STG_BANK_CLIENTS )
        WHERE
            dbname = 'DEMIPT'
            AND tablename = 'ANBO_DWH_DIM_CLIENTS_HIST'
            AND ( SELECT MAX( COALESCE( UPDATE_DT, CREATE_DT ) )
                  FROM DEMIPT.ANBO_STG_BANK_CLIENTS ) IS NOT NULL
        """
    curs.execute(sql_req)
    logging.info("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_CLIENTS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_CLIENTS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_FACT_TRANSACTIONS: record the newest transaction date loaded,
# skipping the update when the staging table is empty.
try:
    sql_req = """
        UPDATE
            DEMIPT.ANBO_META_LOADING
        SET
            last_update = ( SELECT
                                MAX( TO_DATE(TRANSACTION_DATE,'YYYY-MM-DD HH24:MI:SS') )
                            FROM
                                DEMIPT.ANBO_STG_TRANSACTIONS )
        WHERE
            dbname = 'DEMIPT'
            AND tablename = 'ANBO_DWH_FACT_TRANSACTIONS'
            AND ( SELECT MAX( TO_DATE(TRANSACTION_DATE,'YYYY-MM-DD HH24:MI:SS') )
                  FROM DEMIPT.ANBO_STG_TRANSACTIONS ) IS NOT NULL
        """
    curs.execute(sql_req)
    logging.info("SCD-2.6: (METADATA UPDATE) ANBO_DWH_FACT_TRANSACTIONS. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.6: (METADATA UPDATE) ANBO_DWH_FACT_TRANSACTIONS. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ANBO_DWH_DIM_CARDS_HIST: record the newest card change date loaded.
# Fix: unlike every sibling SCD-2.6 section, the except branch did not call
# exit_hadler, so a failure here logged an error but let the script continue
# (and later commit); it now terminates consistently.
try:
    sql_req = """
        UPDATE
            DEMIPT.ANBO_META_LOADING
        SET
            last_update = ( SELECT
                                MAX( COALESCE( UPDATE_DT, CREATE_DT ) )
                            FROM
                                DEMIPT.ANBO_STG_BANK_CARDS )
        WHERE
            dbname = 'DEMIPT'
            AND tablename = 'ANBO_DWH_DIM_CARDS_HIST'
            AND ( SELECT MAX( COALESCE( UPDATE_DT, CREATE_DT ) )
                  FROM DEMIPT.ANBO_STG_BANK_CARDS ) IS NOT NULL
        """
    curs.execute(sql_req)
    logging.info("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_CARDS_HIST. OK")
except Exception as exc:
    # Report the exception and exit.
    if DEBUG:
        print(sql_req)
    logging.error("SCD-2.6: (METADATA UPDATE) ANBO_DWH_DIM_CARDS_HIST. \n Detailed information: {}".format(exc))
    exit_hadler(curs, conn)
# ---------------------------------------------- 7. Commit the transaction ---------------------------------------------
# Commit the warehouse changes unless debug mode is on.
if not DEBUG:
    try:
        conn.commit()
        logging.info("SCD-2.7: All data loaded successfully. Transaction completed successfully. \n\n")
    except Exception as exc:
        logging.error(
            "SCD-2.7:An error occurred while committing the transaction. "
            "\n Detailed information: {EXCEPTION}".format(EXCEPTION=exc))
        exit_hadler(curs, conn)
if DEBUG:
    # In debug mode every statement is committed automatically after execution.
    logging.info("SCD-2.7: All data loaded successfully. Transaction completed successfully. \n\n")
# Close the cursor and the connection.
curs.close()
conn.close()
# NOTE(review): dead assignment at the very end of the script — nothing reads
# stg_user_name after this point; looks like a leftover.
stg_user_name = "DEMIPT"
| 34.705012 | 194 | 0.65385 | 5,672 | 42,236 | 4.611777 | 0.103315 | 0.032648 | 0.035553 | 0.030583 | 0.734383 | 0.714886 | 0.69279 | 0.678913 | 0.660601 | 0.630706 | 0 | 0.013337 | 0.236646 | 42,236 | 1,216 | 195 | 34.733553 | 0.79799 | 0.17246 | 0 | 0.651332 | 0 | 0.038741 | 0.600575 | 0.145029 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006053 | false | 0.021792 | 0.007264 | 0 | 0.020581 | 0.044794 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
3a0a66a467711d816328c502d22ac9abc41bc4d5 | 71 | py | Python | db.py | ATolkachev/app | 63a52f0083f44101ecd5dd36258b65622df88e1d | [
"MIT"
] | null | null | null | db.py | ATolkachev/app | 63a52f0083f44101ecd5dd36258b65622df88e1d | [
"MIT"
] | null | null | null | db.py | ATolkachev/app | 63a52f0083f44101ecd5dd36258b65622df88e1d | [
"MIT"
] | null | null | null | dbname='postgres'
user='postgres'
password='q1w2e3r4'
host='127.0.0.1'
| 14.2 | 19 | 0.732394 | 11 | 71 | 4.727273 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.149254 | 0.056338 | 71 | 4 | 20 | 17.75 | 0.626866 | 0 | 0 | 0 | 0 | 0 | 0.464789 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.25 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
3a150397370ad65157366cc327d74ab638964742 | 201 | py | Python | backend/backend/urls.py | codepanda64/logs-and-metas-for-stations | c2fca7b149eaea25b4df45e6cb4af01cc8ad7204 | [
"Apache-2.0"
] | null | null | null | backend/backend/urls.py | codepanda64/logs-and-metas-for-stations | c2fca7b149eaea25b4df45e6cb4af01cc8ad7204 | [
"Apache-2.0"
] | null | null | null | backend/backend/urls.py | codepanda64/logs-and-metas-for-stations | c2fca7b149eaea25b4df45e6cb4af01cc8ad7204 | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from django.urls import path, include, re_path
# Root URL map: Django admin plus the versioned v1 API.
urlpatterns = [
    path("admin/", admin.site.urls),
    # Namespaced include so reverse() can resolve "v1:..." URL names.
    path("api/v1/", include("backend.v1.urls", namespace="v1")),
]
| 25.125 | 64 | 0.691542 | 28 | 201 | 4.928571 | 0.535714 | 0.144928 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017341 | 0.139303 | 201 | 7 | 65 | 28.714286 | 0.780347 | 0 | 0 | 0 | 0 | 0 | 0.149254 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
3a150870e6339652dc4750aa553a169ca96dd692 | 122 | py | Python | canvas_todo/todo/update.py | ryansingman/canvas-todo | ec37cb06c03fe942a8c57b98bc7b9f0086c6e661 | [
"MIT"
] | null | null | null | canvas_todo/todo/update.py | ryansingman/canvas-todo | ec37cb06c03fe942a8c57b98bc7b9f0086c6e661 | [
"MIT"
] | null | null | null | canvas_todo/todo/update.py | ryansingman/canvas-todo | ec37cb06c03fe942a8c57b98bc7b9f0086c6e661 | [
"MIT"
] | null | null | null | from enum import Enum
class Update(Enum):
    """Kinds of updates that can be applied to a task list."""

    # Insert a brand-new task.
    ADD = 0
    # Flag an existing task as done.
    MARK_COMPLETE = 1
| 13.555556 | 35 | 0.622951 | 16 | 122 | 4.6875 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022989 | 0.286885 | 122 | 8 | 36 | 15.25 | 0.83908 | 0.229508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
3a188a71970858dff957b7b2d5f99b0b3859f78c | 166 | py | Python | tests/conftest.py | gzxultra/pyradix | f28828194f2531f4ce010839ccdd85fcdf51b133 | [
"MIT"
] | 1 | 2020-11-16T22:35:43.000Z | 2020-11-16T22:35:43.000Z | tests/conftest.py | gzxultra/pyradix | f28828194f2531f4ce010839ccdd85fcdf51b133 | [
"MIT"
] | null | null | null | tests/conftest.py | gzxultra/pyradix | f28828194f2531f4ce010839ccdd85fcdf51b133 | [
"MIT"
] | 1 | 2020-11-16T22:35:44.000Z | 2020-11-16T22:35:44.000Z | import pytest
from random import randint
@pytest.fixture()
def array1():
    """Yield a list of random ints whose length is itself random (10000-20000)."""
    size = randint(10000, 20000)
    yield [randint(0, 99999) for _ in range(size)]
| 18.444444 | 55 | 0.698795 | 24 | 166 | 4.833333 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 0.186747 | 166 | 8 | 56 | 20.75 | 0.725926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
28d05daa203e5171de019222b23186197f65d0fe | 221 | py | Python | geminidr/gsaoi/recipes/ql/recipes_IMAGE.py | DBerke/DRAGONS | cecf9a03970af95126bd17a227bd5214a5d6c64b | [
"BSD-3-Clause"
] | 19 | 2017-10-23T14:52:51.000Z | 2022-03-28T04:49:00.000Z | geminidr/gsaoi/recipes/ql/recipes_IMAGE.py | DBerke/DRAGONS | cecf9a03970af95126bd17a227bd5214a5d6c64b | [
"BSD-3-Clause"
] | 194 | 2017-11-01T17:32:45.000Z | 2022-03-31T21:32:59.000Z | geminidr/gsaoi/recipes/ql/recipes_IMAGE.py | DBerke/DRAGONS | cecf9a03970af95126bd17a227bd5214a5d6c64b | [
"BSD-3-Clause"
] | 16 | 2017-11-01T05:18:04.000Z | 2021-12-14T23:08:57.000Z | """
Recipes available to data with tags ['GSAOI', IMAGE'].
Default is "reduce_nostack".
"""
# Dataset tags this recipe module applies to.
recipe_tags = {'GSAOI', 'IMAGE'}
from geminidr.gsaoi.recipes.sq.recipes_IMAGE import reduce_nostack
# The quick-look default simply reuses the science-quality reduce_nostack.
_default = reduce_nostack
| 20.090909 | 66 | 0.746606 | 29 | 221 | 5.482759 | 0.586207 | 0.245283 | 0.176101 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.122172 | 221 | 10 | 67 | 22.1 | 0.819588 | 0.375566 | 0 | 0 | 0 | 0 | 0.076923 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
28de0fafbd04a5e7936d12ab0efbd4c7473b328c | 124 | py | Python | datasets/dewiki_10000.py | ionicsolutions/kokolores | 179db18d384ce31645bcce3506924ac235723309 | [
"MIT"
] | 1 | 2021-06-15T14:38:45.000Z | 2021-06-15T14:38:45.000Z | datasets/dewiki_10000.py | ionicsolutions/kokolores | 179db18d384ce31645bcce3506924ac235723309 | [
"MIT"
] | null | null | null | datasets/dewiki_10000.py | ionicsolutions/kokolores | 179db18d384ce31645bcce3506924ac235723309 | [
"MIT"
] | null | null | null | from creator import Creator
# Build a "dewiki" dataset of 10000 entries, fetched in batches of 100 and
# written to the bz2-compressed JSON file named below.
c = Creator("dewiki")
dataset = c.create(10000, batch_size=100, fname="dewiki_10000.json.bz2")
| 24.8 | 72 | 0.758065 | 19 | 124 | 4.842105 | 0.736842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126126 | 0.104839 | 124 | 4 | 73 | 31 | 0.702703 | 0 | 0 | 0 | 0 | 0 | 0.217742 | 0.169355 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
e92225014fd4366e06f266caa955042ae1ff660c | 192 | py | Python | Data Structures/Stacks and Queues/queue/solutions/queues_arrays_solution_01.py | michal0janczyk/udacity_data_structures_and_algorithms_nanodegree | 3ec4bb94158d4dee59056703e63cb0fab07cb18c | [
"Unlicense"
] | 1 | 2021-09-27T10:18:14.000Z | 2021-09-27T10:18:14.000Z | Data Structures/Stacks and Queues/queue/solutions/queues_arrays_solution_01.py | michal0janczyk/udacity_data_structures_and_algorithms_nanodegree | 3ec4bb94158d4dee59056703e63cb0fab07cb18c | [
"Unlicense"
] | 1 | 2021-05-10T18:11:07.000Z | 2021-05-10T18:11:07.000Z | queue/solutions/queues_arrays_solution_01.py | henryto/ds | 514bd20c933cf05f8f6550add1fc3df28f3eac0b | [
"BSD-3-Clause"
] | null | null | null | class Queue:
def __init__(self, initial_size=10):
self.arr = [0 for _ in range(initial_size)]
self.next_index = 0
self.front_index = -1
self.queue_size = 0 | 27.428571 | 51 | 0.604167 | 28 | 192 | 3.785714 | 0.607143 | 0.207547 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044444 | 0.296875 | 192 | 7 | 52 | 27.428571 | 0.740741 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
e92a162d33fc0457794d97a159c5b401e115154f | 669 | py | Python | backend/nft_market/contracts/assets/helpers/state.py | DedicatedDev/OpenNFT | 218f6583316dcf8438789c1f6a51b110d53d22b7 | [
"MIT"
] | 87 | 2021-05-18T14:11:38.000Z | 2022-03-29T21:51:06.000Z | backend/nft_market/contracts/assets/helpers/state.py | DedicatedDev/OpenNFT | 218f6583316dcf8438789c1f6a51b110d53d22b7 | [
"MIT"
] | 17 | 2021-06-08T22:03:20.000Z | 2022-02-04T19:07:10.000Z | backend/nft_market/contracts/assets/helpers/state.py | DedicatedDev/OpenNFT | 218f6583316dcf8438789c1f6a51b110d53d22b7 | [
"MIT"
] | 69 | 2021-06-08T07:07:41.000Z | 2022-03-30T21:46:04.000Z | from pyteal import *
class State:
    """
    Wrapper around state vars.

    Abstract base: `put`/`get` must be provided by a subclass that decides
    whether the variable lives in local or global application state.
    """
    def __init__(self, name: str):
        # Key under which the value is stored in the application's state.
        self._name = name
    def put(self, value) -> App:
        raise NotImplementedError
    def get(self) -> App:
        raise NotImplementedError
class LocalState(State):
    """State variable stored in the sender's (account index 0) local state."""
    def put(self, value) -> App:
        return App.localPut(Int(0), Bytes(self._name), value)
    def get(self) -> App:
        return App.localGet(Int(0), Bytes(self._name))
class GlobalState(State):
    """State variable stored in the application's global state."""
    def put(self, value) -> App:
        return App.globalPut(Bytes(self._name), value)
    def get(self) -> App:
        return App.globalGet(Bytes(self._name))
| 20.272727 | 61 | 0.61136 | 84 | 669 | 4.761905 | 0.345238 | 0.12 | 0.12 | 0.1125 | 0.4575 | 0.36 | 0.36 | 0.36 | 0.2 | 0.2 | 0 | 0.004032 | 0.258595 | 669 | 32 | 62 | 20.90625 | 0.802419 | 0.038864 | 0 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.388889 | false | 0 | 0.055556 | 0.222222 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
3aa41a76b466bc13ce2faeb327e0fcc6a9193fb3 | 46 | py | Python | utils/state_constants.py | xyqyear/MinecraftChunkCache | f0f16e798e69738eb754e7dd0ec4b392e88bc87f | [
"MIT-feh"
] | 1 | 2020-03-07T14:51:39.000Z | 2020-03-07T14:51:39.000Z | utils/state_constants.py | xyqyear/MinecraftChunkCache | f0f16e798e69738eb754e7dd0ec4b392e88bc87f | [
"MIT-feh"
] | null | null | null | utils/state_constants.py | xyqyear/MinecraftChunkCache | f0f16e798e69738eb754e7dd0ec4b392e88bc87f | [
"MIT-feh"
] | null | null | null | HANDSHAKING = 0
# Remaining connection-state IDs (HANDSHAKING = 0 precedes these);
# presumably these mirror the vanilla Minecraft protocol's state numbers —
# TODO confirm against the protocol documentation.
STATUS = 1
LOGIN = 2
PLAY = 3
| 9.2 | 15 | 0.652174 | 8 | 46 | 3.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 0.26087 | 46 | 4 | 16 | 11.5 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
3af782e7d16065c9fc8699b4c213e60df98b0ed9 | 145 | py | Python | test/test.py | kamzadias/Python1_WebScrapping | 4bd65836e1ae5f49f80fbe8d9a71a451b27c7ec1 | [
"MIT"
] | null | null | null | test/test.py | kamzadias/Python1_WebScrapping | 4bd65836e1ae5f49f80fbe8d9a71a451b27c7ec1 | [
"MIT"
] | null | null | null | test/test.py | kamzadias/Python1_WebScrapping | 4bd65836e1ae5f49f80fbe8d9a71a451b27c7ec1 | [
"MIT"
] | null | null | null | from Assigment2 import Scrapper
# Scrape entries for a user-supplied coin name and print each result's
# text, stripped of surrounding whitespace.
scrapper = Scrapper()
# Renamed from `list` — the original name shadowed the builtin `list`.
results = scrapper.get(input("Enter coin: "))
for item in results:
    print(item.text.strip())
| 18.125 | 42 | 0.717241 | 20 | 145 | 5.2 | 0.75 | 0.307692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00813 | 0.151724 | 145 | 7 | 43 | 20.714286 | 0.837398 | 0 | 0 | 0 | 0 | 0 | 0.082759 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
aaeb56e8816eccac195796c8d3e3c8823fe07458 | 1,148 | py | Python | bqskit/ir/gates/parameterized/__init__.py | jkalloor3/bqskit | ad34a6eae3c0e62d2bd960cd4cd841ba8e845811 | [
"BSD-3-Clause-LBNL"
] | null | null | null | bqskit/ir/gates/parameterized/__init__.py | jkalloor3/bqskit | ad34a6eae3c0e62d2bd960cd4cd841ba8e845811 | [
"BSD-3-Clause-LBNL"
] | null | null | null | bqskit/ir/gates/parameterized/__init__.py | jkalloor3/bqskit | ad34a6eae3c0e62d2bd960cd4cd841ba8e845811 | [
"BSD-3-Clause-LBNL"
] | null | null | null | """This package contains parameterized gates."""
from __future__ import annotations
from bqskit.ir.gates.parameterized.crx import CRXGate
from bqskit.ir.gates.parameterized.cry import CRYGate
from bqskit.ir.gates.parameterized.crz import CRZGate
from bqskit.ir.gates.parameterized.pauli import PauliGate
from bqskit.ir.gates.parameterized.rx import RXGate
from bqskit.ir.gates.parameterized.rxx import RXXGate
from bqskit.ir.gates.parameterized.ry import RYGate
from bqskit.ir.gates.parameterized.ryy import RYYGate
from bqskit.ir.gates.parameterized.rz import RZGate
from bqskit.ir.gates.parameterized.rzz import RZZGate
from bqskit.ir.gates.parameterized.u1 import U1Gate
from bqskit.ir.gates.parameterized.u2 import U2Gate
from bqskit.ir.gates.parameterized.u3 import U3Gate
from bqskit.ir.gates.parameterized.u8 import U8Gate
from bqskit.ir.gates.parameterized.unitary import VariableUnitaryGate
# Explicit public API of the parameterized-gates package; mirrors the
# import list above one-to-one.
__all__ = [
    'CRXGate',
    'CRYGate',
    'CRZGate',
    'PauliGate',
    'RXGate',
    'RXXGate',
    'RYGate',
    'RYYGate',
    'RZGate',
    'RZZGate',
    'U1Gate',
    'U2Gate',
    'U3Gate',
    'U8Gate',
    'VariableUnitaryGate',
]
| 31.027027 | 69 | 0.769164 | 145 | 1,148 | 6.034483 | 0.289655 | 0.171429 | 0.205714 | 0.291429 | 0.514286 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012024 | 0.130662 | 1,148 | 36 | 70 | 31.888889 | 0.864729 | 0.036585 | 0 | 0 | 0 | 0 | 0.101818 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.484848 | 0 | 0.484848 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
c900dbdd0d9e81dafdd80784f2f5a8551ee6ece3 | 1,490 | py | Python | xenon_worker/commands/context.py | NicCardozo/xenon-worker | 90915e9738234db28a7a2dea63dd1f5fa7a5ecea | [
"MIT"
] | null | null | null | xenon_worker/commands/context.py | NicCardozo/xenon-worker | 90915e9738234db28a7a2dea63dd1f5fa7a5ecea | [
"MIT"
] | null | null | null | xenon_worker/commands/context.py | NicCardozo/xenon-worker | 90915e9738234db28a7a2dea63dd1f5fa7a5ecea | [
"MIT"
] | null | null | null | from ..connection.entities import Snowflake
class Context:
    """Carries a received message together with the client and shard that
    received it, and exposes convenience accessors for the surrounding
    entities (channel, guild, roles, ...).
    """

    def __init__(self, client, shard_id, msg):
        self.client = client
        self.shard_id = shard_id
        self.msg = msg
        # Populated later by cmd.execute once a command has been resolved.
        self.last_cmd = None

    @property
    def bot(self):
        """Alias for the client that produced this context."""
        return self.client

    @property
    def f(self):
        """Shortcut to the client's formatter."""
        return self.client.f

    async def get_channel(self):
        """Fetch the channel the message was sent in."""
        return await self.client.get_channel(self.msg.channel_id)

    async def get_guild(self):
        """Fetch the guild the message belongs to."""
        return await self.client.get_guild(self.msg.guild_id)

    async def get_full_guild(self, cache=True):
        """Fetch the full guild object (``cache`` kept for API compatibility;
        it is not consulted here)."""
        return await self.client.get_full_guild(self.msg.guild_id)

    async def get_bot_member(self):
        """Fetch the bot's own member entry for this guild."""
        return await self.client.get_bot_member(self.msg.guild_id)

    async def get_guild_channels(self):
        """Fetch every channel of this guild."""
        return await self.client.get_guild_channels(self.msg.guild_id)

    async def get_guild_roles(self):
        """Fetch every role of this guild."""
        return await self.client.get_guild_roles(self.msg.guild_id)

    def f_send(self, *args, **kwargs):
        """Formatted send into the message's channel."""
        target = Snowflake(self.msg.channel_id)
        return self.bot.f_send(target, *args, **kwargs)

    def send_message(self, *args, **kwargs):
        """Raw send into the message's channel."""
        target = Snowflake(self.msg.channel_id)
        return self.client.send_message(target, *args, **kwargs)

    def send(self, *args, **kwargs):
        """Alias for :meth:`send_message`."""
        return self.send_message(*args, **kwargs)

    def invoke(self, cmd):
        """Re-dispatch ``cmd`` using this context."""
        return self.client.invoke(self, cmd)

    def __getattr__(self, item):
        # Unknown attributes are transparently proxied to the message object.
        return getattr(self.msg, item)
| 29.215686 | 88 | 0.673826 | 214 | 1,490 | 4.481308 | 0.196262 | 0.12513 | 0.068822 | 0.131387 | 0.492179 | 0.423358 | 0.31074 | 0.212722 | 0.087591 | 0 | 0 | 0 | 0.218121 | 1,490 | 50 | 89 | 29.8 | 0.823176 | 0.014094 | 0 | 0.057143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.228571 | false | 0 | 0.028571 | 0.2 | 0.657143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
c9013be322d467e54d4aa0ef6cf564df389e42ce | 68 | py | Python | Beginner/2753.py | LorranSutter/URI-Online-Judge | aef885b9a7caa83484cf172e29eea8ec92fc3627 | [
"MIT"
] | null | null | null | Beginner/2753.py | LorranSutter/URI-Online-Judge | aef885b9a7caa83484cf172e29eea8ec92fc3627 | [
"MIT"
] | null | null | null | Beginner/2753.py | LorranSutter/URI-Online-Judge | aef885b9a7caa83484cf172e29eea8ec92fc3627 | [
"MIT"
] | null | null | null | for k in range(26):
print('{} e {}'.format(97 + k, chr(97 + k))) | 34 | 48 | 0.5 | 13 | 68 | 2.615385 | 0.769231 | 0.176471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113208 | 0.220588 | 68 | 2 | 48 | 34 | 0.528302 | 0 | 0 | 0 | 0 | 0 | 0.101449 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
c906776e9f6c6393fbf3195a157bf56ddeb29913 | 409 | py | Python | invenio_communities/views/__init__.py | effervescent-shot/invenio-communities | 449bda37cdfbc1a85b2831b9630a38f134b05cbd | [
"MIT"
] | null | null | null | invenio_communities/views/__init__.py | effervescent-shot/invenio-communities | 449bda37cdfbc1a85b2831b9630a38f134b05cbd | [
"MIT"
] | null | null | null | invenio_communities/views/__init__.py | effervescent-shot/invenio-communities | 449bda37cdfbc1a85b2831b9630a38f134b05cbd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2021 CERN.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Community views."""
from .api import create_communities_api_blueprint
from .ui import create_ui_blueprint
# Blueprint factories re-exported as this package's public API.
__all__ = (
    'create_communities_api_blueprint',
    'create_ui_blueprint'
)
| 22.722222 | 76 | 0.738386 | 57 | 409 | 5.052632 | 0.719298 | 0.083333 | 0.138889 | 0.201389 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014577 | 0.161369 | 409 | 17 | 77 | 24.058824 | 0.825073 | 0.537897 | 0 | 0 | 0 | 0 | 0.288136 | 0.180791 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.666667 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 3 |
c919561bfcf220ed3a5c96d5a7b4315fbc55e917 | 539 | py | Python | 1_Bim/carlos_gabriel_03_trab1.py | eucgabriel/Faculdade-2--Semestre | d2994f94fe329fd94a862eddabd536c691ad796c | [
"MIT"
] | 1 | 2022-01-29T03:56:12.000Z | 2022-01-29T03:56:12.000Z | 1_Bim/carlos_gabriel_03_trab1.py | eucgabriel/Faculdade-2--Semestre | d2994f94fe329fd94a862eddabd536c691ad796c | [
"MIT"
] | null | null | null | 1_Bim/carlos_gabriel_03_trab1.py | eucgabriel/Faculdade-2--Semestre | d2994f94fe329fd94a862eddabd536c691ad796c | [
"MIT"
] | null | null | null | # Recebe notas
# Read the swimmer's age and print the matching competition category.
nado = int(input("Qual a idade do nadador?"))
# Age bands, checked with chained comparisons.
if 5 <= nado <= 7:
    print("A categoria do nadador é Infantil A!!")
elif 8 <= nado <= 10:
    print("A categoria do nadador é Infantil B!!")
elif 11 <= nado <= 13:
    print("A categoria do nadador é Juvenil A!!")
elif 14 <= nado <= 17:
    print("A categoria do nadador é Juvenil B!!")
elif nado >= 18:
    print("A categoria do nadador é Adulto!!")
else:
    print("O nadador é muito jovem ainda pra estar em uma categoria!!")
| 33.6875 | 71 | 0.641929 | 90 | 539 | 3.844444 | 0.433333 | 0.156069 | 0.216763 | 0.245665 | 0.447977 | 0.447977 | 0.375723 | 0 | 0 | 0 | 0 | 0.035211 | 0.209648 | 539 | 15 | 72 | 35.933333 | 0.776995 | 0.029685 | 0 | 0 | 0 | 0 | 0.501923 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.461538 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
c91e1171e8f0f747da101f86219c4f321ce1944b | 369 | py | Python | posts/admin.py | kbilak/Talker | ff1ed19d080e913da6852f4955602c920ac7411c | [
"MIT"
] | null | null | null | posts/admin.py | kbilak/Talker | ff1ed19d080e913da6852f4955602c920ac7411c | [
"MIT"
] | null | null | null | posts/admin.py | kbilak/Talker | ff1ed19d080e913da6852f4955602c920ac7411c | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import *
class PostAlbumAdmin(admin.StackedInline):
    """Stacked inline editor for PostAlbum rows shown inside the Post admin."""
    model = PostAlbum
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
    """Admin configuration for Post, with PostAlbum entries edited inline."""
    inlines = [PostAlbumAdmin]
    list_display = ['user', 'id', 'created']
    # NOTE(review): an inner Meta class has no effect on ModelAdmin
    # (it is a ModelForm/Model concept) — confirm whether it can be removed.
    class Meta:
        model = Post
class PostAlbumAdmin(admin.ModelAdmin):
pass | 21.705882 | 44 | 0.718157 | 40 | 369 | 6.6 | 0.575 | 0.143939 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173442 | 369 | 17 | 45 | 21.705882 | 0.865574 | 0 | 0 | 0 | 0 | 0 | 0.035135 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.076923 | 0.153846 | 0 | 0.692308 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
c933d17b2929b836f4c5887c20d170a5aca5db3f | 243 | py | Python | locators/product.py | testsibirtsv/opncrt_taqc | f2f4264b8c52b0c6666dd6f3bd461973780f4b99 | [
"Apache-2.0"
] | null | null | null | locators/product.py | testsibirtsv/opncrt_taqc | f2f4264b8c52b0c6666dd6f3bd461973780f4b99 | [
"Apache-2.0"
] | null | null | null | locators/product.py | testsibirtsv/opncrt_taqc | f2f4264b8c52b0c6666dd6f3bd461973780f4b99 | [
"Apache-2.0"
] | null | null | null | """
Locator definitions for the product detail page.
"""
from selenium.webdriver.common.by import By
from .base import BasePageLocators
# pylint: disable=too-few-public-methods
class ProductPageLocators(BasePageLocators):
    """
    Locators for elements on the product detail page.
    """
    # "Add to Cart" button, looked up by its element id.
    BTN_CART = (By.ID, 'button-cart')
| 17.357143 | 44 | 0.695473 | 28 | 243 | 6 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168724 | 243 | 13 | 45 | 18.692308 | 0.831683 | 0.201646 | 0 | 0 | 0 | 0 | 0.064327 | 0 | 0 | 0 | 0 | 0.153846 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
c950ce8a83e4f03bf83f9d17c7923c4513515e21 | 188 | py | Python | MetaStone/EnumFields/DatabaseModels/Metagenome/Gold.py | mrumming/MetaStonePhenoPointer | fc0b837b509ffec2db374723079a6fc97791f8e3 | [
"BSD-3-Clause"
] | null | null | null | MetaStone/EnumFields/DatabaseModels/Metagenome/Gold.py | mrumming/MetaStonePhenoPointer | fc0b837b509ffec2db374723079a6fc97791f8e3 | [
"BSD-3-Clause"
] | null | null | null | MetaStone/EnumFields/DatabaseModels/Metagenome/Gold.py | mrumming/MetaStonePhenoPointer | fc0b837b509ffec2db374723079a6fc97791f8e3 | [
"BSD-3-Clause"
] | null | null | null | GOLD_ANALYSIS_PROJECT_TYPE = (("undefined", "Unknown/Undefined"), ("metagenome_analysis", "Metagenome Analysis"),
("combined_assembly", "Combined Assembly"))
| 62.666667 | 113 | 0.643617 | 15 | 188 | 7.733333 | 0.6 | 0.310345 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.212766 | 188 | 2 | 114 | 94 | 0.783784 | 0 | 0 | 0 | 0 | 0 | 0.521277 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
c95468e6573f43dda4740f5677d1bb334981b4ed | 26,941 | py | Python | tests/_fmea/TestFMEA.py | rakhimov/rtk | adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63 | [
"BSD-3-Clause"
] | null | null | null | tests/_fmea/TestFMEA.py | rakhimov/rtk | adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63 | [
"BSD-3-Clause"
] | null | null | null | tests/_fmea/TestFMEA.py | rakhimov/rtk | adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63 | [
"BSD-3-Clause"
] | 2 | 2020-04-03T04:14:42.000Z | 2021-02-22T05:30:35.000Z | #!/usr/bin/env python -O
# -*- coding: utf-8 -*-
#
# rtk.tests.fmea.TestFMEA.py is part of The RTK Project
#
# All rights reserved.
# Copyright 2007 - 2017 Andrew Rowland andrew.rowland <AT> reliaqual <DOT> com
"""
This is the test class for testing the FMEA class.
"""
import unittest
from nose.plugins.attrib import attr
# We add this to ensure the imports within the rtk packages will work.
import sys
from os.path import dirname
sys.path.insert(
0,
dirname(dirname(dirname(__file__))) + "/rtk", )
from sqlalchemy.orm import scoped_session
from treelib import Tree
import Utilities as Utilities
from Configuration import Configuration
from dao import DAO
from dao import RTKMode
from dao import RTKMechanism
from dao import RTKCause
from dao import RTKControl
from dao import RTKAction
from analyses.fmea import dtcFMEA, dtmFMEA, dtmAction, dtmControl, dtmMode, dtmMechanism, dtmCause
__author__ = 'Andrew Rowland'
__email__ = 'andrew.rowland@reliaqual.com'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2014 Andrew "Weibullguy" Rowland'
class TestFMEADataModel(unittest.TestCase):
"""
Class for testing the FMEA model class.
"""
    def setUp(self):
        """
        Sets up the test fixture for the FMEA model class.
        """
        # Throw-away program configuration pointing at a local SQLite file.
        self.Configuration = Configuration()
        self.Configuration.RTK_BACKEND = 'sqlite'
        self.Configuration.RTK_PROG_INFO = {
            'host': 'localhost',
            'socket': 3306,
            'database': '/tmp/TestDB.rtk',
            'user': '',
            'password': ''
        }
        # Route debug/user logging to /tmp so the suite leaves no local files.
        self.Configuration.DEBUG_LOG = \
            Utilities.create_logger("RTK.debug", 'DEBUG', '/tmp/RTK_debug.log')
        self.Configuration.USER_LOG = \
            Utilities.create_logger("RTK.user", 'INFO', '/tmp/RTK_user.log')
        # Create a data access object and connect to a test database.
        self.dao = DAO()
        _database = self.Configuration.RTK_BACKEND + ':///' + \
            self.Configuration.RTK_PROG_INFO['database']
        self.dao.db_connect(_database)
        self.dao.RTK_SESSION.configure(
            bind=self.dao.engine, autoflush=False, expire_on_commit=False)
        self.session = scoped_session(self.dao.RTK_SESSION)
        # Device under test: the FMEA data model backed by the test DAO.
        self.DUT = dtmFMEA(self.dao)
@attr(all=True, unit=True)
def test00_FMEA_create(self):
"""
(TestFMEAModel) __init__ should return instance of FMEA data model
"""
self.assertTrue(isinstance(self.DUT, dtmFMEA))
self.assertTrue(isinstance(self.DUT.dtm_mode, dtmMode))
# self.assertTrue(isinstance(self.DUT.dtm_mechanism, Mechanism))
# self.assertTrue(isinstance(self.DUT.dtm_cause, Cause))
self.assertTrue(isinstance(self.DUT.dtm_control, dtmControl))
self.assertTrue(isinstance(self.DUT.dtm_action, dtmAction))
@attr(all=True, unit=True)
def test01a_select_all_functional(self):
"""
(TestFMEAModel) select_all() should return a treelib Tree() on success when selecting a Functional FMEA
"""
_tree = self.DUT.select_all(3, functional=True)
self.assertTrue(isinstance(_tree, Tree))
@attr(all=True, unit=True)
def test01b_select_all_hardware(self):
"""
(TestFMEAModel) select_all() should return a treelib Tree() on success when selecting a Hardware FMEA
"""
_tree = self.DUT.select_all(3, functional=False)
self.assertTrue(isinstance(_tree, Tree))
@attr(all=True, unit=True)
def test01c_select_all_non_existent_hardware_id(self):
"""
(TestFMEAModel) select_all() should return an empty Tree() when passed a Hardware ID that doesn't exist.
"""
_tree = self.DUT.select_all(100, functional=False)
self.assertTrue(isinstance(_tree, Tree))
self.assertEqual(_tree.get_node(0).tag, 'FMEA')
self.assertEqual(_tree.get_node(1), None)
@attr(all=True, unit=True)
def test01d_select_all_non_existent_function_id(self):
"""
(TestFMEAModel) select_all() should return an empty Tree() when passed
a Function ID that doesn't exist.
"""
_tree = self.DUT.select_all(100, functional=True)
self.assertTrue(isinstance(_tree, Tree))
self.assertEqual(_tree.get_node(0).tag, 'FMEA')
self.assertEqual(_tree.get_node(1), None)
@attr(all=True, unit=True)
def test02a_select_mode(self):
"""
(TestFMEAModel) select() should return an instance of RTKMode on success.
"""
self.DUT.select_all(3, functional=True)
_entity = self.DUT.select('0.1')
self.assertTrue(isinstance(_entity, RTKMode))
self.assertEqual(_entity.description, 'Test Failure Mode #1')
@attr(all=True, unit=True)
def test02b_select_mechanism(self):
"""
(TestFMEAModel) select() should return an instance of RTKMechanism on success.
"""
self.DUT.select_all(3, functional=False)
_entity = self.DUT.select('0.1.1')
self.assertTrue(isinstance(_entity, RTKMechanism))
self.assertEqual(_entity.description, 'Test Failure Mechanism #1')
@attr(all=True, unit=True)
def test02c_select_cause(self):
"""
(TestFMEAModel) select() should return an instance of RTKCause on success.
"""
self.DUT.select_all(3, functional=False)
_entity = self.DUT.select('0.1.1.1')
self.assertTrue(isinstance(_entity, RTKCause))
self.assertEqual(_entity.description, 'Test Failure Cause #1')
@attr(all=True, unit=True)
def test02d_select_control_functional(self):
"""
(TestFMEAModel) select() should return an instance of RTKControl when selecting from a functional FMEA on success.
"""
self.DUT.select_all(3, functional=True)
_entity = self.DUT.select('0.1.01')
self.assertTrue(isinstance(_entity, RTKControl))
self.assertEqual(_entity.description, 'Functional FMEA control.')
@attr(all=True, unit=True)
def test02e_select_control_hardware(self):
"""
(TestFMEAModel) select() should return an instance of RTKControl when selecting from a hardware FMEA on success.
"""
self.DUT.select_all(3, functional=False)
_entity = self.DUT.select('0.1.1.1.01')
self.assertTrue(isinstance(_entity, RTKControl))
self.assertEqual(_entity.description, 'Functional FMEA control.')
@attr(all=True, unit=True)
def test02f_select_action_functional(self):
"""
(TestFMEAModel) select() should return an instance of RTKAction when selecting from a functional FMEA on success.
"""
self.DUT.select_all(3, functional=True)
_entity = self.DUT.select('0.1.1')
self.assertTrue(isinstance(_entity, RTKAction))
self.assertEqual(_entity.action_recommended,
'Do this stuff and do it now!!')
@attr(all=True, unit=True)
def test02g_select_action_hardware(self):
"""
(TestFMEAModel) select() should return an instance of RTKAction when selecting from a hardware FMEA on success.
"""
self.DUT.select_all(3, functional=False)
_entity = self.DUT.select('0.1.1.1.1')
self.assertTrue(isinstance(_entity, RTKAction))
self.assertEqual(_entity.action_recommended,
'Do this stuff and do it now!!')
@attr(all=True, unit=True)
def test03a_insert_mode_functional(self, functional=True):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Mode to a Functional FMEA.
"""
self.DUT.select_all(1, functional=True)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id=0, level='mode')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.' + str(self.DUT.dtm_mode.last_id)
_mode = self.DUT.select(_node_id)
self.assertTrue(isinstance(_mode, RTKMode))
self.assertEqual(_mode.function_id, 1)
self.assertEqual(_mode.hardware_id, -1)
_tree_mode = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_mode.data, RTKMode))
self.assertEqual(_mode, _tree_mode.data)
@attr(all=True, unit=True)
def test03b_insert_mode_hardware(self, functional=False):
"""(TestFMEAModel) insert() should return a zero error code on success when adding a new Mode to a Hardware FMEA."""
self.DUT.select_all(3, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id=0, level='mode')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.' + str(self.DUT.dtm_mode.last_id)
_mode = self.DUT.select(_node_id)
self.assertTrue(isinstance(_mode, RTKMode))
self.assertEqual(_mode.function_id, -1)
self.assertEqual(_mode.hardware_id, 1)
_tree_mode = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_mode.data, RTKMode))
self.assertEqual(_mode, _tree_mode.data)
    @attr(all=True, unit=True)
    def test03c_insert_mode_hardware_non_existant_level(self):
        """(TestFMEAModel) insert() should return a non-zero error code when trying to add a non-existant level."""
        self.DUT.select_all(1, functional=False)
        # 'juice' is not a valid FMEA indenture level; expect error code 2005.
        _error_code, _msg = self.DUT.insert(
            entity_id=1, parent_id=0, level='juice')
        self.assertEqual(_error_code, 2005)
        self.assertEqual(_msg,
                         'RTK ERROR: Attempted to add an item to the FMEA '
                         'with an undefined indenture level. Level juice was '
                         'requested. Must be one of mode, mechanism, cause, '
                         'control, or action.')
@attr(all=True, unit=True)
def test03d_insert_mechanism(self):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Mechanism to a Hardware FMEA.
"""
self.DUT.select_all(1, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id='0.3', level='mechanism')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.3.' + str(self.DUT.dtm_mechanism.last_id)
_mechanism = self.DUT.select(_node_id)
self.assertTrue(isinstance(_mechanism, RTKMechanism))
self.assertEqual(_mechanism.mode_id, 1)
self.assertEqual(_mechanism.mechanism_id,
self.DUT.dtm_mechanism.last_id)
_tree_mechanism = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_mechanism.data, RTKMechanism))
self.assertEqual(_mechanism, _tree_mechanism.data)
@attr(all=True, unit=True)
def test03e_insert_cause(self):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Cause to a Hardware FMEA.
"""
self.DUT.select_all(3, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id='0.1.1', level='cause')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.1.1.' + str(self.DUT.dtm_cause.last_id)
_cause = self.DUT.select(_node_id)
self.assertTrue(isinstance(_cause, RTKCause))
self.assertEqual(_cause.mechanism_id, 1)
self.assertEqual(_cause.cause_id, self.DUT.dtm_cause.last_id)
_tree_cause = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_cause.data, RTKCause))
self.assertEqual(_tree_cause.data, _cause)
@attr(all=True, unit=True)
def test03f_insert_control_functional(self):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Control to a Functional FMEA.
"""
self.DUT.select_all(1, functional=True)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id='0.2', level='control')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.2.0' + str(self.DUT.dtm_control.last_id)
_control = self.DUT.select(_node_id)
self.assertTrue(isinstance(_control, RTKControl))
self.assertEqual(_control.mode_id, 1)
self.assertEqual(_control.cause_id, -1)
_tree_control = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_control.data, RTKControl))
self.assertEqual(_control, _tree_control.data)
@attr(all=True, unit=True)
def test03g_insert_control_hardware(self):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Control to a Hardware FMEA.
"""
self.DUT.select_all(3, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=2, parent_id='0.1.1.1', level='control')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.1.1.1.0' + str(self.DUT.dtm_control.last_id)
_control = self.DUT.select(_node_id)
self.assertTrue(isinstance(_control, RTKControl))
self.assertEqual(_control.mode_id, -1)
self.assertEqual(_control.cause_id, 2)
_tree_control = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_control.data, RTKControl))
self.assertEqual(_control, _tree_control.data)
@attr(all=True, unit=True)
def test03h_insert_action_functional(self):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Action to a Functional FMEA.
"""
self.DUT.select_all(1, functional=True)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id='0.2', level='action')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.2.' + str(self.DUT.dtm_action.last_id)
_action = self.DUT.select(_node_id)
self.assertTrue(isinstance(_action, RTKAction))
self.assertEqual(_action.mode_id, 1)
self.assertEqual(_action.cause_id, -1)
_tree_action = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_action.data, RTKAction))
self.assertEqual(_action, _tree_action.data)
@attr(all=True, unit=True)
def test03i_insert_action_hardware(self):
"""
(TestFMEAModel) insert() should return a zero error code on success when adding a new Action to a Hardware FMEA.
"""
self.DUT.select_all(3, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id='0.1.1.1', level='action')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Adding one or more items to the RTK "
"Program database.")
_node_id = '0.1.1.1.' + str(self.DUT.dtm_action.last_id)
_action = self.DUT.select(_node_id)
self.assertTrue(isinstance(_action, RTKAction))
self.assertEqual(_action.mode_id, -1)
self.assertEqual(_action.cause_id, 1)
_tree_action = self.DUT.tree.get_node(_node_id)
self.assertTrue(isinstance(_tree_action.data, RTKAction))
self.assertEqual(_action, _tree_action.data)
@attr(all=True, unit=True)
def test03j_insert_non_existent_type(self):
"""
(TestFMEAModel) insert() should return a 2005 error code when attempting to add something other than a Mode, Mechanism, Cause, Control, or Action.
"""
self.DUT.select_all(1, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=100, parent_id=0, level='scadamoosh')
self.assertEqual(_error_code, 2005)
self.assertEqual(_msg,
"RTK ERROR: Attempted to add an item to the FMEA " \
"with an undefined indenture level. Level " \
"scadamoosh was requested. Must be one of "
"mode, mechanism, cause, control, or action.")
@attr(all=True, unit=True)
def test03k_insert_no_parent_in_tree(self):
"""
(TestFMEAModel) insert() should return a 2005 error code when attempting to add something to a non-existant parent Node.
"""
self.DUT.select_all(3, functional=False)
_error_code, _msg = self.DUT.insert(
entity_id=1, parent_id='mode_1', level='action')
self.assertEqual(_error_code, 2005)
self.assertEqual(_msg, "RTK ERROR: Attempted to add an item under "
"non-existent Node ID: mode_1.")
@attr(all=True, unit=True)
def test04a_delete_control_functional(self):
"""
(TestFMEAModel) delete() should return a zero error code on success when removing a Control.
"""
self.DUT.select_all(1, functional=True)
self.DUT.insert(entity_id=1, parent_id='0.2', level='control')
_node_id = '0.2.0' + str(self.DUT.dtm_control.last_id)
_error_code, _msg = self.DUT.delete(_node_id)
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Deleting an item from the RTK Program "
"database.")
@attr(all=True, unit=True)
def test04b_delete_non_existent_node_id(self):
"""
(TestFMEAModel) delete() should return a 2105 error code when attempting to remove a non-existant item from the FMEA.
"""
self.DUT.select_all(1, functional=True)
_error_code, _msg = self.DUT.delete('scadamoosh_1')
self.assertEqual(_error_code, 2005)
self.assertEqual(_msg, " RTK ERROR: Attempted to delete non-existent "
"entity with Node ID scadamoosh_1 from the FMEA.")
@attr(all=True, unit=True)
def test05a_update(self):
"""
(TestFMEAModel) update() should return a zero error code on success.
"""
self.DUT.select_all(3, functional=True)
_error_code, _msg = self.DUT.update('0.1')
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
"RTK SUCCESS: Updating the RTK Program database.")
@attr(all=True, unit=True)
def test05b_update_non_existent_node_id(self):
"""
(TestFMEAModel) update() should return a 2106 error code when attempting to update a non-existent Node ID from a functional FMEA.
"""
self.DUT.select_all(3, functional=True)
_error_code, _msg = self.DUT.update('mode_1000')
self.assertEqual(_error_code, 2006)
self.assertEqual(_msg,
"RTK ERROR: Attempted to save non-existent entity "
"with Node ID mode_1000.")
@attr(all=True, unit=True)
def test06a_update_all(self):
"""
(TestFMEAModel) update_all() should return a zero error code on success.
"""
self.DUT.select_all(3, functional=True)
_error_code, _msg = self.DUT.update_all()
self.assertEqual(_error_code, 0)
self.assertEqual(_msg,
'RTK SUCCESS: Updating the RTK Program database.')
@attr(all=True, unit=True)
def test07a_calculate_criticality(self):
"""
(TestFMEAModel) calculate_criticality() returns a zero error code on success
"""
self.DUT.select_all(1, functional=False)
_mode = self.DUT.select('0.3')
_mode.mode_ratio = 0.4
_mode.mode_op_time = 100.0
_mode.effect_probability = 1.0
_mode = self.DUT.select('0.5')
_mode.mode_ratio = 0.5
_mode.mode_op_time = 100.0
_mode.effect_probability = 1.0
_error_code, _msg = self.DUT.calculate_criticality(0.00001)
self.assertEqual(_error_code, 0)
self.assertEqual(_msg, 'RTK SUCCESS: Calculating failure mode 5 '
'criticality.')
self.assertEqual(_mode.mode_criticality, 0.0005)
self.DUT.update('0.3')
self.DUT.update('0.5')
@attr(all=True, unit=True)
def test08a_calculate_mechanism_rpn(self):
"""
(TestFMEAModel) calculate_mechanism_rpn returns a zero error code on success
"""
self.DUT.select_all(3, functional=False)
for _node in self.DUT.tree.children('0.1'):
_mechanism = _node.data
_attributes = _mechanism.get_attributes()
_attributes['rpn_detection'] = 4
_attributes['rpn_occurrence'] = 7
_attributes['rpn_detection_new'] = 3
_attributes['rpn_occurrence_new'] = 5
_mechanism.set_attributes(_attributes)
_error_code, _msg = \
self.DUT.calculate_rpn('0.1', 7, 4)
self.assertEqual(_error_code, 0)
self.assertEqual(_msg, 'RTK SUCCESS: Calculating failure mechanism '
'{0:d} RPN.'.\
format(self.DUT.dtm_mechanism.last_id))
self.assertEqual(_mechanism.rpn, 196)
self.assertEqual(_mechanism.rpn_new, 60)
@attr(all=True, unit=True)
def test09a_calculate_cause_rpn(self):
"""
(TestFMEAModel) calculate_cause_rpn returns a zero error code on success
"""
self.DUT.select_all(3, functional=False)
for _node in self.DUT.tree.children('0.1.1'):
_cause = _node.data
_attributes = _cause.get_attributes()
_attributes['rpn_detection'] = 4
_attributes['rpn_occurrence'] = 7
_attributes['rpn_detection_new'] = 3
_attributes['rpn_occurrence_new'] = 5
_cause.set_attributes(_attributes)
_error_code, _msg = \
self.DUT.calculate_rpn('0.1.1', 7, 4)
self.assertEqual(_error_code, 0)
self.assertEqual(_msg, 'RTK SUCCESS: Calculating failure cause '
'{0:d} RPN.'.\
format(self.DUT.dtm_cause.last_id))
self.assertEqual(_cause.rpn, 196)
self.assertEqual(_cause.rpn_new, 60)
class TestFMEADataController(unittest.TestCase):
"""
Class for testing the FMEA data controller class.
"""
def setUp(self):
"""
Method to setup the test fixture for the FMEA Data Controller.
"""
self.Configuration = Configuration()
self.Configuration.RTK_BACKEND = 'sqlite'
self.Configuration.RTK_PROG_INFO = {
'host': 'localhost',
'socket': 3306,
'database': '/tmp/TestDB.rtk',
'user': '',
'password': ''
}
self.Configuration.RTK_DEBUG_LOG = \
Utilities.create_logger("RTK.debug", 'DEBUG',
'/tmp/RTK_debug.log')
self.Configuration.RTK_USER_LOG = \
Utilities.create_logger("RTK.user", 'INFO',
'/tmp/RTK_user.log')
# Create a data access object and connect to a test database.
self.dao = DAO()
_database = self.Configuration.RTK_BACKEND + ':///' + \
self.Configuration.RTK_PROG_INFO['database']
self.dao.db_connect(_database)
self.DUT = dtcFMEA(self.dao, self.Configuration, test='True')
@attr(all=True, unit=True)
def test00_create_controller(self):
"""
(TestFMEAController) __init__ should return instance of FMEA data controller
"""
self.assertTrue(isinstance(self.DUT, dtcFMEA))
self.assertTrue(isinstance(self.DUT._dtm_data_model, dtmFMEA))
@attr(all=True, unit=True)
def test01a_request_select_all_hardware(self):
"""(TestFMEAController) request_select_all() should return a treelib Tree() with the hardware FMEA."""
self.assertTrue(
isinstance(self.DUT.request_select_all(1, functional=False), Tree))
@attr(all=True, unit=True)
def test01b_request_select_all_functional(self):
"""(TestFMEAController) request_select_all() should return a treelib Tree() with the functional FMEA."""
self.assertTrue(
isinstance(self.DUT.request_select_all(3, functional=True), Tree))
@attr(all=True, unit=True)
def test03a_request_insert_mode_functional(self):
"""(TestFMEAController) request_insert() should return False on success when adding a mode to a functional FMEA."""
self.DUT.request_select_all(3, functional=True)
self.assertFalse(self.DUT.request_insert(1, 0, 'mode'))
@attr(all=True, unit=True)
def test_03b_request_insert_mode_hardware(self):
"""(TestFMEAController) request_insert() should return False on success when addin a mode to a hardware FMEA."""
self.DUT.request_select_all(1, functional=False)
self.assertFalse(self.DUT.request_insert(1, 0, 'mode'))
@attr(all=True, unit=True)
def test03c_request_insert_mechanism(self):
"""(TestFMEAController) request_insert() should return a False on success when adding a new Mechanism to a Hardware FMEA."""
self.DUT.request_select_all(3, functional=False)
self.assertFalse(self.DUT.request_insert(1, '0.1', 'mechanism'))
@attr(all=True, unit=True)
def test04a_request_delete_control_functional(self):
"""(TestFMEAController) request_delete() should return False on success when removing a Control from a functional FMEA."""
self.DUT.request_select_all(3, functional=True)
_node_id = '0.1.0' + str(self.DUT._dtm_data_model.dtm_control.last_id)
self.assertFalse(self.DUT.request_delete(_node_id))
@attr(all=True, unit=True)
def test05a_update_all(self):
"""(TestFMEAController) request_update_all() should return a zero error code on success."""
self.DUT.request_select_all(3, functional=True)
self.assertFalse(self.DUT.request_update_all())
| 38.160057 | 154 | 0.633755 | 3,333 | 26,941 | 4.906991 | 0.087309 | 0.04922 | 0.037359 | 0.035769 | 0.786548 | 0.769979 | 0.704677 | 0.665301 | 0.635096 | 0.620972 | 0 | 0.019728 | 0.258676 | 26,941 | 705 | 155 | 38.214184 | 0.799169 | 0.175643 | 0 | 0.530093 | 0 | 0 | 0.111553 | 0.00131 | 0 | 0 | 0 | 0 | 0.273148 | 1 | 0.094907 | false | 0.00463 | 0.034722 | 0 | 0.134259 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
c965c7dc9fc33b37dde91f125bb8ae37b44a74aa | 440 | py | Python | tests/unit/test_importer.py | mbachry/exxo | e4e56bce57fd0b128d428c58b40fc2cf404a680d | [
"0BSD"
] | 513 | 2016-01-08T10:45:02.000Z | 2022-01-25T02:15:52.000Z | tests/unit/test_importer.py | mbachry/exxo | e4e56bce57fd0b128d428c58b40fc2cf404a680d | [
"0BSD"
] | 12 | 2016-01-08T15:59:54.000Z | 2018-06-11T19:55:06.000Z | tests/unit/test_importer.py | mbachry/exxo | e4e56bce57fd0b128d428c58b40fc2cf404a680d | [
"0BSD"
] | 14 | 2015-11-26T10:24:11.000Z | 2021-05-30T19:44:09.000Z | def test_find_solib_in_zip(importer):
spec = importer.find_spec('spam', None)
assert spec is not None
def test_find_solib_in_zip_missing(importer):
spec = importer.find_spec('spam_missing', None)
assert spec is None
def test_import_solib_from_zip(importer):
import spam
assert spam.spam(2, 6) == 8
def test_import_rpath_solib_from_zip(importer):
from sub.sub2 import rpath
assert rpath.spam(2, 6) == 8
| 23.157895 | 51 | 0.731818 | 71 | 440 | 4.239437 | 0.309859 | 0.093023 | 0.07309 | 0.106312 | 0.352159 | 0.352159 | 0 | 0 | 0 | 0 | 0 | 0.019391 | 0.179545 | 440 | 18 | 52 | 24.444444 | 0.814404 | 0 | 0 | 0 | 0 | 0 | 0.036364 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | false | 0 | 0.666667 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
c9686ed8b2abbad434d9f4542740f603cdb049fd | 384 | py | Python | fdk_client/platform/models/LaunchPage.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | fdk_client/platform/models/LaunchPage.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | fdk_client/platform/models/LaunchPage.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | """Platform Models."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
class LaunchPage(BaseSchema):
# Configuration swagger.json
page_type = fields.Str(required=False)
params = fields.Dict(required=False)
query = fields.Dict(required=False)
| 14.769231 | 42 | 0.700521 | 42 | 384 | 6.380952 | 0.571429 | 0.145522 | 0.134328 | 0.171642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208333 | 384 | 25 | 43 | 15.36 | 0.881579 | 0.114583 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
c97afba6704c7a3dcf742cba0363228e317b3122 | 123 | py | Python | app/bda_core/entities/nlp/stemming.py | bda-19fs/bda-chatbot | 4fcbda813ff5d3854a4c2e12413775676bcba9e2 | [
"MIT"
] | 1 | 2019-05-25T12:12:39.000Z | 2019-05-25T12:12:39.000Z | app/bda_core/entities/nlp/stemming.py | bda-19fs/bda-chatbot | 4fcbda813ff5d3854a4c2e12413775676bcba9e2 | [
"MIT"
] | null | null | null | app/bda_core/entities/nlp/stemming.py | bda-19fs/bda-chatbot | 4fcbda813ff5d3854a4c2e12413775676bcba9e2 | [
"MIT"
] | null | null | null | def stemm(line, stemmer):
tokens = line.split(' ')
return str.join(' ', [stemmer.stem(token) for token in tokens])
| 30.75 | 67 | 0.642276 | 17 | 123 | 4.647059 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.186992 | 123 | 3 | 68 | 41 | 0.79 | 0 | 0 | 0 | 0 | 0 | 0.01626 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
c97fc9d5986ab9277510abad71c2863373c8a44a | 80 | py | Python | c3po/__init__.py | GuoJing/c3po-grpc-gateway | 2fd6c2d872b27e7b4acb5186787de7591b958fdd | [
"MIT"
] | 1 | 2016-04-28T12:49:21.000Z | 2016-04-28T12:49:21.000Z | c3po/__init__.py | GuoJing/c3po-grpc-gateway | 2fd6c2d872b27e7b4acb5186787de7591b958fdd | [
"MIT"
] | null | null | null | c3po/__init__.py | GuoJing/c3po-grpc-gateway | 2fd6c2d872b27e7b4acb5186787de7591b958fdd | [
"MIT"
] | null | null | null | version_info = (0, 0, 1)
__version__ = '.'.join([str(i) for i in version_info])
| 26.666667 | 54 | 0.65 | 14 | 80 | 3.285714 | 0.642857 | 0.478261 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044118 | 0.15 | 80 | 2 | 55 | 40 | 0.632353 | 0 | 0 | 0 | 0 | 0 | 0.0125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
a32e24ca582a76ae402be4613ce1a6e88ceeb91f | 2,232 | py | Python | test/test_framecurve_parser.py | guerilla-di/framecurve_python | 65b5b6a7cc41f4df95576f3671d07a9115a97490 | [
"MIT"
] | 2 | 2017-09-29T22:54:40.000Z | 2021-05-21T23:28:46.000Z | test/test_framecurve_parser.py | guerilla-di/framecurve_python | 65b5b6a7cc41f4df95576f3671d07a9115a97490 | [
"MIT"
] | null | null | null | test/test_framecurve_parser.py | guerilla-di/framecurve_python | 65b5b6a7cc41f4df95576f3671d07a9115a97490 | [
"MIT"
] | null | null | null | import os
from StringIO import StringIO
import framecurve
def test_parser():
data = "\r\n".join(["# Framecurve data", "10\t1293.12", "#Some useful info", "10\t145"])
elements = framecurve.parse(StringIO(data))
assert isinstance(elements, framecurve.Curve)
assert len(elements) == 4
assert isinstance(elements[0], framecurve.Comment)
assert isinstance(elements[1], framecurve.FrameCorrelation)
assert isinstance(elements[2], framecurve.Comment)
assert isinstance(elements[3], framecurve.FrameCorrelation)
assert elements[0].text == "Framecurve data"
assert elements[1] == framecurve.FrameCorrelation(10, 1293.12)
assert elements[2].text == "Some useful info"
def test_parse_with_neg_source_frame():
data = "10\t-1293.12"
elements = framecurve.Parser(StringIO(data)).parse()
assert isinstance(elements, framecurve.Curve)
assert len(elements) == 1
assert isinstance(elements[0], framecurve.FrameCorrelation)
assert framecurve.FrameCorrelation(10, -1293.12), elements[0]
def test_parse_with_neg_dest_frame():
data = "-123\t-1293.12"
elements = framecurve.parse(StringIO(data))
assert isinstance(elements, framecurve.Curve)
assert len(elements) == 1
assert isinstance(elements[0], framecurve.FrameCorrelation)
assert framecurve.FrameCorrelation(-123, -1293.12), elements[0]
def test_should_try_to_open_file_at_path_if_string_passed_to_parse():
assert not os.path.exists("/tmp/some_file.framecurve.txt")
try:
framecurve.parse("/tmp/some_file.framecurve.txt")
except IOError:
pass
else:
assert False
def test_should_pick_file_path_from_passed_file_object():
path = os.path.dirname(__file__) + "/fixtures/framecurves/sample_framecurve1.framecurve.txt"
curve = framecurve.parse(open(path))
assert "sample_framecurve1.framecurve.txt" == curve.filename
def test_parser_fails_on_malformed_lines():
data = "Sachlich gesehen\nbambam"
try:
framecurve.parse(StringIO(data))
except framecurve.MalformedError:
pass
else:
assert False
def test_parse_str_helper():
c1 = framecurve.parse_str("1\t2")
print c1
assert c1[0].at == 1
assert c1[0].value == 2.0
| 30.162162 | 96 | 0.723566 | 283 | 2,232 | 5.537102 | 0.289753 | 0.091895 | 0.137843 | 0.051691 | 0.527122 | 0.320357 | 0.259094 | 0.259094 | 0.259094 | 0.259094 | 0 | 0.043687 | 0.15905 | 2,232 | 73 | 97 | 30.575342 | 0.791156 | 0 | 0 | 0.314815 | 0 | 0 | 0.128584 | 0.065412 | 0 | 0 | 0 | 0 | 0.425926 | 0 | null | null | 0.074074 | 0.055556 | null | null | 0.018519 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
a337e0bdfdd67ae72464d3f47fcc67bb08c711f3 | 736 | py | Python | src/sage/libs/mwrank/all.py | switzel/sage | 7eb8510dacf61b691664cd8f1d2e75e5d473e5a0 | [
"BSL-1.0"
] | 5 | 2015-01-04T07:15:06.000Z | 2022-03-04T15:15:18.000Z | src/sage/libs/mwrank/all.py | switzel/sage | 7eb8510dacf61b691664cd8f1d2e75e5d473e5a0 | [
"BSL-1.0"
] | null | null | null | src/sage/libs/mwrank/all.py | switzel/sage | 7eb8510dacf61b691664cd8f1d2e75e5d473e5a0 | [
"BSL-1.0"
] | 10 | 2016-09-28T13:12:40.000Z | 2022-02-12T09:28:34.000Z | """
Cremona's mwrank C++ library
"""
__doc_exclude = [] # to include everything
from interface import (mwrank_EllipticCurve, mwrank_MordellWeil,
get_precision, set_precision)
def mwrank_initprimes(filename, verb=False):
"""
mwrank_initprimes(filename, verb=False):
INPUT:
- ``filename`` - (string) the name of a file of
primes
- ``verb`` - (bool: default False) verbose or not?
EXAMPLES::
sage: file = tmp_filename()
sage: open(file,'w').write(' '.join([str(p) for p in prime_range(10^6)]))
sage: mwrank_initprimes(file, verb=False)
"""
from mwrank import initprimes as mwrank_initprimes
return mwrank_initprimes(filename, verb)
| 21.028571 | 81 | 0.637228 | 87 | 736 | 5.229885 | 0.609195 | 0.175824 | 0.158242 | 0.184615 | 0.145055 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005386 | 0.243207 | 736 | 34 | 82 | 21.647059 | 0.81149 | 0.521739 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
a374335e617a9674711a761656918c43c51bf5f2 | 1,521 | py | Python | src/sima/simo/blueprints/soilfrictionelement.py | SINTEF/simapy | 650b8c2f15503dad98e2bfc0d0788509593822c7 | [
"MIT"
] | null | null | null | src/sima/simo/blueprints/soilfrictionelement.py | SINTEF/simapy | 650b8c2f15503dad98e2bfc0d0788509593822c7 | [
"MIT"
] | null | null | null | src/sima/simo/blueprints/soilfrictionelement.py | SINTEF/simapy | 650b8c2f15503dad98e2bfc0d0788509593822c7 | [
"MIT"
] | null | null | null | #
# Generated with SoilFrictionElementBlueprint
from dmt.blueprint import Blueprint
from dmt.dimension import Dimension
from dmt.attribute import Attribute
from dmt.enum_attribute import EnumAttribute
from dmt.blueprint_attribute import BlueprintAttribute
from sima.sima.blueprints.moao import MOAOBlueprint
class SoilFrictionElementBlueprint(MOAOBlueprint):
""""""
def __init__(self, name="SoilFrictionElement", package_path="sima/simo", description=""):
super().__init__(name,package_path,description)
self.attributes.append(Attribute("name","string","",default=""))
self.attributes.append(Attribute("description","string","",default=""))
self.attributes.append(Attribute("_id","string","",default=""))
self.attributes.append(BlueprintAttribute("scriptableValues","sima/sima/ScriptableValue","",True,Dimension("*")))
self.attributes.append(Attribute("dfric","number","Penetration relative to ZCONT (positive upwards)",default=0.0))
self.attributes.append(Attribute("ftipdo","number","Depth dependent friction force for DOWNward motion",default=0.0))
self.attributes.append(Attribute("ftipup","number","Depth dependent friction force for UPward motion",default=0.0))
self.attributes.append(Attribute("fwall","number","Depth dependent friction force for both upwards and\ndownwards motion",default=0.0))
self.attributes.append(Attribute("frich","number","Depth dependent friction force in horizontal direction (>=0)",default=0.0)) | 66.130435 | 143 | 0.750164 | 170 | 1,521 | 6.635294 | 0.364706 | 0.111702 | 0.159574 | 0.205674 | 0.379433 | 0.320922 | 0.150709 | 0.117021 | 0 | 0 | 0 | 0.008136 | 0.111111 | 1,521 | 23 | 144 | 66.130435 | 0.826183 | 0.028271 | 0 | 0 | 1 | 0 | 0.298162 | 0.017018 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.333333 | 0 | 0.444444 | 0.277778 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
a377b54a2240e26e1c0a6b5f47de64532bba96a1 | 1,780 | py | Python | elasticsearch_dsl/result.py | svisser/elasticsearch-dsl-py | 9116e45c24aad05132fbfe255c5a5ac306120543 | [
"Apache-2.0"
] | null | null | null | elasticsearch_dsl/result.py | svisser/elasticsearch-dsl-py | 9116e45c24aad05132fbfe255c5a5ac306120543 | [
"Apache-2.0"
] | null | null | null | elasticsearch_dsl/result.py | svisser/elasticsearch-dsl-py | 9116e45c24aad05132fbfe255c5a5ac306120543 | [
"Apache-2.0"
] | 1 | 2019-01-24T22:04:16.000Z | 2019-01-24T22:04:16.000Z | from six import iteritems, u
from .utils import AttrDict, AttrList
class Response(AttrDict):
def __iter__(self):
return iter(self.hits)
def __getitem__(self, key):
# for slicing etc
return self.hits[key]
def __repr__(self):
return '<Response: %r>' % self.hits
def success(self):
return not (self.timed_out or self._shards.failed)
@property
def hits(self):
if not hasattr(self, '_hits'):
h = self._d_['hits']
# avoid assigning _hits into self._d_
super(AttrDict, self).__setattr__('_hits', AttrList(map(Result, h['hits'])))
for k in h:
setattr(self._hits, k, h[k])
return self._hits
class ResultMeta(AttrDict):
def __init__(self, document):
d = dict((k[1:], v) for (k, v) in iteritems(document) if k.startswith('_') and k != '_source')
# make sure we are consistent everywhere in python
d['doc_type'] = d['type']
super(ResultMeta, self).__init__(d)
class Result(AttrDict):
def __init__(self, document):
if 'fields' in document:
super(Result, self).__init__(document['fields'])
elif '_source' in document:
super(Result, self).__init__(document['_source'])
else:
super(Result, self).__init__({})
# assign _meta as attribute and not as key in self._d_
super(AttrDict, self).__setattr__('_meta', ResultMeta(document))
def __dir__(self):
# be sure to expose _meta in dir(self)
return super(Result, self).__dir__() + ['_meta']
def __repr__(self):
return u('<Result(%s/%s/%s): %s>') % (
self._meta.index, self._meta.doc_type, self._meta.id, super(Result, self).__repr__())
| 32.363636 | 102 | 0.597753 | 226 | 1,780 | 4.327434 | 0.331858 | 0.04908 | 0.076687 | 0.058282 | 0.190184 | 0.134969 | 0.075665 | 0 | 0 | 0 | 0 | 0.000768 | 0.268539 | 1,780 | 54 | 103 | 32.962963 | 0.750384 | 0.106742 | 0 | 0.105263 | 0 | 0 | 0.069444 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.236842 | false | 0 | 0.052632 | 0.157895 | 0.552632 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
a3854918f41f644be6ff9b17e9648a6efcbc831e | 12,227 | py | Python | sdk/python/pulumi_alicloud/resourcemanager/resource_directory.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z | sdk/python/pulumi_alicloud/resourcemanager/resource_directory.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z | sdk/python/pulumi_alicloud/resourcemanager/resource_directory.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ResourceDirectoryArgs', 'ResourceDirectory']
@pulumi.input_type
class ResourceDirectoryArgs:
def __init__(__self__, *,
status: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ResourceDirectory resource.
:param pulumi.Input[str] status: The status of control policy. Valid values:`Enabled` and `Disabled`.
"""
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
The status of control policy. Valid values:`Enabled` and `Disabled`.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@pulumi.input_type
class _ResourceDirectoryState:
def __init__(__self__, *,
master_account_id: Optional[pulumi.Input[str]] = None,
master_account_name: Optional[pulumi.Input[str]] = None,
root_folder_id: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering ResourceDirectory resources.
:param pulumi.Input[str] master_account_id: The ID of the master account.
:param pulumi.Input[str] master_account_name: The name of the master account.
:param pulumi.Input[str] root_folder_id: The ID of the root folder.
:param pulumi.Input[str] status: The status of control policy. Valid values:`Enabled` and `Disabled`.
"""
if master_account_id is not None:
pulumi.set(__self__, "master_account_id", master_account_id)
if master_account_name is not None:
pulumi.set(__self__, "master_account_name", master_account_name)
if root_folder_id is not None:
pulumi.set(__self__, "root_folder_id", root_folder_id)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="masterAccountId")
def master_account_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the master account.
"""
return pulumi.get(self, "master_account_id")
@master_account_id.setter
def master_account_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_account_id", value)
@property
@pulumi.getter(name="masterAccountName")
def master_account_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the master account.
"""
return pulumi.get(self, "master_account_name")
@master_account_name.setter
def master_account_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_account_name", value)
@property
@pulumi.getter(name="rootFolderId")
def root_folder_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the root folder.
"""
return pulumi.get(self, "root_folder_id")
@root_folder_id.setter
def root_folder_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "root_folder_id", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
The status of control policy. Valid values:`Enabled` and `Disabled`.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
class ResourceDirectory(pulumi.CustomResource):
    """Auto-generated Pulumi resource wrapping an Alibaba Cloud Resource
    Manager Resource Directory. See the ``__init__`` overload docstrings
    for full usage notes and examples."""

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Resource Manager Resource Directory resource. Resource Directory enables you to establish an organizational structure for the resources used by applications of your enterprise. You can plan, build, and manage the resources in a centralized manner by using only one resource directory.

        For information about Resource Manager Resource Directory and how to use it, see [What is Resource Manager Resource Directory](https://www.alibabacloud.com/help/en/doc-detail/94475.htm).

        > **NOTE:** Available in v1.84.0+.

        > **NOTE:** An account can only be used to enable a resource directory after it passes enterprise real-name verification. An account that only passed individual real-name verification cannot be used to enable a resource directory.

        > **NOTE:** Before you destroy the resource, make sure that the following requirements are met:
        - All member accounts must be removed from the resource directory.
        - All folders except the root folder must be deleted from the resource directory.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        example = alicloud.resourcemanager.ResourceDirectory("example", status="Enabled")
        ```

        ## Import

        Resource Manager Resource Directory can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:resourcemanager/resourceDirectory:ResourceDirectory example rd-s3****
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] status: The status of control policy. Valid values:`Enabled` and `Disabled`.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[ResourceDirectoryArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Resource Manager Resource Directory resource. Resource Directory enables you to establish an organizational structure for the resources used by applications of your enterprise. You can plan, build, and manage the resources in a centralized manner by using only one resource directory.

        For information about Resource Manager Resource Directory and how to use it, see [What is Resource Manager Resource Directory](https://www.alibabacloud.com/help/en/doc-detail/94475.htm).

        > **NOTE:** Available in v1.84.0+.

        > **NOTE:** An account can only be used to enable a resource directory after it passes enterprise real-name verification. An account that only passed individual real-name verification cannot be used to enable a resource directory.

        > **NOTE:** Before you destroy the resource, make sure that the following requirements are met:
        - All member accounts must be removed from the resource directory.
        - All folders except the root folder must be deleted from the resource directory.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        example = alicloud.resourcemanager.ResourceDirectory("example", status="Enabled")
        ```

        ## Import

        Resource Manager Resource Directory can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:resourcemanager/resourceDirectory:ResourceDirectory example rd-s3****
        ```

        :param str resource_name: The name of the resource.
        :param ResourceDirectoryArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: callers may pass either a
        # ResourceDirectoryArgs object or bare keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ResourceDirectoryArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       status: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body used by both overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied
            # together with opts.id (the "get existing resource" path).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ResourceDirectoryArgs.__new__(ResourceDirectoryArgs)
            __props__.__dict__["status"] = status
            # Output-only properties start out unresolved.
            __props__.__dict__["master_account_id"] = None
            __props__.__dict__["master_account_name"] = None
            __props__.__dict__["root_folder_id"] = None
        super(ResourceDirectory, __self__).__init__(
            'alicloud:resourcemanager/resourceDirectory:ResourceDirectory',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            master_account_id: Optional[pulumi.Input[str]] = None,
            master_account_name: Optional[pulumi.Input[str]] = None,
            root_folder_id: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[str]] = None) -> 'ResourceDirectory':
        """
        Get an existing ResourceDirectory resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] master_account_id: The ID of the master account.
        :param pulumi.Input[str] master_account_name: The name of the master account.
        :param pulumi.Input[str] root_folder_id: The ID of the root folder.
        :param pulumi.Input[str] status: The status of control policy. Valid values:`Enabled` and `Disabled`.
        """
        # Binding an id in the options makes the engine read existing state
        # instead of creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ResourceDirectoryState.__new__(_ResourceDirectoryState)
        __props__.__dict__["master_account_id"] = master_account_id
        __props__.__dict__["master_account_name"] = master_account_name
        __props__.__dict__["root_folder_id"] = root_folder_id
        __props__.__dict__["status"] = status
        return ResourceDirectory(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="masterAccountId")
    def master_account_id(self) -> pulumi.Output[str]:
        """
        The ID of the master account.
        """
        return pulumi.get(self, "master_account_id")

    @property
    @pulumi.getter(name="masterAccountName")
    def master_account_name(self) -> pulumi.Output[str]:
        """
        The name of the master account.
        """
        return pulumi.get(self, "master_account_name")

    @property
    @pulumi.getter(name="rootFolderId")
    def root_folder_id(self) -> pulumi.Output[str]:
        """
        The ID of the root folder.
        """
        return pulumi.get(self, "root_folder_id")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """
        The status of control policy. Valid values:`Enabled` and `Disabled`.
        """
        return pulumi.get(self, "status")
| 42.162069 | 303 | 0.661405 | 1,442 | 12,227 | 5.377254 | 0.142164 | 0.070415 | 0.059582 | 0.059582 | 0.765153 | 0.72324 | 0.693578 | 0.666366 | 0.650761 | 0.647666 | 0 | 0.002281 | 0.247158 | 12,227 | 289 | 304 | 42.307958 | 0.840087 | 0.406477 | 0 | 0.50365 | 1 | 0 | 0.109998 | 0.012479 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153285 | false | 0.007299 | 0.036496 | 0 | 0.284672 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
a386c941f91d87bac9c51e3649254ca5c89b175d | 284 | py | Python | DataClean/redis_clean.py | DYC2016/zhaopin | eb3920d05160a9e5570c958e08e9b950db660f64 | [
"Apache-2.0"
] | null | null | null | DataClean/redis_clean.py | DYC2016/zhaopin | eb3920d05160a9e5570c958e08e9b950db660f64 | [
"Apache-2.0"
] | null | null | null | DataClean/redis_clean.py | DYC2016/zhaopin | eb3920d05160a9e5570c958e08e9b950db660f64 | [
"Apache-2.0"
] | null | null | null | import redis
conn=redis.Redis(host='127.0.0.1',db=0)
url_list=conn.smembers('zwmc_gsmc_zwlb')
for url in url_list:
print(url.decode('utf-8'))
if 'zhaopin' in url.decode('utf-8'):
print('rm {}'.format(url.decode('utf-8')))
conn.srem('zwmc_gsmc_zwlb',url) | 35.5 | 51 | 0.637324 | 49 | 284 | 3.571429 | 0.510204 | 0.154286 | 0.205714 | 0.222857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.042017 | 0.161972 | 284 | 8 | 52 | 35.5 | 0.693277 | 0 | 0 | 0 | 0 | 0 | 0.230216 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.25 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6e5d008705894d1d8a24ec1c9b0825922110db36 | 230 | py | Python | home/urls.py | joeseggie/demo_django_subdomains | 5bab03d8721b570a15d7a036d2eb4822786cbcc7 | [
"MIT"
] | null | null | null | home/urls.py | joeseggie/demo_django_subdomains | 5bab03d8721b570a15d7a036d2eb4822786cbcc7 | [
"MIT"
] | 7 | 2020-02-12T01:17:54.000Z | 2021-06-10T18:40:15.000Z | home/urls.py | joeseggie/demo_django_subdomains | 5bab03d8721b570a15d7a036d2eb4822786cbcc7 | [
"MIT"
] | null | null | null | from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns
from home.views import index
urlpatterns = [
path(r'', index, name='apiindex'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
| 20.909091 | 61 | 0.791304 | 29 | 230 | 6.103448 | 0.586207 | 0.135593 | 0.225989 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126087 | 230 | 10 | 62 | 23 | 0.880597 | 0 | 0 | 0 | 0 | 0 | 0.034783 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 0.428571 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
6e64b611f8dc2c1ebc692485fea5a3d23119db13 | 1,314 | py | Python | app/route/user/provider.py | LifeLaboratory/backend_template | 4cbfa48efc632d1eaf364458fa116230e2395e81 | [
"MIT"
] | null | null | null | app/route/user/provider.py | LifeLaboratory/backend_template | 4cbfa48efc632d1eaf364458fa116230e2395e81 | [
"MIT"
] | 1 | 2019-12-01T14:43:00.000Z | 2019-12-01T14:43:00.000Z | app/route/user/provider.py | LifeLaboratory/backend_template | 4cbfa48efc632d1eaf364458fa116230e2395e81 | [
"MIT"
] | null | null | null | from app.api.base.base_sql import Sql
class Provider:
@staticmethod
def auth_user(args):
query = """
select "@user"
from "users"
where ("login" = '{login}'
and "password" = '{password}'
)
"""
return Sql.exec(query=query, args=args)
@staticmethod
def check_user(args):
query = """
select 1
from "users"
where ("login" = '{login}'
and "password" = '{password}'
)
"""
return Sql.exec(query=query, args=args)
@staticmethod
def register_user(args):
query = """
insert into "users"("login", "password", "fio", "description", "photo", "email", "number")
VALUES ('{login}',
'{password}',
'{fio}',
'{description}',
'{photo}',
'{email}',
'{number}')
returning "@user"
"""
return Sql.exec(query=query, args=args)
@staticmethod
def get_user_id(args):
query = """
select "@user"
from "users"
where "login" = '{login}'
"""
return Sql.exec(query=query, args=args)
@staticmethod
def get_user_info(args):
query = """
select *
from "users"
where "@user" = '{@login}'
"""
return Sql.exec(query=query, args=args)
| 22.271186 | 95 | 0.503044 | 130 | 1,314 | 5.023077 | 0.284615 | 0.114855 | 0.099541 | 0.137825 | 0.716692 | 0.716692 | 0.716692 | 0.584992 | 0.529862 | 0.434916 | 0 | 0.001142 | 0.333333 | 1,314 | 58 | 96 | 22.655172 | 0.744292 | 0 | 0 | 0.576923 | 0 | 0.019231 | 0.512177 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096154 | false | 0.076923 | 0.019231 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
6e8c2c652733d0db540f058bc16379c232d85074 | 125 | py | Python | projects/MAE/configs/models/mae_vit_huge_patch14.py | Oneflow-Inc/libai | e473bd3962f07b1e37232d2be39c8257df0ec0f3 | [
"Apache-2.0"
] | 55 | 2021-12-10T08:47:06.000Z | 2022-03-28T09:02:15.000Z | projects/MAE/configs/models/mae_vit_huge_patch14.py | Oneflow-Inc/libai | e473bd3962f07b1e37232d2be39c8257df0ec0f3 | [
"Apache-2.0"
] | 106 | 2021-11-03T05:16:45.000Z | 2022-03-31T06:16:23.000Z | projects/MAE/configs/models/mae_vit_huge_patch14.py | Oneflow-Inc/libai | e473bd3962f07b1e37232d2be39c8257df0ec0f3 | [
"Apache-2.0"
] | 13 | 2021-12-29T08:12:08.000Z | 2022-03-28T06:59:45.000Z | from .mae_vit_base_patch16 import model
model.patch_size = 14
model.embed_dim = 1280
model.depth = 32
model.num_heads = 16
| 15.625 | 39 | 0.784 | 22 | 125 | 4.181818 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11215 | 0.144 | 125 | 7 | 40 | 17.857143 | 0.747664 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6e95d85c39cddc71cb5958389b70992e6c622964 | 9,953 | py | Python | exabel_data_sdk/stubs/exabel/api/analytics/v1/derived_signal_messages_pb2.py | burk/python-sdk | 83fb81d09e0d6a407c8907a75bebb895decc7edc | [
"MIT"
] | null | null | null | exabel_data_sdk/stubs/exabel/api/analytics/v1/derived_signal_messages_pb2.py | burk/python-sdk | 83fb81d09e0d6a407c8907a75bebb895decc7edc | [
"MIT"
] | null | null | null | exabel_data_sdk/stubs/exabel/api/analytics/v1/derived_signal_messages_pb2.py | burk/python-sdk | 83fb81d09e0d6a407c8907a75bebb895decc7edc | [
"MIT"
] | null | null | null | """Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(name='exabel/api/analytics/v1/derived_signal_messages.proto', package='exabel.api.analytics.v1', syntax='proto3', serialized_options=b'\n\x1bcom.exabel.api.analytics.v1B\x1aDerivedSignalMessagesProtoP\x01Z\x1bexabel.com/api/analytics/v1', create_key=_descriptor._internal_create_key, serialized_pb=b'\n5exabel/api/analytics/v1/derived_signal_messages.proto\x12\x17exabel.api.analytics.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x97\x01\n\rDerivedSignal\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\t\x12\x12\n\nexpression\x18\x03 \x01(\t\x12\x13\n\x0bdescription\x18\x04 \x01(\t\x12@\n\x08metadata\x18\x05 \x01(\x0b2..exabel.api.analytics.v1.DerivedSignalMetadata"\xbf\x01\n\x15DerivedSignalMetadata\x12-\n\x08decimals\x18\x01 \x01(\x0b2\x1b.google.protobuf.Int32Value\x128\n\x04unit\x18\x02 \x01(\x0e2*.exabel.api.analytics.v1.DerivedSignalUnit\x12=\n\x04type\x18\x03 \x01(\x0e2*.exabel.api.analytics.v1.DerivedSignalTypeB\x03\xe0A\x03*a\n\x11DerivedSignalUnit\x12\x1f\n\x1bDERIVED_SIGNAL_UNIT_INVALID\x10\x00\x12\n\n\x06NUMBER\x10\x01\x12\t\n\x05RATIO\x10\x02\x12\x14\n\x10RATIO_DIFFERENCE\x10\x03*\x9a\x01\n\x11DerivedSignalType\x12\x1f\n\x1bDERIVED_SIGNAL_TYPE_INVALID\x10\x00\x12\x12\n\x0eDERIVED_SIGNAL\x10\x01\x12\x18\n\x14FILE_UPLOADED_SIGNAL\x10\x02\x12 \n\x1cFILE_UPLOADED_COMPANY_SIGNAL\x10\x03\x12\x14\n\x10PERSISTED_SIGNAL\x10\x04BX\n\x1bcom.exabel.api.analytics.v1B\x1aDerivedSignalMessagesProtoP\x01Z\x1bexabel.com/api/analytics/v1b\x06proto3', dependencies=[google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR])
_DERIVEDSIGNALUNIT = _descriptor.EnumDescriptor(name='DerivedSignalUnit', full_name='exabel.api.analytics.v1.DerivedSignalUnit', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='DERIVED_SIGNAL_UNIT_INVALID', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='NUMBER', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='RATIO', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='RATIO_DIFFERENCE', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=495, serialized_end=592)
# Register the unit enum with the default symbol database and expose a
# Python-level wrapper type for it.
_sym_db.RegisterEnumDescriptor(_DERIVEDSIGNALUNIT)
DerivedSignalUnit = enum_type_wrapper.EnumTypeWrapper(_DERIVEDSIGNALUNIT)
_DERIVEDSIGNALTYPE = _descriptor.EnumDescriptor(name='DerivedSignalType', full_name='exabel.api.analytics.v1.DerivedSignalType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='DERIVED_SIGNAL_TYPE_INVALID', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='DERIVED_SIGNAL', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='FILE_UPLOADED_SIGNAL', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='FILE_UPLOADED_COMPANY_SIGNAL', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='PERSISTED_SIGNAL', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=595, serialized_end=749)
# Register the type enum with the default symbol database and expose a
# Python-level wrapper type for it.
_sym_db.RegisterEnumDescriptor(_DERIVEDSIGNALTYPE)
DerivedSignalType = enum_type_wrapper.EnumTypeWrapper(_DERIVEDSIGNALTYPE)
# Module-level aliases for the DerivedSignalUnit enum values.
DERIVED_SIGNAL_UNIT_INVALID = 0
NUMBER = 1
RATIO = 2
RATIO_DIFFERENCE = 3
# Module-level aliases for the DerivedSignalType enum values.
DERIVED_SIGNAL_TYPE_INVALID = 0
DERIVED_SIGNAL = 1
FILE_UPLOADED_SIGNAL = 2
FILE_UPLOADED_COMPANY_SIGNAL = 3
PERSISTED_SIGNAL = 4
_DERIVEDSIGNAL = _descriptor.Descriptor(name='DerivedSignal', full_name='exabel.api.analytics.v1.DerivedSignal', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='exabel.api.analytics.v1.DerivedSignal.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='label', full_name='exabel.api.analytics.v1.DerivedSignal.label', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='expression', full_name='exabel.api.analytics.v1.DerivedSignal.expression', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='description', full_name='exabel.api.analytics.v1.DerivedSignal.description', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='metadata', full_name='exabel.api.analytics.v1.DerivedSignal.metadata', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, 
default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=148, serialized_end=299)
_DERIVEDSIGNALMETADATA = _descriptor.Descriptor(name='DerivedSignalMetadata', full_name='exabel.api.analytics.v1.DerivedSignalMetadata', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='decimals', full_name='exabel.api.analytics.v1.DerivedSignalMetadata.decimals', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='unit', full_name='exabel.api.analytics.v1.DerivedSignalMetadata.unit', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='type', full_name='exabel.api.analytics.v1.DerivedSignalMetadata.type', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\xe0A\x03', file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=302, serialized_end=493)
# Wire up cross-references between descriptors: message-typed and enum-typed
# fields are pointed at their concrete descriptor objects.
_DERIVEDSIGNAL.fields_by_name['metadata'].message_type = _DERIVEDSIGNALMETADATA
_DERIVEDSIGNALMETADATA.fields_by_name['decimals'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_DERIVEDSIGNALMETADATA.fields_by_name['unit'].enum_type = _DERIVEDSIGNALUNIT
_DERIVEDSIGNALMETADATA.fields_by_name['type'].enum_type = _DERIVEDSIGNALTYPE
# Attach the messages and enums to the file descriptor by name, then
# register the file with the default symbol database.
DESCRIPTOR.message_types_by_name['DerivedSignal'] = _DERIVEDSIGNAL
DESCRIPTOR.message_types_by_name['DerivedSignalMetadata'] = _DERIVEDSIGNALMETADATA
DESCRIPTOR.enum_types_by_name['DerivedSignalUnit'] = _DERIVEDSIGNALUNIT
DESCRIPTOR.enum_types_by_name['DerivedSignalType'] = _DERIVEDSIGNALTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Build the concrete message classes from their descriptors and register
# them so parsing can resolve them by full name.
DerivedSignal = _reflection.GeneratedProtocolMessageType('DerivedSignal', (_message.Message,), {'DESCRIPTOR': _DERIVEDSIGNAL, '__module__': 'exabel.api.analytics.v1.derived_signal_messages_pb2'})
_sym_db.RegisterMessage(DerivedSignal)
DerivedSignalMetadata = _reflection.GeneratedProtocolMessageType('DerivedSignalMetadata', (_message.Message,), {'DESCRIPTOR': _DERIVEDSIGNALMETADATA, '__module__': 'exabel.api.analytics.v1.derived_signal_messages_pb2'})
_sym_db.RegisterMessage(DerivedSignalMetadata)
# Drop the already-consumed serialized options objects.
DESCRIPTOR._options = None
_DERIVEDSIGNALMETADATA.fields_by_name['type']._options = None
6e9a6f61590eb90adefe204d35c91746d95f44eb | 1,217 | py | Python | textpy/__init__.py | shivapbhusal/textpy | 4386798abc5dda9f679708605b78e8d58188cb00 | [
"MIT"
] | null | null | null | textpy/__init__.py | shivapbhusal/textpy | 4386798abc5dda9f679708605b78e8d58188cb00 | [
"MIT"
] | null | null | null | textpy/__init__.py | shivapbhusal/textpy | 4386798abc5dda9f679708605b78e8d58188cb00 | [
"MIT"
] | null | null | null | """
An interface to call the public methods of the TextPy Class.
"""
__author__ = 'Shiva Bhusal'
import textpy
def words(text):
    """Return a list of all the words in *text*.

    Module-level convenience wrapper around ``TextPy.words``.
    """
    return textpy.TextPy().words(text)
def sentences(text):
    """Return a list of the sentences in *text*.

    Module-level convenience wrapper around ``TextPy.sentences``.
    """
    return textpy.TextPy().sentences(text)
def dates(text):
    """Return a list of the dates found in *text*.

    Module-level convenience wrapper around ``TextPy.dates``.
    """
    return textpy.TextPy().dates(text)
def numbers(text):
    """Return a list of the numbers found in *text*.

    Module-level convenience wrapper around ``TextPy.numbers``.
    (The original docstring said "dates" — a copy-paste slip.)
    """
    return textpy.TextPy().numbers(text)
def telephone(text):
    """Return a list of the telephone numbers found in *text*.

    Module-level convenience wrapper around ``TextPy.telephone``.
    (The original docstring said "dates" — a copy-paste slip.)
    """
    return textpy.TextPy().telephone(text)
def urls(text):
    """Return a list of the URLs found in *text*.

    Module-level convenience wrapper around ``TextPy.urls``.
    """
    return textpy.TextPy().urls(text)
def misspelled_words(text):
    """Return a list of the misspelled words found in *text*.

    Module-level convenience wrapper around ``TextPy.misspelled_words``.
    (The original docstring said "dates" — a copy-paste slip.)
    """
    return textpy.TextPy().misspelled_words(text)
| 20.982759 | 69 | 0.622021 | 183 | 1,217 | 4.103825 | 0.196721 | 0.055925 | 0.121172 | 0.177097 | 0.575233 | 0.548602 | 0.479361 | 0.479361 | 0.479361 | 0.479361 | 0 | 0 | 0.271159 | 1,217 | 57 | 70 | 21.350877 | 0.846674 | 0.419885 | 0 | 0.304348 | 0 | 0 | 0.020374 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.304348 | false | 0 | 0.043478 | 0 | 0.652174 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
6ec3329c2ba37dbef2b8abce73036690ea23c265 | 22 | py | Python | tests/unittests/__init__.py | mpetyx/pyrif | 2f7ba863030d7337bb39ad502d1e09e26ac950d2 | [
"MIT"
] | 18 | 2015-01-28T14:18:07.000Z | 2019-01-24T15:21:51.000Z | tests/unittests/__init__.py | mpetyx/pyrif | 2f7ba863030d7337bb39ad502d1e09e26ac950d2 | [
"MIT"
] | 194 | 2015-01-04T15:09:40.000Z | 2016-10-04T09:31:59.000Z | tests/unittests/__init__.py | mpetyx/pyrif | 2f7ba863030d7337bb39ad502d1e09e26ac950d2 | [
"MIT"
] | 4 | 2015-11-27T10:43:52.000Z | 2021-01-28T12:07:54.000Z | __author__ = 'mpetyx'
| 11 | 21 | 0.727273 | 2 | 22 | 6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136364 | 22 | 1 | 22 | 22 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0.272727 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6edcc73fc6177ac6c4a2b801e6ffefeb36b88fbe | 1,490 | py | Python | scirate/paper.py | vprusso/scirate | 4dae1947aaa706c5aa8081e015ffac77dd238eee | [
"MIT"
] | 8 | 2019-02-04T22:01:48.000Z | 2021-04-03T17:33:46.000Z | scirate/paper.py | vprusso/scirate_extractor | 4dae1947aaa706c5aa8081e015ffac77dd238eee | [
"MIT"
] | null | null | null | scirate/paper.py | vprusso/scirate_extractor | 4dae1947aaa706c5aa8081e015ffac77dd238eee | [
"MIT"
] | 3 | 2019-10-13T11:39:59.000Z | 2021-04-03T17:33:47.000Z | """ Scirate paper class."""
from . import author
class SciratePaper:
    """Read-only view over a single SciRate paper record.

    Wraps the raw paper dictionary produced by the API client and exposes
    its entries as properties.
    """

    def __init__(self, paper_dict, client):
        # Keep the raw record and the client that fetched it.
        self._paper_dict = paper_dict
        self._client = client

    def __repr__(self):
        # A paper is represented by its title when printed.
        return self.title

    def _field(self, key):
        # Single access point into the underlying record; raises KeyError
        # for entries absent from the raw dictionary, as before.
        return self._paper_dict[key]

    @property
    def arxiv_id(self):
        """Scirate id of paper."""
        return self._field("id")

    @property
    def submitted_date(self):
        """Date of paper submission."""
        return self._field("submitted_date")

    @property
    def published_date(self):
        """Date of paper publication."""
        return self._field("published_date")

    @property
    def author_comments(self):
        """Author comments on paper."""
        return self._field("author_comments")

    @property
    def title(self):
        """Title of paper."""
        return self._field("title")

    @property
    def abstract(self):
        """Abstract of paper."""
        return self._field("abstract")

    @property
    def scites(self):
        """Number of scites on paper."""
        return self._field("scites")

    @property
    def scitors(self):
        """Users who scited paper."""
        return self._field("scitors")

    @property
    def category(self):
        """Paper publishing category."""
        return self._field("category")

    @property
    def authors(self):
        """Authors of paper."""
        return self._field("authors")
| 23.650794 | 50 | 0.602685 | 165 | 1,490 | 5.2 | 0.224242 | 0.136364 | 0.181818 | 0.221445 | 0.254079 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0.27047 | 1,490 | 62 | 51 | 24.032258 | 0.789328 | 0.186577 | 0 | 0.27027 | 0 | 0 | 0.074783 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.324324 | false | 0 | 0.027027 | 0.027027 | 0.675676 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
42caf6e1786018f9a3c13c4cc11dc5cdfe0f3d11 | 1,951 | py | Python | __init__.py | wjhgg/HtmlTestRunner | 31ff1806f7480f03fecba444e014af6f409fdaa9 | [
"MIT"
] | null | null | null | __init__.py | wjhgg/HtmlTestRunner | 31ff1806f7480f03fecba444e014af6f409fdaa9 | [
"MIT"
] | null | null | null | __init__.py | wjhgg/HtmlTestRunner | 31ff1806f7480f03fecba444e014af6f409fdaa9 | [
"MIT"
] | null | null | null | """
A TestRunner for use with the Python unit testing framework. It
generates a HTML report to show the result at a glance.
------------------------------------------------------------------------
Copyright (c) 2004-2021, bugmaster
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name Wai Yip Tung nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# URL: https://github.com/SeldomQA/HTMLTestRunner
from .HTMLTestRunner import HTMLTestRunner
from .HTMLTestRunner import SMTP
# Package metadata consumed by packaging tooling and report templates.
__author__ = "bugmaster"
__version__ = "1.0.3"
__description__ = "Unittest-based HTML test report."
| 45.372093 | 73 | 0.756535 | 269 | 1,951 | 5.442379 | 0.579926 | 0.02459 | 0.023224 | 0.031421 | 0.125683 | 0.092896 | 0.092896 | 0.092896 | 0.092896 | 0.092896 | 0 | 0.006782 | 0.168631 | 1,951 | 42 | 74 | 46.452381 | 0.895808 | 0.880062 | 0 | 0 | 1 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
42e887e74285075919cfd26cf59b18f8579f6740 | 793 | py | Python | example/models.py | code-kitchen/django-utensils | 635e3e33291fa19020211913f89021fd0b402f29 | [
"MIT"
] | 5 | 2015-05-18T19:25:53.000Z | 2016-08-17T03:48:47.000Z | example/models.py | code-kitchen/django-utensils | 635e3e33291fa19020211913f89021fd0b402f29 | [
"MIT"
] | 3 | 2020-02-11T23:31:07.000Z | 2020-10-01T20:24:23.000Z | example/models.py | code-kitchen/django-utensils | 635e3e33291fa19020211913f89021fd0b402f29 | [
"MIT"
] | null | null | null | from django.db import models
from utensils.models import AddressedModel, TimeStampedModel
class Author(AddressedModel):
    # Name parts: first/last are required; middle_names may be left blank.
    first_name = models.CharField(max_length=255, null=False, blank=False)
    middle_names = models.CharField(max_length=255, null=False, blank=True)
    last_name = models.CharField(max_length=255, null=False, blank=False)

    class Meta:
        # Default queryset ordering: alphabetical by surname.
        ordering = ("last_name",)

    def __unicode__(self):
        # "Last, First" display form.
        # NOTE(review): __unicode__ is only honored on Python 2; on
        # Python 3 Django uses __str__ — confirm the targeted version.
        return "{}, {}".format(self.last_name, self.first_name)
class Book(TimeStampedModel):
    author = models.ForeignKey(Author)
    title = models.CharField(max_length=255, null=False, blank=False)
    # NOTE(review): stored as a small positive integer — presumably a
    # publication year rather than a full date; confirm intended semantics.
    publication_date = models.PositiveSmallIntegerField()
    in_stock = models.BooleanField(default=True)

    def __unicode__(self):
        # NOTE(review): __unicode__ is only honored on Python 2; on
        # Python 3 Django uses __str__ — confirm the targeted version.
        return self.title
| 30.5 | 75 | 0.730139 | 96 | 793 | 5.822917 | 0.416667 | 0.107335 | 0.128801 | 0.171735 | 0.334526 | 0.334526 | 0.334526 | 0.334526 | 0.261181 | 0.178891 | 0 | 0.018072 | 0.162673 | 793 | 25 | 76 | 31.72 | 0.823795 | 0 | 0 | 0.117647 | 0 | 0 | 0.018916 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0 | 0.117647 | 0.117647 | 0.941176 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
6e22b40d999d9e834d52ec57deec9e882fdd2bfb | 86 | py | Python | hub/herald/__init__.py | lbryio/scribe | 709ea1ebcb37de13e531182d0d5ea7b5ac3ebd47 | [
"MIT"
] | 2 | 2022-03-08T18:38:55.000Z | 2022-03-18T15:30:24.000Z | hub/herald/__init__.py | lbryio/scribe | 709ea1ebcb37de13e531182d0d5ea7b5ac3ebd47 | [
"MIT"
] | 10 | 2022-03-21T14:42:35.000Z | 2022-03-31T20:31:23.000Z | hub/herald/__init__.py | lbryio/scribe | 709ea1ebcb37de13e531182d0d5ea7b5ac3ebd47 | [
"MIT"
] | null | null | null | HUB_PROTOCOL_VERSION = "0.107.0"
# Inclusive bounds of the protocol version range this hub supports
# (presumably checked during client version negotiation -- confirm).
PROTOCOL_MIN = (0, 54, 0)
PROTOCOL_MAX = (0, 199, 0)
| 21.5 | 32 | 0.674419 | 16 | 86 | 3.375 | 0.5625 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191781 | 0.151163 | 86 | 3 | 33 | 28.666667 | 0.547945 | 0 | 0 | 0 | 0 | 0 | 0.081395 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6e254e151045ab9e89e14402c070c619e943f730 | 215 | py | Python | string_parser.py | n00b-asaurus/string-parser | fc37f00b0908d3d6db4c72bcba7559f0892c25b8 | [
"MIT"
] | null | null | null | string_parser.py | n00b-asaurus/string-parser | fc37f00b0908d3d6db4c72bcba7559f0892c25b8 | [
"MIT"
] | null | null | null | string_parser.py | n00b-asaurus/string-parser | fc37f00b0908d3d6db4c72bcba7559f0892c25b8 | [
"MIT"
] | null | null | null | class StringParser:
    def __init__(self):
        # Parse trees added via register(); each must expose parse(string).
        self.parse_trees = []
    def register(self,tree):
        """Register a parse tree; it must expose a parse(string) method."""
        self.parse_trees.append(tree)
def parse(self, string):
for tree in self.parse_trees:
return tree.parse(string) | 21.5 | 31 | 0.716279 | 31 | 215 | 4.741935 | 0.451613 | 0.183673 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.167442 | 215 | 10 | 32 | 21.5 | 0.821229 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.375 | false | 0 | 0 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
6e27508d5b87d55d094e9bab208f7d647026b040 | 346 | py | Python | tests/kyu_8_tests/test_even_or_odd.py | simonporter007/codewars-python-solutions | f05799f902b0fa10c5e3a28055bf07758c289087 | [
"MIT"
] | null | null | null | tests/kyu_8_tests/test_even_or_odd.py | simonporter007/codewars-python-solutions | f05799f902b0fa10c5e3a28055bf07758c289087 | [
"MIT"
] | null | null | null | tests/kyu_8_tests/test_even_or_odd.py | simonporter007/codewars-python-solutions | f05799f902b0fa10c5e3a28055bf07758c289087 | [
"MIT"
] | null | null | null |
import unittest
from katas.kyu_8.even_or_odd import even_or_odd
class EvenOrOddTestCases(unittest.TestCase):
    """Unit tests for the kyu-8 even_or_odd kata solution."""

    def test_equals(self):
        # Zero counts as even.
        self.assertEqual(even_or_odd(0), "Even")

    def test_equals_2(self):
        self.assertEqual(even_or_odd(1), "Odd")

    def test_equals_3(self):
        self.assertEqual(even_or_odd(2), "Even")
| 21.625 | 48 | 0.708092 | 52 | 346 | 4.403846 | 0.403846 | 0.131004 | 0.196507 | 0.30131 | 0.366812 | 0.366812 | 0 | 0 | 0 | 0 | 0 | 0.021127 | 0.179191 | 346 | 15 | 49 | 23.066667 | 0.785211 | 0 | 0 | 0 | 0 | 0 | 0.031884 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | false | 0 | 0.222222 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6e30492d6ef42e136619f134aedf5a35f26b7be3 | 528 | py | Python | python/stack/leetcode/min_stack.py | googege/algo-learn | 054d05e8037005c5810906d837de889108dad107 | [
"MIT"
] | 153 | 2020-09-24T12:46:51.000Z | 2022-03-31T21:30:44.000Z | python/stack/leetcode/min_stack.py | googege/algo-learn | 054d05e8037005c5810906d837de889108dad107 | [
"MIT"
] | null | null | null | python/stack/leetcode/min_stack.py | googege/algo-learn | 054d05e8037005c5810906d837de889108dad107 | [
"MIT"
] | 35 | 2020-12-22T11:07:06.000Z | 2022-03-09T03:25:08.000Z | # 最小栈
class MinStack:
    """Stack supporting push/pop/top plus O(1) retrieval of the minimum.

    A parallel stack (``helper``) records, for each position, the minimum
    of every element at or below that position in ``data``.
    """

    def __init__(self):
        self.data = []
        self.helper = []

    def push(self, x: int) -> None:
        """Push x; record the running minimum alongside it."""
        self.data.append(x)
        current_min = x if not self.helper else min(x, self.helper[-1])
        self.helper.append(current_min)

    def pop(self) -> None:
        """Discard the top element from both stacks."""
        self.data.pop()
        self.helper.pop()

    def top(self) -> int:
        """Return (without removing) the most recently pushed value."""
        return self.data[-1]

    def getMin(self) -> int:
        """Return the minimum of all values currently on the stack."""
        return self.helper[-1]
| 19.555556 | 44 | 0.496212 | 66 | 528 | 3.909091 | 0.393939 | 0.232558 | 0.093023 | 0.131783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01173 | 0.354167 | 528 | 26 | 45 | 20.307692 | 0.744868 | 0.077652 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3125 | false | 0 | 0 | 0.125 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 3 |
2817840bfa38cb6f7c0f55e462d241cb01f47b2d | 349 | py | Python | mybitbank/apps/addressbook/models.py | zonedoutspace/mybitbank | 85d28726117a3c1ca76be5772d30c9edae1df7f4 | [
"MIT"
] | null | null | null | mybitbank/apps/addressbook/models.py | zonedoutspace/mybitbank | 85d28726117a3c1ca76be5772d30c9edae1df7f4 | [
"MIT"
] | null | null | null | mybitbank/apps/addressbook/models.py | zonedoutspace/mybitbank | 85d28726117a3c1ca76be5772d30c9edae1df7f4 | [
"MIT"
] | 2 | 2016-06-13T19:57:30.000Z | 2018-09-15T21:03:45.000Z | from django.db import models
class savedAddress(models.Model):
    """An address-book entry: a named cryptocurrency address with a comment."""

    # Display label for the entry.
    name = models.CharField(max_length=200)
    # The stored coin address itself.
    address = models.CharField(max_length=200)
    # Which currency/backend this address belongs to.
    currency = models.CharField(max_length=200)
    comment = models.CharField(max_length=500)
    # Integer status code -- meaning not visible here; confirm against callers.
    status = models.IntegerField()
    # Verbose name 'date published' is shown in the admin for this timestamp.
    entered = models.DateTimeField('date published')
| 26.846154 | 52 | 0.724928 | 41 | 349 | 6.073171 | 0.560976 | 0.240964 | 0.289157 | 0.385542 | 0.325301 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041667 | 0.174785 | 349 | 12 | 53 | 29.083333 | 0.822917 | 0 | 0 | 0 | 0 | 0 | 0.040115 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
28297a4960c9e242470d7e0ee92ab4cf65ba03f1 | 295 | py | Python | MoMMI/Modules/howdoicode.py | T6751/MoMMI | 4b9dd0d49c6e2bd82b82a4893fc35475d4e39e9a | [
"MIT"
] | null | null | null | MoMMI/Modules/howdoicode.py | T6751/MoMMI | 4b9dd0d49c6e2bd82b82a4893fc35475d4e39e9a | [
"MIT"
] | null | null | null | MoMMI/Modules/howdoicode.py | T6751/MoMMI | 4b9dd0d49c6e2bd82b82a4893fc35475d4e39e9a | [
"MIT"
] | null | null | null | from typing import Match
from discord import Message
from MoMMI import MChannel, master, command
@command("howdoicode", "howdoicode")
async def dance(channel: MChannel, match: Match, message: Message) -> None:
await master.client.send(message.channel, "https://hackmd.io/@ss14/howdoicode")
| 36.875 | 83 | 0.766102 | 38 | 295 | 5.947368 | 0.605263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007663 | 0.115254 | 295 | 7 | 84 | 42.142857 | 0.858238 | 0 | 0 | 0 | 0 | 0 | 0.183051 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
286c37a6288a4a80f72019b0617525667c029a24 | 728 | py | Python | flagsense/flagsense.py | flagsense/flagsense-python-sdk | 24f5ccd0efab2bb0f13746c0ec34e2965bcc60af | [
"Apache-2.0"
] | null | null | null | flagsense/flagsense.py | flagsense/flagsense-python-sdk | 24f5ccd0efab2bb0f13746c0ec34e2965bcc60af | [
"Apache-2.0"
] | null | null | null | flagsense/flagsense.py | flagsense/flagsense-python-sdk | 24f5ccd0efab2bb0f13746c0ec34e2965bcc60af | [
"Apache-2.0"
] | null | null | null | from .model.fs_user import FSUser
from .model.fs_flag import FSFlag
from .services.flagsense_service import FlagsenseService
# Cache of FlagsenseService instances, keyed by sdk_id (one service per SDK id).
_flagsense_service_map = {}


def create_service(sdk_id, sdk_secret, environment):
    """Return the FlagsenseService for sdk_id, creating and caching it on first use.

    NOTE(review): not thread-safe -- concurrent first calls for the same
    sdk_id could race and create two services; confirm whether that matters.
    """
    if sdk_id not in _flagsense_service_map:
        _flagsense_service_map[sdk_id] = FlagsenseService(sdk_id, sdk_secret, environment)
    return _flagsense_service_map[sdk_id]
def user(userId, attributes=None):
    """Build an FSUser for flag evaluation; attributes' shape is defined by FSUser."""
    return FSUser(userId, attributes)
def flag(flagId, defaultKey=None, defaultValue=None):
    """Build an FSFlag; defaultKey/defaultValue are presumably fallbacks when
    evaluation cannot resolve the flag -- confirm against FSFlag."""
    return FSFlag(flagId, defaultKey, defaultValue)
# The methods below can be used on the instance returned by create_service():
# initialization_complete()
# wait_for_initialization_complete()
# get_variation(fs_flag, fs_user)
| 28 | 84 | 0.817308 | 98 | 728 | 5.765306 | 0.459184 | 0.141593 | 0.134513 | 0.049558 | 0.173451 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108516 | 728 | 25 | 85 | 29.12 | 0.87057 | 0.226648 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0.166667 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
286fefe7cad4729ef04e66a78dc998f7171051b9 | 172 | py | Python | pages/context_processors.py | dwolfhub/django-blog | 27f8c48bb6e268d2b4d858013c562ae193d19953 | [
"MIT"
] | null | null | null | pages/context_processors.py | dwolfhub/django-blog | 27f8c48bb6e268d2b4d858013c562ae193d19953 | [
"MIT"
] | null | null | null | pages/context_processors.py | dwolfhub/django-blog | 27f8c48bb6e268d2b4d858013c562ae193d19953 | [
"MIT"
] | null | null | null | from .models import Page
def pages_processor(request):
    """Django template context processor exposing all non-home pages.

    Fix: removed the leftover Python 2 ``print pages`` debug statement --
    it is a SyntaxError on Python 3 and stray stdout noise on Python 2.

    Args:
        request: the current HttpRequest (unused, required by the
            context-processor signature).

    Returns:
        dict with key 'pages': a queryset of every Page except the one
        with slug 'home', ordered by title.
    """
    pages = Page.objects.all().exclude(slug='home').order_by('title')
    return {'pages': pages}
| 19.111111 | 69 | 0.686047 | 23 | 172 | 5.043478 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168605 | 172 | 8 | 70 | 21.5 | 0.811189 | 0 | 0 | 0 | 0 | 0 | 0.081395 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.2 | null | null | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
2881ecc73ea2dac62f739547263d8f4f2314da9b | 158 | py | Python | bin/ticket_system_data.py | cmheisel/kardboard | 345b4f4fe01eed5148da5bcf0e30edc9d37b9cc6 | [
"MIT"
] | 7 | 2015-10-21T20:23:36.000Z | 2020-01-21T05:40:42.000Z | bin/ticket_system_data.py | cmheisel/kardboard | 345b4f4fe01eed5148da5bcf0e30edc9d37b9cc6 | [
"MIT"
] | 1 | 2021-12-02T17:56:31.000Z | 2021-12-02T17:56:31.000Z | bin/ticket_system_data.py | cmheisel/kardboard | 345b4f4fe01eed5148da5bcf0e30edc9d37b9cc6 | [
"MIT"
] | 6 | 2015-04-05T06:28:53.000Z | 2020-01-21T05:40:44.000Z | import pprint
import sys
from kardboard.models import Kard
# Look up the Kard whose key was passed as the first CLI argument.
k = Kard.objects.get(key=sys.argv[1])
# Fetch the raw issue data for that card from its configured ticket system.
i = k.ticket_system.get_issue(key=k.key)
# Pretty-print the issue payload for manual inspection.
pprint.pprint(i)
| 15.8 | 40 | 0.759494 | 29 | 158 | 4.068966 | 0.586207 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007143 | 0.113924 | 158 | 9 | 41 | 17.555556 | 0.835714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
288dd900c6a19fa901d54f044b4b45746751dde3 | 2,029 | py | Python | domain/filters.py | carlosvin/pricecalculator | 2c2c409e4a7f3e7d52001b19630a37a4e1a827ae | [
"Apache-2.0"
] | null | null | null | domain/filters.py | carlosvin/pricecalculator | 2c2c409e4a7f3e7d52001b19630a37a4e1a827ae | [
"Apache-2.0"
] | null | null | null | domain/filters.py | carlosvin/pricecalculator | 2c2c409e4a7f3e7d52001b19630a37a4e1a827ae | [
"Apache-2.0"
] | null | null | null | '''
Created on 27/10/2013
@author: carlos
'''
from domain.validator import FloatPossitiveValidator
class Field(object):
    """Describes one user-editable field of a filter (name, type, HTML extras).

    The value property runs the optional validator before storing anything.
    """

    def __init__(self, name, t, description="", extra_params='', validator=None):
        self.name = name
        self.type = t
        self.description = description
        self._value = None
        # HTML attribute string: always a title, then any caller-supplied extras.
        self.extra_params = 'title=' + self.name + ' ' + extra_params
        self._validator = validator

    def __repr__(self):
        # Falsy values (None, 0, '') are all rendered as "None", as before.
        shown = self.value if self.value else "None"
        return u"%s:%s" % (self.name, shown)

    @property
    def value(self):
        """The current (already validated) field value, or None if unset."""
        return self._value

    @value.setter
    def value(self, v):
        validator = self._validator
        if validator:
            validator.validate(v)
        self._value = v
class Filter(object):
    """Abstract base for value filters composed of named Field objects.

    Subclasses must override filter() and get_name().
    """

    def __init__(self):
        # Maps field name -> Field instance.
        self.fields = {}

    def add_field(self, f):
        """Register a Field under its own name."""
        self.fields[f.name] = f

    def del_field(self, field_name):
        """Remove the Field registered under field_name (KeyError if absent)."""
        del self.fields[field_name]

    def filter(self, v):
        """Return whether value v passes this filter. Subclass hook.

        Fix: was ``raise "..."`` -- raising a bare string is a TypeError on
        Python 3 (strings are not BaseException subclasses); use the proper
        exception type for an abstract method.
        """
        raise NotImplementedError("Not implemented, you must override this method")

    @classmethod
    def get_name(cls):
        """Human-readable filter name. Subclass hook (same fix as filter())."""
        raise NotImplementedError("Not implemented, you must override this method")

    @property
    def id(self):
        """Identifier combining the filter's name with its current field values."""
        return self.get_name() + ' ' + str(self.fields.values())

    @staticmethod
    def factory(filter_type):
        """Build a concrete filter from its display name, or None if unknown."""
        if filter_type == FilterLessThan.NAME:
            return FilterLessThan()
        elif filter_type == FilterMoreThan.NAME:
            return FilterMoreThan()
        else:
            return None
class FilterXThan(Filter):
    """Shared base for threshold filters; owns one numeric "price" field."""

    def __init__(self):
        super(FilterXThan, self).__init__()
        price_field = Field(
            "price", "number",
            description="i.e: 0.3",
            extra_params='step=any',
            validator=FloatPossitiveValidator())
        self._field = price_field
        self.add_field(price_field)

    @property
    def value(self):
        """The configured threshold, coerced to float."""
        return float(self._field.value)
class FilterLessThan(FilterXThan):
    """Passes values strictly below the configured threshold."""

    NAME = 'Menor que'

    def filter(self, v):
        return float(v) < self.value

    @classmethod
    def get_name(cls):
        return cls.NAME
class FilterMoreThan(FilterXThan):
    """Passes values strictly above the configured threshold."""

    NAME = 'Mayor que'

    def filter(self, v):
        return float(v) > self.value

    @classmethod
    def get_name(cls):
        return cls.NAME
| 21.135417 | 127 | 0.698374 | 275 | 2,029 | 4.989091 | 0.290909 | 0.040816 | 0.040816 | 0.030612 | 0.257289 | 0.201166 | 0.176385 | 0.176385 | 0.112245 | 0.112245 | 0 | 0.005945 | 0.17102 | 2,029 | 95 | 128 | 21.357895 | 0.80975 | 0.041893 | 0 | 0.328358 | 0 | 0 | 0.082383 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.253731 | false | 0 | 0.014925 | 0.104478 | 0.552239 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
2899006fed0989c1f90904d9d6fc74f985004756 | 493 | py | Python | ascii_decode.py | dm-logv/pyxperiments | f41cf95aa113cb7a8d1f37e200ca33e2aefbc60e | [
"MIT"
] | 2 | 2019-06-27T14:55:07.000Z | 2019-06-27T18:24:29.000Z | ascii_decode.py | dm-logv/pyxperiments | f41cf95aa113cb7a8d1f37e200ca33e2aefbc60e | [
"MIT"
] | null | null | null | ascii_decode.py | dm-logv/pyxperiments | f41cf95aa113cb7a8d1f37e200ca33e2aefbc60e | [
"MIT"
] | null | null | null | """Decode ASCII-encoded messages"""
def decode(coded):
    """Translate a whitespace-separated string of decimal ASCII codes into text."""
    return ''.join(chr(int(code)) for code in coded.split())
if __name__ == '__main__':
    # Demo: decode and print two hard-coded ASCII-code messages.
    print(decode('''
    67 111 109 101 32 105
    110 32 97 32 78 101
    119 32 89 101 97 114
    39 115 32 99 97 112
    32 116 111 32 83 97
    118 101 110 107 111 118
    033
    '''))
    print(decode('''
    77 115 102 112
    117 110 105 108
    104 121
    '''))
| 20.541667 | 53 | 0.527383 | 75 | 493 | 3.36 | 0.626667 | 0.087302 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.390476 | 0.361055 | 493 | 23 | 54 | 21.434783 | 0.409524 | 0.121704 | 0 | 0.235294 | 0 | 0 | 0.635071 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0 | 0 | 0.117647 | 0.117647 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
289bd6c70ba290ceab97e49a649f25ac37a5b6e1 | 322 | py | Python | Code Accdemy/Boolean_If_review.py | JackVoice/Testing | 11e1b83c7b2e51fa0a8cdde67c5c1eab650e018a | [
"Unlicense"
] | null | null | null | Code Accdemy/Boolean_If_review.py | JackVoice/Testing | 11e1b83c7b2e51fa0a8cdde67c5c1eab650e018a | [
"Unlicense"
] | null | null | null | Code Accdemy/Boolean_If_review.py | JackVoice/Testing | 11e1b83c7b2e51fa0a8cdde67c5c1eab650e018a | [
"Unlicense"
] | null | null | null | def applicant_selector(gpa,ps_score,ec_count):
message = "This applicant should be rejected."
if (gpa >= 3):
if (ps_score >= 90):
if (ec_count >= 3):
message = "This applicant should be accepted."
else:
message = "This applicant should be given an in-person interview."
return message
| 32.2 | 73 | 0.652174 | 44 | 322 | 4.659091 | 0.545455 | 0.160976 | 0.292683 | 0.380488 | 0.409756 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016393 | 0.242236 | 322 | 9 | 74 | 35.777778 | 0.82377 | 0 | 0 | 0 | 0 | 0 | 0.378882 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
958e63147038d9d0503debc2e1763692e777c119 | 386 | py | Python | sols/1356.py | Paul11100/LeetCode | 9896c579dff1812c0c76964db8d60603ee715e35 | [
"MIT"
] | null | null | null | sols/1356.py | Paul11100/LeetCode | 9896c579dff1812c0c76964db8d60603ee715e35 | [
"MIT"
] | null | null | null | sols/1356.py | Paul11100/LeetCode | 9896c579dff1812c0c76964db8d60603ee715e35 | [
"MIT"
] | null | null | null | class Solution:
# Sorting key function (Accepted), O(n log n) time, O(n) space
def sortByBits(self, arr: List[int]) -> List[int]:
def tup(n):
return (bin(n).count('1'), n)
return sorted(arr, key=tup)
# One Liner (Top Voted), O(n log n) time, O(n) space
def sortByBits(self, A):
return sorted(A, key=lambda a: (bin(a).count('1'), a))
| 35.090909 | 66 | 0.569948 | 63 | 386 | 3.492063 | 0.460317 | 0.036364 | 0.045455 | 0.054545 | 0.309091 | 0.309091 | 0.309091 | 0.309091 | 0.309091 | 0.309091 | 0 | 0.006993 | 0.259067 | 386 | 10 | 67 | 38.6 | 0.762238 | 0.287565 | 0 | 0 | 0 | 0 | 0.007353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0 | 0.285714 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
95a495477a30bd5fe73cf506705a8430700062bb | 597 | py | Python | tests/test_simple_thread.py | MalteIwanicki/simple_thread | 6b4895dcdceeac5c5ac371222b8de98fd41796e2 | [
"MIT"
] | null | null | null | tests/test_simple_thread.py | MalteIwanicki/simple_thread | 6b4895dcdceeac5c5ac371222b8de98fd41796e2 | [
"MIT"
] | null | null | null | tests/test_simple_thread.py | MalteIwanicki/simple_thread | 6b4895dcdceeac5c5ac371222b8de98fd41796e2 | [
"MIT"
] | null | null | null | import pytest
from simple_thread.simple_thread import SimpleThread
def return_test():
    """Fixture callable: always returns the constant string "test"."""
    result = "test"
    return result
def test_no_parameter_thread():
    """A zero-argument callable run on a SimpleThread yields its return value."""
    worker = SimpleThread(return_test)
    assert worker.join() == return_test()
def double(number):
    """Return twice *number*."""
    doubled = number * 2
    return doubled
def test_one_parameter_thread():
    """A one-argument callable receives its argument through SimpleThread."""
    value = 2
    # NOTE(review): (value) is just value, NOT a 1-tuple -- presumably
    # SimpleThread accepts a bare argument; if a tuple is required this
    # should be (value,). Preserved as-is.
    worker = SimpleThread(double, (value))
    assert worker.join() == double(value)
def adds_a_and_b(a, b):
    """Return the sum of a and b."""
    total = a + b
    return total
def test_two_parameters_thread():
    """A two-argument callable receives both arguments through SimpleThread."""
    first, second = 5, 10
    worker = SimpleThread(adds_a_and_b, (first, second))
    assert worker.join() == adds_a_and_b(first, second)
| 18.090909 | 52 | 0.685092 | 86 | 597 | 4.488372 | 0.290698 | 0.103627 | 0.124352 | 0.069948 | 0.085492 | 0.085492 | 0 | 0 | 0 | 0 | 0 | 0.010549 | 0.20603 | 597 | 32 | 53 | 18.65625 | 0.803797 | 0 | 0 | 0 | 0 | 0 | 0.0067 | 0 | 0 | 0 | 0 | 0 | 0.15 | 1 | 0.3 | false | 0 | 0.1 | 0.15 | 0.55 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
95c3242b3f3b37c983897c23f9d2e66c26edf4c8 | 1,547 | py | Python | pyDS/queue.py | parthsharma2/pyDS | 541e4ebfb90ae11ad7085eb0b4d4771519992bb2 | [
"MIT"
] | null | null | null | pyDS/queue.py | parthsharma2/pyDS | 541e4ebfb90ae11ad7085eb0b4d4771519992bb2 | [
"MIT"
] | 1 | 2018-10-03T12:50:26.000Z | 2018-10-06T07:37:52.000Z | pyDS/queue.py | parthsharma2/pyDS | 541e4ebfb90ae11ad7085eb0b4d4771519992bb2 | [
"MIT"
] | 1 | 2018-10-03T11:13:38.000Z | 2018-10-03T11:13:38.000Z |
class Queue:
    """A FIFO queue backed by a Python list.

    Note: dequeue() is O(n) because list.pop(0) shifts every element.

    Fix: front() and rear() previously *returned* an IndexError instance
    on an empty queue instead of raising it (inconsistent with dequeue(),
    and useless to callers); both now raise.
    """

    def __init__(self):
        self._items = []

    def __repr__(self):
        return "pyDS.queue.Queue({})".format(self._items)

    def __str__(self):
        return str(self._items)

    def __len__(self):
        return len(self._items)

    def is_empty(self):
        """Return True if the queue holds no items."""
        return not self._items

    def enqueue(self, item):
        """Add item at the rear of the queue.

        Args:
            item: The item to be inserted.
        """
        self._items.append(item)

    def dequeue(self):
        """Remove and return the front item.

        Raises:
            IndexError: if the queue is empty.
        """
        if self.is_empty():
            raise IndexError("Queue is empty")
        return self._items.pop(0)

    def front(self):
        """Return (without removing) the front item.

        Raises:
            IndexError: if the queue is empty.
        """
        if self.is_empty():
            raise IndexError("Queue is empty")
        return self._items[0]

    def rear(self):
        """Return (without removing) the rear item.

        Raises:
            IndexError: if the queue is empty.
        """
        if self.is_empty():
            raise IndexError("Queue is empty")
        return self._items[-1]
| 22.1 | 76 | 0.539108 | 181 | 1,547 | 4.447514 | 0.270718 | 0.100621 | 0.074534 | 0.059627 | 0.431056 | 0.396273 | 0.31677 | 0.31677 | 0.31677 | 0.31677 | 0 | 0.003033 | 0.360698 | 1,547 | 69 | 77 | 22.42029 | 0.81092 | 0.323853 | 0 | 0.285714 | 0 | 0 | 0.070136 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.321429 | false | 0 | 0 | 0.107143 | 0.678571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.