hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
038b1cfcc645cf8cb82fb4e333f1b1f1e4df4eb8 | 382 | py | Python | test.py | lesibius/DSGE | 70ba2548cf25fa338c514de33b78c90de8b9e9f6 | [
"MIT"
] | null | null | null | test.py | lesibius/DSGE | 70ba2548cf25fa338c514de33b78c90de8b9e9f6 | [
"MIT"
] | null | null | null | test.py | lesibius/DSGE | 70ba2548cf25fa338c514de33b78c90de8b9e9f6 | [
"MIT"
] | 1 | 2020-12-29T02:55:10.000Z | 2020-12-29T02:55:10.000Z | from os import getcwd
from os.path import join
from DSGE.Econ_model import Econ_model
from DSGE.Equation_parser import Econ_model_parser, get_dependencies
from DSGE.Computation import make_equations, evaluate_function_tree
model = Econ_model('IMF',join(getcwd(),'models','test','simple_model'),join(getcwd(),'models','test','params'))
model(10,4)
print(model.results['F'][0])
| 25.466667 | 111 | 0.780105 | 58 | 382 | 4.948276 | 0.517241 | 0.125436 | 0.10453 | 0.139373 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011461 | 0.086387 | 382 | 14 | 112 | 27.285714 | 0.810888 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.625 | 0 | 0.625 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
03966e70071c773523978791d9be1ff9c8ee79f9 | 742 | py | Python | vframe_cli/commands/templates/simple.py | ngi-nix/vframe | 60469e25203136f9d6a5ecaabe2423695ee9a0f2 | [
"MIT"
] | 1 | 2021-05-15T11:06:39.000Z | 2021-05-15T11:06:39.000Z | vframe_cli/commands/templates/simple.py | julescarbon/vframe | 0798841fa9eb7e1252e4cdf71d68d991c26acab8 | [
"MIT"
] | null | null | null | vframe_cli/commands/templates/simple.py | julescarbon/vframe | 0798841fa9eb7e1252e4cdf71d68d991c26acab8 | [
"MIT"
] | null | null | null | #############################################################################
#
# VFRAME
# MIT License
# Copyright (c) 2020 Adam Harvey and VFRAME
# https://vframe.io
#
#############################################################################
import click
@click.command('')
@click.pass_context
def cli(ctx):
"""Simple template"""
# ------------------------------------------------
# imports
from os.path import join
from vframe.settings import app_cfg
# ------------------------------------------------
# start
log = app_cfg.LOG
log.info('Template info message')
log.debug('Template debug message')
log.warn('Template warn message')
log.error('Template error message')
log.info(f'{app_cfg.UCODE_OK}') | 21.823529 | 78 | 0.447439 | 69 | 742 | 4.73913 | 0.565217 | 0.122324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006182 | 0.128032 | 742 | 34 | 79 | 21.823529 | 0.499227 | 0.281671 | 0 | 0 | 0 | 0 | 0.284932 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0.083333 | 0.25 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
03ace17174a8efcf06638dddfc1637911ad80da2 | 2,324 | py | Python | Control.py | OhadNir/CPU_Pipline_Emulator | 1f824192f19e2a6002b830be73ce27dc5b3c821f | [
"MIT"
] | null | null | null | Control.py | OhadNir/CPU_Pipline_Emulator | 1f824192f19e2a6002b830be73ce27dc5b3c821f | [
"MIT"
] | null | null | null | Control.py | OhadNir/CPU_Pipline_Emulator | 1f824192f19e2a6002b830be73ce27dc5b3c821f | [
"MIT"
] | null | null | null | '''
Create the Control class:
Deal with hazards
Forwarding
Branch handling
'''
class Control(object):
def __init__(self, ForwardStatus):
self.DataHazardFlag=False
self.ControlHazardFlag=False
self.forwardFlag=ForwardStatus
'''
Forward keys:
0: Inactive
1: Execution forward
2: Memory forward
'''
def checkDataHazards(self, pipeline_registers):
self.DataHazardFlag=False
IDEX=1
EXMEM=2
MEMWB=3
EXHazard=False
if pipeline_registers[EXMEM].input is not None and pipeline_registers[EXMEM].input.full_instr != "nop":
if pipeline_registers[EXMEM].input is not None and pipeline_registers[IDEX].input is not None and pipeline_registers[EXMEM].input.RD==pipeline_registers[IDEX].input.RS:
self.DataHazardFlag=True
EXHazard=True
if pipeline_registers[EXMEM].input is not None and pipeline_registers[IDEX].input is not None and pipeline_registers[EXMEM].input.RD==pipeline_registers[IDEX].input.RT:
self.DataHazardFlag=True
EXHazard=True
if pipeline_registers[MEMWB].input is not None and pipeline_registers[MEMWB].input.full_instr != "nop":
if (not EXHazard) and pipeline_registers[MEMWB].input is not None and pipeline_registers[IDEX].input is not None and pipeline_registers[MEMWB].input.RD==pipeline_registers[IDEX].input.RS:
self.DataHazardFlag=True
if (not EXHazard) and pipeline_registers[MEMWB].input is not None and pipeline_registers[IDEX].input is not None and pipeline_registers[MEMWB].input.RD==pipeline_registers[IDEX].input.RT:
self.DataHazardFlag=True
def BranchValue(self, pipeline_registers, branch_labels):
MEMWB=3
instr=pipeline_registers[MEMWB].input
if instr.operation=="beq" and instr.RS==instr.RT:
return branch_labels[instr.RD]
if instr.operation=="bne" and instr.RS!=instr.RT:
return branch_labels[instr.RD]
return -1
def CheckBranch(self, pipeline_registers, branch_labels):
MEMWB=3
instr=pipeline_registers[MEMWB].input
if instr is not None and instr.isBranch:
return self.BranchValue(pipeline_registers, branch_labels)
return -1
| 43.037037 | 199 | 0.685026 | 289 | 2,324 | 5.380623 | 0.193772 | 0.284244 | 0.154341 | 0.084887 | 0.697106 | 0.681672 | 0.681672 | 0.681672 | 0.664952 | 0.65209 | 0 | 0.00558 | 0.228916 | 2,324 | 53 | 200 | 43.849057 | 0.862165 | 0.03012 | 0 | 0.459459 | 0 | 0 | 0.005571 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.108108 | false | 0 | 0 | 0 | 0.27027 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
03b1050e79f49de884e4d4e5a2f405c97c495ff3 | 1,692 | py | Python | 11-funcoes/11-funcoes.py | ROGERIOPDOSSANTOS/AulaPythonBasico | d67f3d66ab3552168326c121755e314e98d8be79 | [
"MIT"
] | null | null | null | 11-funcoes/11-funcoes.py | ROGERIOPDOSSANTOS/AulaPythonBasico | d67f3d66ab3552168326c121755e314e98d8be79 | [
"MIT"
] | null | null | null | 11-funcoes/11-funcoes.py | ROGERIOPDOSSANTOS/AulaPythonBasico | d67f3d66ab3552168326c121755e314e98d8be79 | [
"MIT"
] | null | null | null | # Código utilizado no livro Aprenda Python Básico - Rápido e Fácil de entender, de Felipe Galvão
# Mais informações sobre o livro: http://felipegalvao.com.br/livros
# Capítulo 11: Funções
# Definição de função sem parâmetros e sem valor de retorno
def print_ola_tres_vezes():
print("Ola Python")
print("Ola Python")
print("Ola Python")
print_ola_tres_vezes()
# Definição de uma função com parâmetro e valor de retorno
def numero_ao_cubo(numero):
valor_a_retornar = numero * numero * numero
return(valor_a_retornar)
numero = numero_ao_cubo(4)
print(numero)
# A chamada da função acima sem argumento retorna um erro
# numero = numero_ao_cubo()
# Definição de função com parâmetro com valor padrão
def print_ola(nome="estranho"):
print("Olá, " + nome)
print_ola("Priscilla")
print_ola()
# Chamada de função com parâmetros nomeados
def print_infos(nome, idade):
print("Olá, meu nome é %s e tenho %d anos" % (nome, idade))
print_infos(idade=30, nome="Felipe")
# Definição da função com número variável de argumentos: *args e **kwargs
def print_tudo_2_vezes(*args):
for parametro in args:
print(parametro + "! " + parametro + "!")
print_tudo_2_vezes("Olá", "Python", "Felipe")
def print_info(**kwargs):
for parametro, valor in kwargs.items():
print(parametro + " - " + str(valor))
print_info(nome="Felipe", idade=30, nacionalidade="Brasil")
def print_info_2(nome, idade, **kwargs):
print("Nome: " + nome)
print("Idade: " + str(idade))
print("\nInformações adicionais:")
for parametro, valor in kwargs.items():
print(parametro + " - " + str(valor))
print_info_2(nome="Felipe", idade=30, nacionalidade="Brasil", telefone="999998888") | 28.677966 | 96 | 0.716312 | 243 | 1,692 | 4.851852 | 0.358025 | 0.054283 | 0.035623 | 0.048346 | 0.25106 | 0.206955 | 0.145886 | 0.145886 | 0.103478 | 0.103478 | 0 | 0.015482 | 0.160165 | 1,692 | 59 | 97 | 28.677966 | 0.814215 | 0.320922 | 0 | 0.21875 | 0 | 0 | 0.164179 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.21875 | false | 0 | 0 | 0 | 0.21875 | 0.78125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
03b1c1e07378f0d67dfac6def537adb5de504d87 | 487 | py | Python | test/test_user.py | fixxyzeal/FSP-DataAPI | e92fb0f6b29fbd86e77b220c9b90b4afa2ae3a5d | [
"MIT"
] | null | null | null | test/test_user.py | fixxyzeal/FSP-DataAPI | e92fb0f6b29fbd86e77b220c9b90b4afa2ae3a5d | [
"MIT"
] | null | null | null | test/test_user.py | fixxyzeal/FSP-DataAPI | e92fb0f6b29fbd86e77b220c9b90b4afa2ae3a5d | [
"MIT"
] | null | null | null | from bl.user import Authenticate
from bl.crypto import InsertHistory
import os
from dotenv import load_dotenv
from flask import Flask
from flask_pymongo import PyMongo
load_dotenv()
app = Flask(__name__)
app.config['MONGO_DBNAME'] = os.environ.get('DB')
app.config['MONGO_URI'] = os.environ.get('DBCONNECTION')
mongo = PyMongo(app)
def test_Authenticate():
assert len(Authenticate(mongo, "a", "abb")) == 2
def test_InsertHistory():
assert InsertHistory(mongo, "EVX", 49, 3)
| 23.190476 | 56 | 0.749487 | 69 | 487 | 5.130435 | 0.463768 | 0.033898 | 0.079096 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009434 | 0.129363 | 487 | 20 | 57 | 24.35 | 0.825472 | 0 | 0 | 0 | 0 | 0 | 0.086242 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 1 | 0.133333 | false | 0 | 0.4 | 0 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
03cc5b86b045d275d2e02770dab09f52b2421263 | 888 | py | Python | examples/example_basic/example_basic/translate/views.py | adamziel/django_translate | f93b7bce518a7f3aa0130f854ef36bd4c26ab9ba | [
"MIT"
] | 12 | 2015-04-16T02:58:49.000Z | 2018-03-01T13:03:55.000Z | examples/example_basic/example_basic/translate/views.py | adamziel/django_translate | f93b7bce518a7f3aa0130f854ef36bd4c26ab9ba | [
"MIT"
] | 10 | 2015-04-09T22:25:16.000Z | 2021-06-10T17:39:36.000Z | examples/example_basic/example_basic/translate/views.py | adamziel/django_translate | f93b7bce518a7f3aa0130f854ef36bd4c26ab9ba | [
"MIT"
] | 5 | 2016-10-25T12:23:45.000Z | 2020-07-27T10:59:00.000Z | from django.http import HttpResponse
from django.shortcuts import render, render_to_response
from django.template import RequestContext, loader
from django_translate.services import trans as _, transchoice
def hello(request):
return render_to_response("hello.html", context=RequestContext(request))
def apples(request):
return render_to_response("apples.html", context=RequestContext(request))
def pythonic_apples(request):
return render_to_response("apples_python.html", {"rendered":
u"<h1>{0}</h1>"
"<p>{1}</p>"
"<p>{2}</p>"
"<p>{3}</p>".format(
_("apples.header"),
_("apples.want_some", {"fruits": "apples"}),
transchoice("apples.praise_n", 1),
transchoice("apples.praise_n", 3)
)
})
def po(request):
return render_to_response("po.html", context=RequestContext(request))
| 31.714286 | 77 | 0.664414 | 106 | 888 | 5.386792 | 0.396226 | 0.070053 | 0.140105 | 0.14711 | 0.367776 | 0.143608 | 0.143608 | 0 | 0 | 0 | 0 | 0.011127 | 0.190315 | 888 | 27 | 78 | 32.888889 | 0.783032 | 0 | 0 | 0 | 0 | 0 | 0.188063 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.181818 | 0.181818 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
03db7b6c965b3b805e8c187a38d6bb55e90e4054 | 12,940 | py | Python | pysnmp/RM2-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/RM2-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/RM2-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module RM2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RM2-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:49:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, MibIdentifier, IpAddress, Counter64, snmpModules, Integer32, Bits, iso, ModuleIdentity, Gauge32, Unsigned32, ObjectName, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, Counter32, ObjectIdentity, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "MibIdentifier", "IpAddress", "Counter64", "snmpModules", "Integer32", "Bits", "iso", "ModuleIdentity", "Gauge32", "Unsigned32", "ObjectName", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "Counter32", "ObjectIdentity", "TimeTicks")
TextualConvention, TimeStamp, RowStatus, TestAndIncr, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TimeStamp", "RowStatus", "TestAndIncr", "DisplayString", "TruthValue")
lucent = MibIdentifier((1, 3, 6, 1, 4, 1, 1751))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1))
softSwitch = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198))
resourceMonitor = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4))
rm2 = ModuleIdentity((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2))
if mibBuilder.loadTexts: rm2.setLastUpdated('240701')
if mibBuilder.loadTexts: rm2.setOrganization('Lucent Technologies')
rmSystem = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 1))
rmDiskGrp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2))
rmCpuGrp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3))
rmFileGrp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4))
rmProcessGrp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5))
rmDescr = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmDescr.setStatus('current')
rmObjectID = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 1, 2), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmObjectID.setStatus('current')
rmUpTime = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 1, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmUpTime.setStatus('current')
rmNetAddress = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmNetAddress.setStatus('current')
rmControl = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmControl.setStatus('current')
diskPeriod = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: diskPeriod.setStatus('current')
diskUsageWarningPct = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: diskUsageWarningPct.setStatus('current')
diskUsageAlarmPct = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: diskUsageAlarmPct.setStatus('current')
duNumber = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2048))).setMaxAccess("readonly")
if mibBuilder.loadTexts: duNumber.setStatus('current')
diskUsageTable = MibTable((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 5), )
if mibBuilder.loadTexts: diskUsageTable.setStatus('current')
diskUsageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 5, 1), ).setIndexNames((0, "RM2-MIB", "duIndex"))
if mibBuilder.loadTexts: diskUsageEntry.setStatus('current')
duIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2048))).setMaxAccess("readonly")
if mibBuilder.loadTexts: duIndex.setStatus('current')
duFSName = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 5, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: duFSName.setStatus('current')
duSize = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 5, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: duSize.setStatus('current')
duPctUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 2, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: duPctUsed.setStatus('current')
cpuPeriod = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpuPeriod.setStatus('current')
cpuUtilization = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpuUtilization.setStatus('current')
cpuUtilWarningPct = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpuUtilWarningPct.setStatus('current')
cpuUtilAlarmPct = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpuUtilAlarmPct.setStatus('current')
cpuLoad = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpuLoad.setStatus('current')
cpuLoadWarningThreshold = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpuLoadWarningThreshold.setStatus('current')
cpuLoadAlarmThreshold = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 3, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpuLoadAlarmThreshold.setStatus('current')
filePeriod = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: filePeriod.setStatus('current')
fmNumber = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fmNumber.setStatus('current')
fmTable = MibTable((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 3), )
if mibBuilder.loadTexts: fmTable.setStatus('current')
fmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 3, 1), ).setIndexNames((0, "RM2-MIB", "fmIndex"))
if mibBuilder.loadTexts: fmEntry.setStatus('current')
fmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fmIndex.setStatus('current')
fmName = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 3, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fmName.setStatus('current')
fmCurSize = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fmCurSize.setStatus('current')
fmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fmThreshold.setStatus('current')
archiveDir = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 4, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: archiveDir.setStatus('current')
processPeriod = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: processPeriod.setStatus('current')
processNumber = MibScalar((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processNumber.setStatus('current')
processTable = MibTable((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3), )
if mibBuilder.loadTexts: processTable.setStatus('current')
processEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1), ).setIndexNames((0, "RM2-MIB", "processIndex"))
if mibBuilder.loadTexts: processEntry.setStatus('current')
processIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processIndex.setStatus('current')
processID = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processID.setStatus('current')
processName = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: processName.setStatus('current')
processUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processUpTime.setStatus('current')
processCPUUsageWarnMark = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processCPUUsageWarnMark.setStatus('current')
processCPUUsageAlarmMark = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processCPUUsageAlarmMark.setStatus('current')
processCPUUsageCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processCPUUsageCurrent.setStatus('current')
processMemUsageAlarmMark = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processMemUsageAlarmMark.setStatus('current')
processMemUsageCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 1751, 1, 1198, 4, 2, 5, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8192))).setMaxAccess("readonly")
if mibBuilder.loadTexts: processMemUsageCurrent.setStatus('current')
mibBuilder.exportSymbols("RM2-MIB", fmNumber=fmNumber, rmCpuGrp=rmCpuGrp, fmName=fmName, rmProcessGrp=rmProcessGrp, lucent=lucent, duFSName=duFSName, cpuUtilAlarmPct=cpuUtilAlarmPct, softSwitch=softSwitch, processName=processName, processCPUUsageWarnMark=processCPUUsageWarnMark, cpuLoadWarningThreshold=cpuLoadWarningThreshold, diskUsageAlarmPct=diskUsageAlarmPct, fmIndex=fmIndex, diskUsageTable=diskUsageTable, rmDescr=rmDescr, cpuUtilWarningPct=cpuUtilWarningPct, cpuLoadAlarmThreshold=cpuLoadAlarmThreshold, duNumber=duNumber, cpuUtilization=cpuUtilization, fmThreshold=fmThreshold, rmObjectID=rmObjectID, PYSNMP_MODULE_ID=rm2, rmUpTime=rmUpTime, rmSystem=rmSystem, products=products, fmCurSize=fmCurSize, processMemUsageAlarmMark=processMemUsageAlarmMark, duIndex=duIndex, fmTable=fmTable, diskUsageEntry=diskUsageEntry, processID=processID, processIndex=processIndex, rm2=rm2, rmFileGrp=rmFileGrp, rmControl=rmControl, archiveDir=archiveDir, processCPUUsageCurrent=processCPUUsageCurrent, processPeriod=processPeriod, processNumber=processNumber, fmEntry=fmEntry, rmNetAddress=rmNetAddress, diskUsageWarningPct=diskUsageWarningPct, cpuPeriod=cpuPeriod, duPctUsed=duPctUsed, processTable=processTable, processEntry=processEntry, processMemUsageCurrent=processMemUsageCurrent, duSize=duSize, resourceMonitor=resourceMonitor, processUpTime=processUpTime, cpuLoad=cpuLoad, processCPUUsageAlarmMark=processCPUUsageAlarmMark, rmDiskGrp=rmDiskGrp, diskPeriod=diskPeriod, filePeriod=filePeriod)
| 112.521739 | 1,493 | 0.740649 | 1,639 | 12,940 | 5.846248 | 0.106772 | 0.012106 | 0.016907 | 0.022542 | 0.564496 | 0.520246 | 0.441244 | 0.44114 | 0.438322 | 0.435817 | 0 | 0.109338 | 0.09459 | 12,940 | 114 | 1,494 | 113.508772 | 0.708518 | 0.023802 | 0 | 0 | 0 | 0 | 0.096174 | 0.003486 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.056075 | 0 | 0.056075 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
03dc14d058ea88276ca19c0ae868ae97f0fef295 | 901 | py | Python | ccnlab/utils.py | nikhilxb/ccnlab | 5325e9f4160eb61806b1258b89776461fefeebb5 | [
"MIT"
] | 5 | 2021-06-15T06:34:28.000Z | 2021-07-15T07:30:49.000Z | ccnlab/utils.py | nikhilxb/ccnlab | 5325e9f4160eb61806b1258b89776461fefeebb5 | [
"MIT"
] | null | null | null | ccnlab/utils.py | nikhilxb/ccnlab | 5325e9f4160eb61806b1258b89776461fefeebb5 | [
"MIT"
] | 1 | 2021-08-19T15:29:55.000Z | 2021-08-19T15:29:55.000Z | import fnmatch
from collections import defaultdict
def listdict_to_dictlist(ld):
dl = defaultdict(list)
keys = set(k for d in ld for k in d)
for d in ld:
for k in keys:
dl[k].append(d[k] if k in d else None)
return dl
class SearchableRegistry:
    """Registry of named items that can be looked up by glob patterns."""

    def __init__(self):
        self.items = {}

    def __call__(self, *args, **kwargs):
        # Calling the registry is shorthand for `get`.
        return self.get(*args, **kwargs)

    def register(self, item):
        # Items are keyed by their own __name__.
        self.items[item.__name__] = item

    def get(self, *globs, call=True):
        """Return items whose name matches any of the given globs.

        With no globs, every registered item matches. By default each item
        is assumed callable with no arguments and is called; pass
        ``call=False`` to get the raw items instead.
        """
        names = self.items.keys()
        if globs:
            names = [name for name in names
                     if any(fnmatch.fnmatch(name, pattern) for pattern in globs)]
        if call:
            return [self.items[name]() for name in names]
        return [self.items[name] for name in names]
| 29.064516 | 94 | 0.675916 | 145 | 901 | 4.103448 | 0.441379 | 0.07563 | 0.020168 | 0.026891 | 0.107563 | 0.047059 | 0.047059 | 0 | 0 | 0 | 0 | 0.001414 | 0.215316 | 901 | 30 | 95 | 30.033333 | 0.84017 | 0.190899 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.238095 | false | 0 | 0.095238 | 0.047619 | 0.52381 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
03e234a8bbe4103f9efefc5de35e7e61646a40a2 | 589 | py | Python | setup.py | alejandroperezcosio/ready-python-logs | fdae2846da9e20fc0090d0081569d7d0071c6e3f | [
"MIT"
] | null | null | null | setup.py | alejandroperezcosio/ready-python-logs | fdae2846da9e20fc0090d0081569d7d0071c6e3f | [
"MIT"
] | null | null | null | setup.py | alejandroperezcosio/ready-python-logs | fdae2846da9e20fc0090d0081569d7d0071c6e3f | [
"MIT"
] | null | null | null | import setuptools
# Read the long description up front inside a context manager so the file
# handle is closed deterministically (the original inlined
# `open('README.md').read()`, which leaks the handle).
with open('README.md', encoding='utf-8') as readme_file:
    _long_description = readme_file.read()

setuptools.setup(
    name='ready-python-logs',
    version='0.3',
    author="Alejandro Perez Cosio",
    author_email="alejandroperezcosio@gmail.com",
    url="https://github.com/alejandroperezcosio/ready-python-logs",
    license='MIT',
    long_description=_long_description,
    packages=setuptools.find_packages(),
    install_requires=[
        'ready-python-config @ git+https://github.com/alejandroperezcosio/ready-python-config.git#egg=rpyconfig'
    ],
    dependency_links=[
        'http://github.com/user/repo/tarball/master#egg=rpyconfig'
    ]
)
| 31 | 112 | 0.696095 | 67 | 589 | 6.044776 | 0.656716 | 0.108642 | 0.074074 | 0.162963 | 0.217284 | 0.217284 | 0 | 0 | 0 | 0 | 0 | 0.003992 | 0.149406 | 589 | 18 | 113 | 32.722222 | 0.804391 | 0 | 0 | 0 | 0 | 0.058824 | 0.502547 | 0.049236 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.058824 | 0 | 0.058824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
03e57b0ec51c818a949cb2c68ac9c37101b39681 | 222 | py | Python | addsuptok.py | priyankk01/guvi | 17ccfcf81252ce4f6a0d9c9226b4769fc45993d2 | [
"MIT"
] | null | null | null | addsuptok.py | priyankk01/guvi | 17ccfcf81252ce4f6a0d9c9226b4769fc45993d2 | [
"MIT"
] | null | null | null | addsuptok.py | priyankk01/guvi | 17ccfcf81252ce4f6a0d9c9226b4769fc45993d2 | [
"MIT"
] | null | null | null | nk=list(map(int,input().split()))
n=nk[0]
k=nk[1]
l=list(map(int,input().split()))
f=0
for i in range(len(l)-1):
if(l[i]+l[i+1]==k):
print('yes')
f=1
break
if(f==0):
print('no')
| 17.076923 | 34 | 0.463964 | 44 | 222 | 2.340909 | 0.5 | 0.135922 | 0.194175 | 0.291262 | 0.38835 | 0 | 0 | 0 | 0 | 0 | 0 | 0.042945 | 0.265766 | 222 | 12 | 35 | 18.5 | 0.588957 | 0 | 0 | 0 | 0 | 0 | 0.02381 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
03f19a98da2b23376ba8c3b60181334e2a7358bf | 2,913 | py | Python | PreprocessData/all_class_files/ContactPoint.py | wkid-neu/Schema | 4854720a15894dd814691a55e03329ecbbb6f558 | [
"MIT"
] | 3 | 2021-11-06T12:29:05.000Z | 2022-03-22T12:48:55.000Z | PreprocessData/all_class_files/ContactPoint.py | DylanNEU/Schema | 4854720a15894dd814691a55e03329ecbbb6f558 | [
"MIT"
] | null | null | null | PreprocessData/all_class_files/ContactPoint.py | DylanNEU/Schema | 4854720a15894dd814691a55e03329ecbbb6f558 | [
"MIT"
] | 1 | 2021-11-06T12:29:12.000Z | 2021-11-06T12:29:12.000Z | from PreprocessData.all_class_files.StructuredValue import StructuredValue
import global_data
class ContactPoint(StructuredValue):
    """Schema.org ``ContactPoint``: contact details for a person/organization.

    All constructor arguments are optional and are stored as attributes.
    Every non-None attribute assignment goes through the custom
    ``__setattr__`` below, which validates the assigned value list against
    the allowed type names recorded in ``global_data.get_table()``.
    """
    def __init__(self, additionalType=None, alternateName=None, description=None, disambiguatingDescription=None, identifier=None, image=None, mainEntityOfPage=None, name=None, potentialAction=None, sameAs=None, url=None, areaServed=None, availableLanguage=None, contactOption=None, contactType=None, email=None, faxNumber=None, hoursAvailable=None, productSupported=None, telephone=None):
        # Generic schema.org properties are delegated to the base class.
        StructuredValue.__init__(self, additionalType, alternateName, description, disambiguatingDescription, identifier, image, mainEntityOfPage, name, potentialAction, sameAs, url)
        # ContactPoint-specific properties (each assignment is validated
        # by __setattr__ unless the value is None).
        self.areaServed = areaServed
        self.availableLanguage = availableLanguage
        self.contactOption = contactOption
        self.contactType = contactType
        self.email = email
        self.faxNumber = faxNumber
        self.hoursAvailable = hoursAvailable
        self.productSupported = productSupported
        self.telephone = telephone
    # Plain getters/setters, one pair per ContactPoint property.
    def set_areaServed(self, areaServed):
        self.areaServed = areaServed
    def get_areaServed(self):
        return self.areaServed
    def set_availableLanguage(self, availableLanguage):
        self.availableLanguage = availableLanguage
    def get_availableLanguage(self):
        return self.availableLanguage
    def set_contactOption(self, contactOption):
        self.contactOption = contactOption
    def get_contactOption(self):
        return self.contactOption
    def set_contactType(self, contactType):
        self.contactType = contactType
    def get_contactType(self):
        return self.contactType
    def set_email(self, email):
        self.email = email
    def get_email(self):
        return self.email
    def set_faxNumber(self, faxNumber):
        self.faxNumber = faxNumber
    def get_faxNumber(self):
        return self.faxNumber
    def set_hoursAvailable(self, hoursAvailable):
        self.hoursAvailable = hoursAvailable
    def get_hoursAvailable(self):
        return self.hoursAvailable
    def set_productSupported(self, productSupported):
        self.productSupported = productSupported
    def get_productSupported(self):
        return self.productSupported
    def set_telephone(self, telephone):
        self.telephone = telephone
    def get_telephone(self):
        return self.telephone
    def __setattr__(self, key, value_list):
        # None values and the internal "node_id" bookkeeping attribute are
        # stored without validation.
        if type(value_list).__name__ == "NoneType" or key == "node_id":
            self.__dict__[key] = value_list
            return
        # Every element of the assigned list must have a type name allowed
        # for this property in the global schema table.
        for value in value_list:
            str_value = type(value).__name__
            if str_value not in global_data.get_table()[key]:
                # Original (Chinese) error message: "Illegal type!"
                raise ValueError("非法类型!")
        self.__dict__[key] = value_list
| 35.52439 | 390 | 0.691727 | 283 | 2,913 | 6.911661 | 0.19788 | 0.027607 | 0.064417 | 0.025562 | 0.02045 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.235839 | 2,913 | 81 | 391 | 35.962963 | 0.878706 | 0 | 0 | 0.338983 | 0 | 0 | 0.007062 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.338983 | false | 0 | 0.033898 | 0.152542 | 0.559322 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
ff062a83d830c556715143f2efa7fe77a17b2e33 | 483 | py | Python | {{cookiecutter.project_slug}}/{{cookiecutter.pkg_name}}/views/_route.py | hilarryxu/cookiecutter-zweb | c207c77f2ddb63a3ba5c64d5761b6aaeddb77bcd | [
"BSD-3-Clause"
] | null | null | null | {{cookiecutter.project_slug}}/{{cookiecutter.pkg_name}}/views/_route.py | hilarryxu/cookiecutter-zweb | c207c77f2ddb63a3ba5c64d5761b6aaeddb77bcd | [
"BSD-3-Clause"
] | null | null | null | {{cookiecutter.project_slug}}/{{cookiecutter.pkg_name}}/views/_route.py | hilarryxu/cookiecutter-zweb | c207c77f2ddb63a3ba5c64d5761b6aaeddb77bcd | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
class Route(object):
    """Decorator-based registry of (url, handler, kwargs, name) tuples.

    When ``base_url`` is set, every registered url is mounted under that
    prefix.
    """
    def __init__(self, base_url=None):
        self.base_url = base_url
        self.handlers = []

    def __call__(self, url, **kwds):
        # 'name' is pulled out of the kwargs; the rest travel with the handler.
        name = kwds.pop('name', None)
        if self.base_url:
            url = '/{0}/{1}'.format(self.base_url.strip('/'), url.lstrip('/'))

        def decorator(handler_cls):
            self.handlers.append((url, handler_cls, kwds, name))
            return handler_cls
        return decorator
# Module-level registries: `route` for plain handlers, `api_route` for
# handlers mounted under the '/api' prefix.
route = Route()
api_route = Route('api')
| 21 | 72 | 0.519669 | 58 | 483 | 4.051724 | 0.413793 | 0.148936 | 0.187234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003003 | 0.310559 | 483 | 22 | 73 | 21.954545 | 0.702703 | 0.043478 | 0 | 0 | 0 | 0 | 0.023913 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ff181d00b426e1d4d4c7d3c9984673e6d113b81b | 672 | py | Python | RGBled.py | HaroldPetersInskipp/Raspberry-Pi-GPIO-Scripts | b29266eb06f18e84606c4868ddb5afe11d87f737 | [
"Unlicense"
] | null | null | null | RGBled.py | HaroldPetersInskipp/Raspberry-Pi-GPIO-Scripts | b29266eb06f18e84606c4868ddb5afe11d87f737 | [
"Unlicense"
] | null | null | null | RGBled.py | HaroldPetersInskipp/Raspberry-Pi-GPIO-Scripts | b29266eb06f18e84606c4868ddb5afe11d87f737 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
from gpiozero import RGBLED
from time import sleep
led = RGBLED(18,19,21,active_high=True)
while True:
#Red
led.color = (1,0,0)
sleep(1)
led.off()
#Green
led.color = (0,1,0)
sleep(1)
led.off()
#Blue
led.color = (0,0,1)
sleep(1)
led.off()
#Purple
led.color = (1,0,1)
sleep(1)
led.off()
#Yellow
led.color = (1,1,0)
sleep(1)
led.off()
#White
led.color = (1,1,1)
sleep(1)
led.off()
#Cyan
led.color = (0,1,1)
sleep(1)
led.off() | 18.666667 | 39 | 0.434524 | 92 | 672 | 3.163043 | 0.315217 | 0.19244 | 0.216495 | 0.28866 | 0.333333 | 0.28866 | 0 | 0 | 0 | 0 | 0 | 0.089974 | 0.421131 | 672 | 36 | 40 | 18.666667 | 0.658098 | 0.080357 | 0 | 0.56 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.08 | 0 | 0.08 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ff1a14d0aca978642d8ff991e2964c2f5277c55a | 561 | py | Python | edi_835_parser/segments/provider_summary.py | shalini1017/edi-835-parser | 5c7c9549621a71cea893b37998e4bdea94822c5c | [
"MIT"
] | null | null | null | edi_835_parser/segments/provider_summary.py | shalini1017/edi-835-parser | 5c7c9549621a71cea893b37998e4bdea94822c5c | [
"MIT"
] | null | null | null | edi_835_parser/segments/provider_summary.py | shalini1017/edi-835-parser | 5c7c9549621a71cea893b37998e4bdea94822c5c | [
"MIT"
] | null | null | null | from edi_835_parser.elements.identifier import Identifier
from edi_835_parser.segments.utilities import split_segment, get_element
class ProviderSummary:
    """TS3 (provider summary) segment of an EDI 835 remittance advice."""
    identification = 'TS3'
    identifier = Identifier()

    def __init__(self, segment: str):
        # The raw segment arrives as "<index>:<segment body>".
        parts = segment.split(':', 1)
        self.index = parts[0]
        elements = split_segment(parts[1])
        self.identifier = elements[0]
        self.facility_type_code = get_element(elements, 2)

    def __repr__(self):
        # One "(key, value)" pair per line for easy inspection.
        return '\n'.join(map(str, self.__dict__.items()))
if __name__ == '__main__':
	# Module is import-only; nothing to run directly.
	pass
| 23.375 | 72 | 0.737968 | 75 | 561 | 5.12 | 0.546667 | 0.09375 | 0.052083 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02686 | 0.137255 | 561 | 23 | 73 | 24.391304 | 0.766529 | 0 | 0 | 0 | 0 | 0 | 0.026738 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0.066667 | 0.133333 | 0.066667 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
ff1e935d5941ae2bb63ba7b6ab71aab1f2c49d08 | 2,690 | py | Python | pgoapi/utilities.py | superfsm/pgoapipub | c484d61f8e3a2698801f6b2c3dd42c65bd70849e | [
"MIT"
] | null | null | null | pgoapi/utilities.py | superfsm/pgoapipub | c484d61f8e3a2698801f6b2c3dd42c65bd70849e | [
"MIT"
] | null | null | null | pgoapi/utilities.py | superfsm/pgoapipub | c484d61f8e3a2698801f6b2c3dd42c65bd70849e | [
"MIT"
] | null | null | null | """
pgoapi - Pokemon Go API
Copyright (c) 2016 tjado <https://github.com/tejado>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
Author: tjado <https://github.com/tejado>
"""
import re
import struct
import logging
from json import JSONEncoder
# other stuff
from google.protobuf.internal import encoder
from geopy.geocoders import GoogleV3
from s2sphere import Cell, CellId, LatLng
log = logging.getLogger(__name__)
def f2i(float):
    """Reinterpret the IEEE-754 bits of a double as an unsigned 64-bit int.

    (The parameter shadows the builtin ``float``; the name is kept for API
    compatibility.)
    """
    packed = struct.pack('<d', float)
    return struct.unpack('<Q', packed)[0]
def f2h(float):
    """Hex string of the raw IEEE-754 bit pattern of a double."""
    bits = struct.unpack('<Q', struct.pack('<d', float))[0]
    return hex(bits)
def h2f(hex):
    """Inverse of f2h: rebuild a double from its hex bit-pattern string."""
    as_int = int(hex, 16)
    return struct.unpack('<d', struct.pack('<Q', as_int))[0]
def to_camel_case(value):
    """Convert a snake_case string to CamelCase.

    Empty segments (from leading/doubled underscores) are preserved as '_'.
    """
    pieces = []
    for word in value.split('_'):
        pieces.append(word.capitalize() if word else '_')
    return ''.join(pieces)
# JSON Encoder to handle bytes
class JSONByteEncoder(JSONEncoder):
    """JSONEncoder that serialises bytes values as UTF-8 strings.

    Fix: only decode actual bytes-like objects and delegate everything else
    to the base class, so other unserialisable types still raise the
    standard ``TypeError`` (the original called ``.decode`` unconditionally
    and crashed with ``AttributeError`` instead).
    """
    def default(self, o):
        if isinstance(o, (bytes, bytearray)):
            return o.decode('utf-8')
        return JSONEncoder.default(self, o)
def get_pos_by_name(location_name):
    """Geocode a place name via Google; return (lat, lon, alt) or None."""
    geocoder = GoogleV3()
    place = geocoder.geocode(location_name, timeout=10)
    if not place:
        return None
    log.info("Location for '%s' found: %s", location_name, place.address)
    log.info('Coordinates (lat/long/alt) for location: %s %s %s', place.latitude, place.longitude, place.altitude)
    return (place.latitude, place.longitude, place.altitude)
def get_cell_ids(lat, long, radius = 10):
    """Return sorted level-15 S2 cell ids around (lat, long).

    Collects the origin cell plus ``radius`` neighbour cells in each
    direction along the S2 curve.
    """
    origin = CellId.from_lat_lng(LatLng.from_degrees(lat, long)).parent(15)
    cell_ids = [origin.id()]
    ahead = origin.next()
    behind = origin.prev()
    # Walk `radius` steps outward on both sides of the origin cell.
    for _ in range(radius):
        cell_ids.append(ahead.id())
        cell_ids.append(behind.id())
        ahead = ahead.next()
        behind = behind.prev()
    # Return everything, smallest id first.
    return sorted(cell_ids)
| 32.409639 | 108 | 0.727509 | 400 | 2,690 | 4.845 | 0.495 | 0.045408 | 0.016512 | 0.019608 | 0.094943 | 0.069143 | 0.034056 | 0.034056 | 0.034056 | 0 | 0 | 0.009874 | 0.171747 | 2,690 | 82 | 109 | 32.804878 | 0.859964 | 0.458364 | 0 | 0 | 0 | 0 | 0.065744 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.184211 | false | 0 | 0.184211 | 0.131579 | 0.605263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
20872490deb4c1875ae7248e5dae529082dc29b9 | 281 | py | Python | contest/pythonist2/find-angle/find-angle.py | zeyuanxy/HackerRank | 5194a4af780ece396501c215996685d1be529e73 | [
"MIT"
] | 4 | 2017-01-18T17:51:58.000Z | 2019-10-20T12:14:37.000Z | contest/pythonist2/find-angle/find-angle.py | zeyuanxy/HackerRank | 5194a4af780ece396501c215996685d1be529e73 | [
"MIT"
] | null | null | null | contest/pythonist2/find-angle/find-angle.py | zeyuanxy/HackerRank | 5194a4af780ece396501c215996685d1be529e73 | [
"MIT"
] | 8 | 2016-03-14T17:16:59.000Z | 2021-06-26T10:11:33.000Z | # -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date: 2016-05-04 14:13:24
# @Last Modified by: Zeyuan Shang
# @Last Modified time: 2016-05-04 14:15:11
# HackerRank "Find Angle MBC".
# NOTE(review): this is Python 2 code (`print` statement; under Python 2,
# input() evaluates the entered text, so AB and BC are numeric).
import cmath
import math
AB = input()
BC = input()
# cmath.phase(complex(BC, AB)) == atan2(AB, BC); convert to whole degrees.
print str(int(round(math.degrees(cmath.phase(complex(BC, AB)))))) + '°'
208dc5f025c001d6197fa1e8f2839d34a02ee8f1 | 1,825 | py | Python | document/ebpf_program/catalog.py | guard-project/cb-manager | 7dc7c7b9beacc45236674642f4b64373a2f4cdb3 | [
"MIT"
] | 2 | 2020-05-14T00:32:32.000Z | 2022-02-28T21:08:16.000Z | document/ebpf_program/catalog.py | guard-project/cb-manager | 7dc7c7b9beacc45236674642f4b64373a2f4cdb3 | [
"MIT"
] | 4 | 2020-12-09T16:22:45.000Z | 2021-04-06T09:58:01.000Z | document/ebpf_program/catalog.py | guard-project/cb-manager | 7dc7c7b9beacc45236674642f4b64373a2f4cdb3 | [
"MIT"
] | null | null | null | from elasticsearch_dsl import Boolean, Nested, Text
from document.base import BaseDocument, BaseInnerDoc
class _eBPFProgramCatalogConfigMetricOpenMetricsMetadataLabelInnerDoc(
        BaseInnerDoc):
    """eBPF program open metrics label."""
    name = Text(required=True)   # label name
    value = Text(required=True)  # label value
class _eBPFProgramCatalogConfigMetricOpenMetricsMetadataInnerDoc(BaseInnerDoc):
    """eBPF program open metrics metadata."""
    type = Text(required=True)  # metric type — presumably an OpenMetrics type; confirm
    help = Text()               # human-readable help text
    labels = Nested(
        _eBPFProgramCatalogConfigMetricOpenMetricsMetadataLabelInnerDoc)
class _eBPFProgramCatalogConfigMetricInnerDoc(BaseInnerDoc):
    """eBPF program metric data."""
    # Metric exported by an eBPF program configuration.
    name = Text(required=True)
    map_name = Text() # FIXME required=True (map-name the correct name)
    open_metrics_metadata = Nested(_eBPFProgramCatalogConfigMetricOpenMetricsMetadataInnerDoc) # noqa: E501
class _eBPFProgramCatalogConfigInnerDoc(BaseInnerDoc):
    """eBPF program parameter configuration."""
    code = Text(required=True)  # program code — presumably eBPF source; confirm
    metrics = Nested(_eBPFProgramCatalogConfigMetricInnerDoc)
class _eBPFProgramCatalogParameterInnerDoc(BaseInnerDoc):
    """eBPF program parameter."""
    id = Text(required=True)
    # possible values: integer, number, string, choice, boolean
    type = Text(required=True)
    list = Boolean()  # presumably: parameter accepts a list of values; confirm
    values = Text()  # when type = choice
    description = Text()
    example = Text()
class _eBPFProgramCatalogDocument(BaseDocument):
    """Represents an eBPF program in the catalog."""
    # id already defined by Elasticsearch
    config = Nested(_eBPFProgramCatalogConfigInnerDoc, required=True)
    parameters = Nested(_eBPFProgramCatalogParameterInnerDoc)
    description = Text()
    class Index:
        """Elasticsearch configuration."""
        # Name of the backing Elasticsearch index.
        name = 'ebpf-program-catalog'
| 29.435484 | 108 | 0.740274 | 154 | 1,825 | 8.675325 | 0.409091 | 0.080838 | 0.083832 | 0.040419 | 0.050898 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001987 | 0.172603 | 1,825 | 61 | 109 | 29.918033 | 0.882781 | 0.219726 | 0 | 0.193548 | 0 | 0 | 0.014451 | 0 | 0 | 0 | 0 | 0.016393 | 0 | 1 | 0 | false | 0 | 0.064516 | 0 | 0.903226 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
2092fb173a91c43cc796846e350f277f2d41c18f | 948 | py | Python | labmar27.py | seanmacb/COMP-115-Exercises | fbe7e5b158f2db785b886b6c600f1a8beb19ab1f | [
"MIT"
] | null | null | null | labmar27.py | seanmacb/COMP-115-Exercises | fbe7e5b158f2db785b886b6c600f1a8beb19ab1f | [
"MIT"
] | null | null | null | labmar27.py | seanmacb/COMP-115-Exercises | fbe7e5b158f2db785b886b6c600f1a8beb19ab1f | [
"MIT"
] | null | null | null | '''
#Returns the fibonnaci sequence when prompted for a terms
def fib(a):
folder=0
fold=1
flist=[]
flist.append(fold)
for i in range(0,a-1):
f=fold+folder
flist.append(f)
folder=fold
fold=f
return flist
#Returns the sum of a fibonnaci sequence with a terms
def fibsum(a):
folder=0
fold=1
flist=[]
flist.append(fold)
for i in range(0,a-1):
f=fold+folder
flist.append(f)
folder=fold
fold=f
numsum=sum(flist)
return (numsum)
def main():
number=int(input("Enter in however many fibonacci numbers you want: "))
print(fib(number))
print("The sum of those numbers is",fibsum(number))
main()
'''
#Function that rolls a dice twice in main()
import random
def die():
    """Return one roll of a fair six-sided die (an int in 1..6)."""
    return random.randint(1, 6)
def main():
    """Wait for Enter, then roll the die twice and print both results."""
    input("Press enter to roll the die!")
    print(die())
    print(die())
main()
| 16.344828 | 75 | 0.591772 | 142 | 948 | 3.950704 | 0.380282 | 0.078431 | 0.032086 | 0.042781 | 0.30303 | 0.30303 | 0.30303 | 0.30303 | 0.30303 | 0.30303 | 0 | 0.014837 | 0.28903 | 948 | 57 | 76 | 16.631579 | 0.817507 | 0.806962 | 0 | 0.222222 | 0 | 0 | 0.162791 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.444444 | 0.222222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
209f0107640b52d625c21f1d727c32883e4a33c3 | 2,576 | py | Python | python/tensor_classification/test/test_manifold_discriminant.py | laurafroelich/tensor_classification | cd9a23d290207dcfa09f1e496f101df7ed62194c | [
"MIT"
] | 11 | 2018-09-25T11:43:22.000Z | 2022-03-11T08:33:30.000Z | python/tensor_classification/test/test_manifold_discriminant.py | laurafroelich/tensor_classification | cd9a23d290207dcfa09f1e496f101df7ed62194c | [
"MIT"
] | 9 | 2019-03-19T12:11:16.000Z | 2021-11-14T07:06:57.000Z | python/tensor_classification/test/test_manifold_discriminant.py | laurafroelich/tensor_classification | cd9a23d290207dcfa09f1e496f101df7ed62194c | [
"MIT"
] | 2 | 2020-10-25T08:30:06.000Z | 2022-03-21T12:06:12.000Z | import pytest
from tensor_classification.classificationmethods import ManifoldDiscrimantAnalysisParafacTucker as pt
import numpy as np
from numpy import random as random
import scipy.stats as stats
"""
Unit tests for python manifold discriminant analysis.
"""
def generate_data(classes, observations_per_class, shape=None):
    """Simulate labelled tensor observations for the discriminant-analysis tests.

    Observations of class ``i`` are drawn elementwise from ``i + U(-0.5, 0.5)``,
    so the same number of observations is produced for every class and the
    classes are uniformly spread around their label value.

    :param classes: Int, number of classes to simulate observations for.
    :param observations_per_class: Int, number of observations per class.
    :param shape: List, shape of each simulated observation (default [5, 6]).
    :return: observations: Tensor containing all simulated observations.
    :return: output_classes: Array giving the class of each observation.
    """
    if shape is None:
        shape = [5, 6]
    total = classes * observations_per_class
    output_classes = np.zeros(total)
    observations = np.zeros([total] + list(shape))
    for label in range(classes):
        start = label * observations_per_class
        stop = start + observations_per_class
        observations[start:stop] = label + np.random.uniform(
            -0.5, 0.5, [observations_per_class] + shape)
        output_classes[start:stop] = label
    return observations, output_classes
#def test_QtCheck():
# assert False
@pytest.mark.skip(reason="This test fails, need to fix at a later stage.")
def test_based_differences():
    """Smoke-test class_based_differences on simulated data (currently skipped)."""
    analyser = pt.TuckerDiscriminantAnalysis()
    data, labels = generate_data(5, 1000)
    class_means = analyser.class_based_differences(data, labels)
    print(class_means)
    assert True
#def test_fail():
# assert False
def test_generate_data():
    # TODO(review): enable and assert on the disabled call below.
    # generate_data(5, 31)
    pass
def test_fit_manifold():
    # TODO(review): placeholder — no real assertion yet.
    assert True
    # pass
def test_fit_pipeline():
    # TODO(review): placeholder — no real assertion yet.
    assert True
    # pass
def test_initialize_manifold_with_u():
    # TODO(review): not implemented yet.
    pass
def test_initialize_manifold_without_u():
    # TODO(review): not implemented yet.
    pass
def test_calculate_set_tolerances():
    """set_tolerances(0, 0) must set both tolerance attributes to 0.

    Bug fix: the original ``assert a == 0, b == 0`` treated the second
    comparison as the assertion *message*, so Udifftol was never checked.
    """
    modeller = pt.TuckerDiscriminantAnalysis()
    modeller.set_tolerances(0, 0)
    assert modeller.Fdifftol == 0
    assert modeller.Udifftol == 0
def test_tucker_object_data_matrix():
    # TODO(review): not implemented yet.
    pass
def test_my_cost():
    # TODO(review): not implemented yet.
    pass
| 28.307692 | 120 | 0.743789 | 341 | 2,576 | 5.434018 | 0.340176 | 0.041554 | 0.09714 | 0.045332 | 0.095521 | 0.047491 | 0.047491 | 0.047491 | 0.047491 | 0.047491 | 0 | 0.010466 | 0.184006 | 2,576 | 90 | 121 | 28.622222 | 0.871075 | 0.322981 | 0 | 0.190476 | 0 | 0 | 0.028256 | 0 | 0 | 0 | 0 | 0 | 0.095238 | 1 | 0.238095 | false | 0.119048 | 0.119048 | 0 | 0.380952 | 0.02381 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
20a26ae98fbb3757ac38066f73f1c724d35221d3 | 2,099 | py | Python | Tests/CoinCommandTest.py | goodship1/CoinCommand | 9b8f36d4b5241f9cb56cf68bf449c1af03c44a7a | [
"MIT"
] | 2 | 2018-01-25T22:10:37.000Z | 2020-02-13T16:49:55.000Z | Tests/CoinCommandTest.py | goodship1/CoinCommand | 9b8f36d4b5241f9cb56cf68bf449c1af03c44a7a | [
"MIT"
] | 5 | 2018-03-03T23:35:21.000Z | 2019-09-22T18:30:49.000Z | Tests/CoinCommandTest.py | goodship1/CoinCommand | 9b8f36d4b5241f9cb56cf68bf449c1af03c44a7a | [
"MIT"
] | null | null | null | import requests
import ast
"""some modifactions to coinCommand to allow an easier way to test cli"""
def cp(coin, currency):
    """Return the formatted price of `coin` in `currency`, or an error string."""
    try:
        return cp_Request_to_Url(coin, currency)
    except Exception:
        # Any failure (bad symbol, network error, parse error) is reported
        # with the same message, as in the original.
        return "coin or currency doesnt exist"
def cp_Request_to_Url(coin, price):
    """Fetch the price of `coin` in currency `price` and format it.

    Bug fixes: the original assigned ``request_to_Url`` but read
    ``request_To_url`` (NameError), and called ``formatting_Unicode_currency``
    although the function is defined as ``formatting_Unicode_Currency``.
    """
    url = "https://min-api.cryptocompare.com/data/price?fsym=%s&tsyms=%s" % (coin, price)
    response = requests.get(url)
    return formatting_Unicode_Currency(response.text, price)
def formatting_Unicode_Currency(request, price):
    """Parse the price-API response text and prefix the price with its symbol.

    Only USD and EUR are handled; any other currency returns None
    (original behaviour).
    """
    data = ast.literal_eval(request)
    euro = u'\u20ac'
    if price == 'USD':
        return '$' + str(data[price])
    if price == 'EUR':
        return euro + str(data[price])
def mined(coin):
    # Total coins mined for `coin`; any failure falls through to the handler.
    try:
        return mined_Request_To_Url(coin)
    except Exception as err:
        # NOTE(review): coin_Doesnt_Exist is not defined anywhere in this
        # file, so this fallback itself raises NameError — confirm where
        # the helper was meant to come from.
        return coin_Doesnt_Exist()
def mined_Request_To_Url(coin):
    """Fetch the coin snapshot and return its total-mined figure."""
    url = 'https://www.cryptocompare.com/api/data/coinsnapshot/?fsym=%s&tsym=USD' % coin
    response = requests.get(url)
    return formatting_Unicode_Mined(response.text)
def formatting_Unicode_Mined(request):
    """Extract Data.TotalCoinsMined from the snapshot response text."""
    snapshot = ast.literal_eval(request)
    return snapshot['Data']['TotalCoinsMined']
def algo(coin):
    """ gets the coin implementation algorithm"""
    try:
        return algo_Request_To_Url(coin)
    except Exception as err:
        # NOTE(review): coin_Doesnt_Exist is not defined anywhere in this
        # file, so this fallback itself raises NameError — confirm where
        # the helper was meant to come from.
        return(coin_Doesnt_Exist())
def algo_Request_To_Url(coin):
    """Fetch the coin snapshot and return its hashing-algorithm name."""
    url = 'https://www.cryptocompare.com/api/data/coinsnapshot/?fsym=%s&tsym=USD' % coin
    response = requests.get(url)
    return formatting_Unicode_CoinSnapShot(response.text)
def formatting_Unicode_CoinSnapShot(request):
    """Extract Data.Algorithm from the snapshot response text."""
    snapshot = ast.literal_eval(request)
    return snapshot['Data']['Algorithm']
def news():
    """Return the latest crypto news feed.

    Bug fix: the original returned the undefined name ``request_To_news``
    (NameError) instead of calling ``request_to_News()``.
    """
    return request_to_News()
def request_to_News():
    """Fetch the latest English-language crypto news feed."""
    url = 'https://min-api.cryptocompare.com/data/v2/news/?lang=EN'
    response = requests.get(url)
    return formatting_Unicode_news(response.text)
def formatting_Unicode_news(request):
    """Parse the news response text into Python objects."""
    return ast.literal_eval(request)
| 23.322222 | 84 | 0.775131 | 303 | 2,099 | 5.115512 | 0.224422 | 0.092903 | 0.108387 | 0.061935 | 0.543871 | 0.509032 | 0.418065 | 0.304516 | 0.304516 | 0.24129 | 0 | 0.001605 | 0.109576 | 2,099 | 89 | 85 | 23.58427 | 0.827715 | 0.025727 | 0 | 0.196078 | 0 | 0.058824 | 0.167603 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.039216 | 0.019608 | 0.568627 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
20a9de2b7e432d7723ae2900ca820b3d07d3057f | 192 | py | Python | windse/TurbineManager.py | malihass/WindSE | ea6763847a5988c54a74db16126f90b3975c79c3 | [
"Apache-2.0"
] | null | null | null | windse/TurbineManager.py | malihass/WindSE | ea6763847a5988c54a74db16126f90b3975c79c3 | [
"Apache-2.0"
] | null | null | null | windse/TurbineManager.py | malihass/WindSE | ea6763847a5988c54a74db16126f90b3975c79c3 | [
"Apache-2.0"
] | null | null | null | class GenericTurbine(object):
def __init__(self, loc, RD, W):
self.loc = loc # Location in Space
self.RD = RD # Rotor Diameter
self.W = W # Width of influence | 38.4 | 43 | 0.59375 | 26 | 192 | 4.230769 | 0.653846 | 0.127273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.3125 | 192 | 5 | 43 | 38.4 | 0.833333 | 0.265625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
20dcd2810b2b5e99322acbb5905844e020c55fec | 865 | py | Python | final_project/machinetranslation/tests/tests.py | bluesydan/xzceb-flask_eng_fr | e026b3b13402de94d7e75958f359e9c0cf4017aa | [
"Apache-2.0"
] | null | null | null | final_project/machinetranslation/tests/tests.py | bluesydan/xzceb-flask_eng_fr | e026b3b13402de94d7e75958f359e9c0cf4017aa | [
"Apache-2.0"
] | null | null | null | final_project/machinetranslation/tests/tests.py | bluesydan/xzceb-flask_eng_fr | e026b3b13402de94d7e75958f359e9c0cf4017aa | [
"Apache-2.0"
] | null | null | null | import unittest
from translator import english_to_french, french_to_english
class TestenglishToFrench(unittest.TestCase):
def test1(self):
self.assertEqual(english_to_french("Hello"), "Bonjour")
self.assertEqual(english_to_french("Farewell"), "Adieu")
self.assertEqual(english_to_french("What time is it?"), "Quelle heure est-il?")
def test1none(self):
self.assertRaises(ValueError, english_to_french, None)
class TestfrenchToEnglish(unittest.TestCase):
def test2(self):
self.assertEqual(french_to_english("Bonjour"), "Hello")
self.assertEqual(french_to_english("Adieu"), "Goodbye")
self.assertEqual(french_to_english("Il est tard."), "It's late.")
def test2none(self):
self.assertRaises(ValueError, french_to_english, None)
if __name__ == "__main__":
unittest.main()
| 30.892857 | 87 | 0.708671 | 102 | 865 | 5.735294 | 0.392157 | 0.153846 | 0.128205 | 0.123077 | 0.307692 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005571 | 0.169942 | 865 | 28 | 88 | 30.892857 | 0.809192 | 0 | 0 | 0 | 0 | 0 | 0.132794 | 0 | 0 | 0 | 0 | 0 | 0.444444 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
20efb42ccbbb8110c184c38cc854c0e1a7a7c998 | 1,519 | py | Python | setup.py | masalomon01/heroku-kafka-eze | 0d25c0ccff272b842a8bafebdb1b56852e928105 | [
"MIT"
] | null | null | null | setup.py | masalomon01/heroku-kafka-eze | 0d25c0ccff272b842a8bafebdb1b56852e928105 | [
"MIT"
] | null | null | null | setup.py | masalomon01/heroku-kafka-eze | 0d25c0ccff272b842a8bafebdb1b56852e928105 | [
"MIT"
] | null | null | null | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# Directory of this setup.py, so README.md resolves regardless of the CWD.
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
# (codecs.open is used above so `encoding=` works on Python 2 as well).
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    version='0.0.2', # Update the version number for new releases
    name='heroku-kafka-eze', # This is the name of your PyPI-package.
    description='Python kafka package for use with heroku\'s kafka. You\'ll only need your heroku api key and app name',
    long_description=long_description,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    url='https://github.com/masalomon01/heroku-kafka-eze',
    author='MarSal',
    keywords='heroku, kafka',
    author_email='salermom@gmail.com',
    license='MIT',
    py_modules=["heroku_kafka_eze"],
    # Pinned runtime dependencies.
    install_requires=[
        'kafka-python==1.3.5',
        'heroku3==3.3.0'
    ]
)
4550ae581a536b11f675f20d45a0203cf027945a | 622 | py | Python | homework/lesson-4/exercise-5/main.py | ai-nikolaev/repo-pybasics | bb34fdd0ef5dc562ef1708268917a0f29e7ae4d4 | [
"MIT"
] | null | null | null | homework/lesson-4/exercise-5/main.py | ai-nikolaev/repo-pybasics | bb34fdd0ef5dc562ef1708268917a0f29e7ae4d4 | [
"MIT"
] | null | null | null | homework/lesson-4/exercise-5/main.py | ai-nikolaev/repo-pybasics | bb34fdd0ef5dc562ef1708268917a0f29e7ae4d4 | [
"MIT"
] | null | null | null | # 5. Реализовать формирование списка, используя функцию range()
# и возможности генератора. В список должны войти четные числа
# от 100 до 1000 (включая границы). Необходимо получить результат
# вычисления произведения всех элементов списка.
# Подсказка: использовать функцию reduce().
from functools import reduce
# функция вычисления произведения
def my_func(prev_el, el):
    """Reducer for functools.reduce: multiply the running product
    (prev_el) by the current list element (el) and return it."""
    product = prev_el * el
    return product
# Build the list of even numbers from 100 to 1000 inclusive;
# stepping range() by 2 avoids a modulo test on every integer.
my_list = list(range(100, 1001, 2))
# Print the product of all list elements via reduce().
print(f'Вывод результата: {reduce(my_func, my_list)}')
| 28.272727 | 65 | 0.749196 | 85 | 622 | 5.4 | 0.694118 | 0.039216 | 0.034858 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.03301 | 0.172026 | 622 | 21 | 66 | 29.619048 | 0.858252 | 0.641479 | 0 | 0 | 0 | 0 | 0.207547 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0.2 | 0.6 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
4556c1af93caec5ff0b8f7cf7907d2be48c7c2a0 | 151 | py | Python | Hello World Programs/Python/Hayloworld.py | TeacherManoj0131/HacktoberFest2020-Contributions | c7119202fdf211b8a6fc1eadd0760dbb706a679b | [
"MIT"
] | 256 | 2020-09-30T19:31:34.000Z | 2021-11-20T18:09:15.000Z | Hello World Programs/Python/Hayloworld.py | TeacherManoj0131/HacktoberFest2020-Contributions | c7119202fdf211b8a6fc1eadd0760dbb706a679b | [
"MIT"
] | 293 | 2020-09-30T19:14:54.000Z | 2021-06-06T02:34:47.000Z | Hello World Programs/Python/Hayloworld.py | TeacherManoj0131/HacktoberFest2020-Contributions | c7119202fdf211b8a6fc1eadd0760dbb706a679b | [
"MIT"
] | 1,620 | 2020-09-30T18:37:44.000Z | 2022-03-03T20:54:22.000Z | # Using list to print hello world
# Characters of the message to print, one per list element.
my_list = ["H", "e", "l", "l", "o", " ", "W", "o", "r", "l", "d"]
# str.join builds the string in one pass instead of a quadratic += loop.
s = "".join(my_list)
print(s)
| 15.1 | 65 | 0.437086 | 29 | 151 | 2.206897 | 0.62069 | 0.1875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.271523 | 151 | 9 | 66 | 16.777778 | 0.581818 | 0.205298 | 0 | 0 | 0 | 0 | 0.09322 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
455ebcfbe6e377cb57360ec0596e894542519812 | 538 | py | Python | LeetCode/Math/Sum/167. Two Sum II - Input array is sorted.py | thehanemperor/LeetCode | 8d120162657a1e29c3e821b51ac4121300fc7a12 | [
"MIT"
] | null | null | null | LeetCode/Math/Sum/167. Two Sum II - Input array is sorted.py | thehanemperor/LeetCode | 8d120162657a1e29c3e821b51ac4121300fc7a12 | [
"MIT"
] | null | null | null | LeetCode/Math/Sum/167. Two Sum II - Input array is sorted.py | thehanemperor/LeetCode | 8d120162657a1e29c3e821b51ac4121300fc7a12 | [
"MIT"
] | null | null | null | # EASY
# Two pointer
# --> if < tar
# <-- if > tar
# Time O(N) Space O(1)
class Solution:
    """Two Sum II (LeetCode 167): find two numbers in a sorted array
    that add up to the target, using the two-pointer technique
    (O(n) time, O(1) extra space)."""

    # Annotations are quoted: the file never imports typing.List, so the
    # original unquoted `List[int]` raised NameError at definition time.
    def twoSum(self, numbers: "List[int]", target: int) -> "List[int]":
        """Return 1-based indices [i, j] with numbers[i-1] + numbers[j-1] == target."""
        left, right = 0, len(numbers) - 1
        while left < right:
            # Compute the pair sum once per iteration instead of three times.
            pair_sum = numbers[left] + numbers[right]
            if pair_sum == target:
                return [left + 1, right + 1]
            if pair_sum < target:
                left += 1    # sum too small: advance the small end
            else:
                right -= 1   # sum too large: retreat the large end
return [] | 28.315789 | 67 | 0.481413 | 62 | 538 | 4.177419 | 0.419355 | 0.127413 | 0.208494 | 0.266409 | 0.374517 | 0.262548 | 0.262548 | 0 | 0 | 0 | 0 | 0.021148 | 0.384758 | 538 | 19 | 68 | 28.315789 | 0.761329 | 0.139405 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
456852f7e7b515c2666b275ae34b136cdab0908f | 5,436 | py | Python | padding_oracle.py | peterwilliams97/compression | dd40032a24cc111d4a881f4f8aa2389df519ddd0 | [
"MIT"
] | null | null | null | padding_oracle.py | peterwilliams97/compression | dd40032a24cc111d4a881f4f8aa2389df519ddd0 | [
"MIT"
] | null | null | null | padding_oracle.py | peterwilliams97/compression | dd40032a24cc111d4a881f4f8aa2389df519ddd0 | [
"MIT"
] | null | null | null | """
1 1: 73 's'
2 2: 4f73 'Os'
3 3: 204f73 ' Os'
4 4: 68204f73 'h Os'
5 5: 7368204f73 'sh Os'
6 6: 697368204f73 'ish Os'
7 7: 6d697368204f73 'mish Os'
8 8: 616d697368204f73 'amish Os'
9 9: 65616d697368204f73 'eamish Os'
10 10: 7565616d697368204f73 'ueamish Os'
11 11: 717565616d697368204f73 'queamish Os'
12 12: 53717565616d697368204f73 'Squeamish Os'
13 13: 2053717565616d697368204f73 ' Squeamish Os'
14 14: 652053717565616d697368204f73 'e Squeamish Os'
15 15: 72652053717565616d697368204f73 're Squeamish Os'
16 16: 6172652053717565616d697368204f73 'are Squeamish Os'
1:: 1 1: 20 ' '
2:: 2 2: 7320 's '
3:: 3 3: 647320 'ds '
4:: 4 4: 72647320 'rds '
5:: 5 5: 6f72647320 'ords '
6:: 6 6: 576f72647320 'Words '
7:: 7 7: 20576f72647320 ' Words '
8:: 8 8: 6320576f72647320 'c Words '
9:: 9 9: 696320576f72647320 'ic Words '
10:: 10 10: 67696320576f72647320 'gic Words '
11:: 11 11: 6167696320576f72647320 'agic Words '
12:: 12 12: 4d6167696320576f72647320 'Magic Words '
13:: 13 13: 204d6167696320576f72647320 ' Magic Words '
14:: 14 14: 65204d6167696320576f72647320 'e Magic Words '
15:: 15 15: 6865204d6167696320576f72647320 'he Magic Words '
16:: 16 16: 546865204d6167696320576f72647320 'The Magic Words '
--------------------------------------------------------------------------------
The Magic Words are Squeamish Ossifrage\t\t\t\t\t\t\t\t\t
"""
import urllib
import urllib.parse
import urllib.request
import urllib.error
# Base URL of the remote padding-oracle service; the hex-encoded
# ciphertext is appended as the value of the 'er' query parameter.
TARGET = 'http://crypto-class.appspot.com/po?er='
# --------------------------------------------------------------
# padding oracle
# --------------------------------------------------------------
def txt(cip):
    """Hex-encode a sequence of byte values as one lowercase string."""
    return ''.join(format(byte, '02x') for byte in cip)
def asc(a):
    """Turn a sequence of byte values into the corresponding character string."""
    return ''.join(map(chr, a))
def query(q):
    """Ask the oracle about the hex ciphertext q.

    Returns (good, status_code) where good is True when the server
    answered 404 (padding valid). The oracle only ever replies with an
    HTTP error, so the no-exception path deliberately crashes.
    """
    url = TARGET + urllib.parse.quote(q)
    try:
        response = urllib.request.urlopen(url)
    except urllib.error.URLError as err:
        # NOTE(review): a plain URLError has no .code (only HTTPError does);
        # behavior kept as-is from the original.
        return err.code == 404, err.code
    print('-' * 80)
    print(response)
    assert False
# Hex-encoded challenge ciphertext: the IV followed by the CBC blocks.
base = 'f20bdba6ff29eed7b046d1df9fb7000058b1ffb4210a580f748b4ac714c001bd4a61044426fb515dad3f21f18aa577c0bdf302936266926ff37dbf7035d5eeb4'
b = len(base)
assert b % 2 == 0, b
n = b // 2
# Decode consecutive hex digit pairs into integer byte values.
cip = [int(base[pos * 2:pos * 2 + 2], 16) for pos in range(n)]
print('cip=%d' % len(cip))
assert txt(cip) == base
def do_block(cip, B):
    """Recover plaintext block B (1-based) via the CBC padding-oracle attack.

    Works on the first B+1 ciphertext blocks; bytes are guessed from the
    last byte of the block backwards by forging padding in block B-1.
    Returns the recovered 16-character plaintext string.
    """
    N = (B + 1) * 16   # bytes of ciphertext needed (target block + its predecessor chain)
    M = B * 16         # offset of the target block; bytes before it get tampered
    cip = cip[:N]
    assert len(cip) == N
    answer = []        # recovered plaintext bytes, last byte first
    plain = ''
    for pos in range(1, 16 + 1):
        found = False
        # NOTE(review): orig_g is set here but orig_n is checked below --
        # if the oracle never raises and no byte matches, orig_n is
        # undefined (NameError). Looks like a typo; confirm before fixing.
        orig_g = None
        print('%2d::' % pos, end=' ')
        for n in range(0x100):
            o = M - pos  # NOTE(review): unused local
            # Candidate plaintext guesses: current guess n plus known bytes.
            h = [n] + list(reversed(answer))
            # Forge the tail of the previous block so valid padding implies guess == plaintext.
            d = [h[i] ^ cip[M - pos + i] ^ pos for i in range(pos)]
            xcip = cip[:M - pos] + d + cip[M:]
            assert len(xcip) == N, (len(xcip), N, M, pos)
            q = txt(xcip)
            try:
                g, k = query(q)
            except:
                # Network hiccup: remember the guess and treat as bad padding.
                orig_n = n
                g = False
            if g:
                assert not found
                found = True
                answer.append(n)
                a = list(reversed(answer))
                plain = asc(a)
                print('%2d %2d: %50s %18r' % (len(a), len(plain), txt(a), plain))
                break
        if not found:
            # Fall back to the guess recorded when the query errored out.
            assert orig_n is not None
            n = orig_n
            answer.append(n)
            a = list(reversed(answer))
            plain = asc(a)
            print('%2d %2d: %50s %18r' % (len(a), len(plain), txt(a), plain))
    a = list(reversed(answer))
    plain = asc(a)
    return plain
# Recover each of the three plaintext blocks in turn and join them.
parts = []
for block_index in (1, 2, 3):
    plain = do_block(cip, block_index)
    print('-' * 80)
    parts.append(plain)
print('=' * 80)
plain = ''.join(parts)
print('%d %r' % (len(plain), plain))
| 39.970588 | 137 | 0.399742 | 519 | 5,436 | 4.175337 | 0.312139 | 0.007383 | 0.009691 | 0.011075 | 0.08814 | 0.08814 | 0.08814 | 0.075219 | 0.071066 | 0.071066 | 0 | 0.257796 | 0.469095 | 5,436 | 135 | 138 | 40.266667 | 0.49307 | 0.557211 | 0 | 0.186667 | 0 | 0 | 0.098133 | 0.05558 | 0 | 0 | 0.002171 | 0 | 0.093333 | 1 | 0.053333 | false | 0 | 0.053333 | 0.026667 | 0.16 | 0.12 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4572a985665c76033407df453472f6074b10dacf | 893 | py | Python | code/Polimorfismo.py | Koki-Orba/Learn | b8c9285b52fa73c8b801eae27a41cc86be1421bd | [
"MIT"
] | null | null | null | code/Polimorfismo.py | Koki-Orba/Learn | b8c9285b52fa73c8b801eae27a41cc86be1421bd | [
"MIT"
] | null | null | null | code/Polimorfismo.py | Koki-Orba/Learn | b8c9285b52fa73c8b801eae27a41cc86be1421bd | [
"MIT"
] | null | null | null | #Polimorfismo
# #Es la capacidad que tienen los objetos en
# #diferentes clases para usar un comportamiento
# #o atributo del mismo nombre pero con diferente valor
#
# # Por ejemplo
#class Auto:
# rueda = 4
# def desplazamiento(self):
# print("el auto se esta desplazando sobre 4 ruegas")
#
#class Moto:
# rueda = 2
# def desplazamiento(self):
# print("la moto se esta desplazando sobre 2 ruedas")
#
#Ambos son vehiculos pero se desplazan diferente
#
#Ejercicio 1
class Animales:
    """Base class for animals; each subclass overrides tipo_animal()."""

    def __init__(self, nombre):
        # Keep the animal's name for later use.
        self.nombre = nombre

    def tipo_animal(self):
        """Placeholder overridden by each subclass (polymorphism)."""
        pass
class Leon(Animales):
    """Lion: reports itself as a wild animal."""

    def tipo_animal(self):
        # Polymorphic override: same method name, lion-specific behavior.
        print("animal salvaje")
class Perro(Animales):
    """Dog: reports itself as a domestic animal."""

    def tipo_animal(self):
        # Polymorphic override: same method name, dog-specific behavior.
        print("animal domestico")
# Polymorphism demo: the same call resolves to each subclass's override.
nuevo_animal = Leon("Simba")
nuevo_animal.tipo_animal()
nuevo_animal2= Perro("Firulais")
nuevo_animal2.tipo_animal() | 22.897436 | 60 | 0.692049 | 117 | 893 | 5.17094 | 0.529915 | 0.082645 | 0.064463 | 0.084298 | 0.119008 | 0.119008 | 0.119008 | 0 | 0 | 0 | 0 | 0.009929 | 0.210526 | 893 | 39 | 61 | 22.897436 | 0.848227 | 0.50168 | 0 | 0.2 | 0 | 0 | 0.101655 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.266667 | false | 0.066667 | 0 | 0 | 0.466667 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
4575f1c3527e225921e38f6821449f2eb60bd89b | 58 | py | Python | dxm/lib/DxJobs/DxJobCounter.py | experiortec/dxm-toolkit | b2ab6189e163c62fa8d7251cd533d2a36430d44a | [
"Apache-2.0"
] | 5 | 2018-08-23T15:47:05.000Z | 2022-01-19T23:38:18.000Z | dxm/lib/DxJobs/DxJobCounter.py | experiortec/dxm-toolkit | b2ab6189e163c62fa8d7251cd533d2a36430d44a | [
"Apache-2.0"
] | 59 | 2018-10-15T10:37:00.000Z | 2022-03-22T20:49:25.000Z | dxm/lib/DxJobs/DxJobCounter.py | experiortec/dxm-toolkit | b2ab6189e163c62fa8d7251cd533d2a36430d44a | [
"Apache-2.0"
] | 12 | 2019-03-08T19:59:13.000Z | 2021-12-16T03:28:04.000Z | rows_total = 0
rows_masked = 0   # running count of rows masked -- presumably updated by job code; TODO confirm
ret = 0           # job return/status accumulator -- TODO confirm exact semantics with callers
profileret = 0    # profiling-job return/status accumulator -- TODO confirm
| 11.6 | 16 | 0.655172 | 10 | 58 | 3.6 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.095238 | 0.275862 | 58 | 4 | 17 | 14.5 | 0.761905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
457c568091d3e59679151054417b66eb0a5bd907 | 2,010 | py | Python | tests/test_endpoints.py | orlandodiaz/insta_api | 5dc5914a751fd7c2b1fcfe50fea792112d6d50d5 | [
"MIT"
] | 11 | 2018-10-10T19:18:27.000Z | 2021-05-29T17:15:42.000Z | tests/test_endpoints.py | orlandodiaz/insta_api | 5dc5914a751fd7c2b1fcfe50fea792112d6d50d5 | [
"MIT"
] | 3 | 2020-02-21T14:10:59.000Z | 2022-01-25T00:36:33.000Z | tests/test_endpoints.py | orlandodiaz/insta_api | 5dc5914a751fd7c2b1fcfe50fea792112d6d50d5 | [
"MIT"
] | 5 | 2019-04-03T09:46:38.000Z | 2022-01-24T23:52:26.000Z | import pytest
from insta_api.insta_api import InstaAPI
from insta_api.endpoints import *
@pytest.fixture(scope="module")
def insta():
    """Shared InstaAPI client for the test module; session closed on teardown."""
    client = InstaAPI(use_cookies=False)
    yield client
    client._close_session()
class TestEndpoints:
    """ These tests make sure that the API endpoints are still reachable and not moved"""

    def _assert_reachable(self, insta, url):
        # HEAD keeps the probe cheap; anything but 404 means the route exists.
        resp = insta.ses.head(url)
        assert resp.status_code != 404

    def test_base_endpoint(self, insta):
        self._assert_reachable(insta, base_endpoint)

    def test_login_endpoint(self, insta):
        self._assert_reachable(insta, base_endpoint + login_endpoint)

    def test_upload_photo_endpoint(self, insta):
        self._assert_reachable(insta, base_endpoint + post_photo_endpoint1)

    def test_exploretag_endpoint(self, insta):
        self._assert_reachable(
            insta, base_endpoint + explore_tag.format(hashtag="test"))

    def test_like_endpoint(self, insta):
        self._assert_reachable(
            insta, base_endpoint + like_endpoint.format(media_id='_'))

    def test_follow_endpoint(self, insta):
        self._assert_reachable(
            insta, base_endpoint + follow_endpoint.format(user_id="0"))

    def test_unfollow_endpoint(self, insta):
        self._assert_reachable(
            insta, base_endpoint + unfollow_endpoint.format(user_id="0"))

    def test_graphql_endpoint(self, insta):
        self._assert_reachable(insta, base_endpoint + graphql_endpoint)

    def test_logout_endpoint(self, insta):
        self._assert_reachable(insta, base_endpoint + logout_endpoint)

    def test_hashtag_suggestions_endpoint(self, insta):
        self._assert_reachable(insta, base_endpoint + search_hashtag_endpoint)
| 32.95082 | 89 | 0.658706 | 252 | 2,010 | 5.003968 | 0.25 | 0.104679 | 0.134814 | 0.166535 | 0.647898 | 0.647898 | 0.623315 | 0.551943 | 0.437748 | 0.080888 | 0 | 0.021941 | 0.251741 | 2,010 | 60 | 90 | 33.5 | 0.816489 | 0.038806 | 0 | 0.325581 | 0 | 0 | 0.006753 | 0 | 0 | 0 | 0 | 0 | 0.232558 | 1 | 0.255814 | false | 0 | 0.069767 | 0 | 0.348837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
458289e4f11ba10d6a0003b4ef41b1bf27bdd6d5 | 208 | py | Python | hasilla/const.py | DAVZAN/hasilla | 1b0dbd58613a28cf4b8aa675d86b4062d41a305d | [
"MIT"
] | null | null | null | hasilla/const.py | DAVZAN/hasilla | 1b0dbd58613a28cf4b8aa675d86b4062d41a305d | [
"MIT"
] | null | null | null | hasilla/const.py | DAVZAN/hasilla | 1b0dbd58613a28cf4b8aa675d86b4062d41a305d | [
"MIT"
] | null | null | null | """Silla constants."""
from typing import Final
# Device command keyword for sending batched commands
# -- NOTE(review): presumably Tasmota-style "Backlog"; confirm against the integration.
COMMAND_BACKLOG: Final = "Backlog"
# Configuration/attribute keys used by the integration.
CONF_MANUFACTURER: Final = "manufacturer"
CONF_NAME: Final = "name"
CONF_MODEL: Final = "md"  # abbreviated key -- presumably "model"; confirm
CONF_SW_VERSION: Final = "sw" | 20.8 | 41 | 0.735577 | 27 | 208 | 5.444444 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.134615 | 208 | 10 | 42 | 20.8 | 0.816667 | 0.076923 | 0 | 0 | 0 | 0 | 0.144385 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.166667 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
45896c85a489a7eb2718ac18217e71b65ac35bff | 3,550 | py | Python | advent/problem_02.py | MattJDavidson/python-adventofcode | d8f52a8d09561ed3d7dfe0f379f19cb71268073f | [
"BSD-2-Clause"
] | null | null | null | advent/problem_02.py | MattJDavidson/python-adventofcode | d8f52a8d09561ed3d7dfe0f379f19cb71268073f | [
"BSD-2-Clause"
] | null | null | null | advent/problem_02.py | MattJDavidson/python-adventofcode | d8f52a8d09561ed3d7dfe0f379f19cb71268073f | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""--- Day 2: I Was Told There Would Be No Math ---
The elves are running low on wrapping paper, and so they need to submit an
order for more. They have a list of the dimensions (length l, width w, and
height h) of each present, and only want to order exactly as much as they need.
Fortunately, every present is a box (a perfect right rectangular prism), which
makes calculating the required wrapping paper for each gift a little easier:
1. Find the surface area of the box, which is 2*l*w + 2*w*h + 2*h*l.
1. The elves also need a little extra paper for each present: the area of
the smallest side.
For example:
A present with dimensions 2x3x4 requires 2*6 + 2*12 + 2*8 = 52 square feet of
wrapping paper plus 6 square feet of slack, for a total of 58 square feet. A
present with dimensions 1x1x10 requires 2*1 + 2*10 + 2*10 = 42 square feet of
wrapping paper plus 1 square foot of slack, for a total of 43 square feet. All
numbers in the elves' list are in feet. How many total square feet of wrapping
paper should they order?
--- Part Two ---
The elves are also running low on ribbon. Ribbon is all the same width, so they
only have to worry about the length they need to order, which they would again
like to be exact.
The ribbon required to wrap a present is the shortest distance around its
sides, or the smallest perimeter of any one face. Each present also requires a
bow made out of ribbon as well; the feet of ribbon required for the perfect bow
is equal to the cubic feet of volume of the present. Don't ask how they tie the
bow, though; they'll never tell.
For example:
A present with dimensions 2x3x4 requires 2+2+3+3 = 10 feet of ribbon to wrap
the present plus 2*3*4 = 24 feet of ribbon for the bow, for a total of 34 feet.
A present with dimensions 1x1x10 requires 1+1+1+1 = 4 feet of ribbon to wrap
the present plus 1*1*10 = 10 feet of ribbon for the bow, for a total of 14
feet. How many total feet of ribbon should they order?
"""
from functools import reduce
import operator
import sys
import click
def format_lines(text):
    """Parse whitespace-separated 'LxWxH' entries into lists of ints."""
    dims = []
    for line in text.split():
        dims.append([int(part) for part in line.split('x')])
    return dims
def calculate_area(length, width, height):
    """Return the wrapping paper needed for one present: the box's full
    surface area plus the smallest side's area as slack."""
    sides = [length * width, width * height, height * length]
    # Each side appears twice on the box; slack is the smallest face.
    return 2 * sum(sides) + min(sides)
def total_area(text):
    """Returns total area of wrapping paper"""
    total = 0
    for dimensions in format_lines(text):
        total += calculate_area(*dimensions)
    return total
def calculate_wribbon(dimensions):
    """Return the ribbon needed for one present: the smallest face's
    perimeter (wrap) plus the box volume (bow).

    Fix: works on a sorted copy, so the caller's list is no longer
    mutated as a side effect (the original called dimensions.sort()).
    """
    ordered = sorted(dimensions)  # two smallest values give the smallest perimeter
    volume = reduce(operator.mul, ordered, 1)
    return volume + sum(ordered[:2]) * 2
def total_wribbon(text):
    """Return the total feet of ribbon for every present in text."""
    return sum(map(calculate_wribbon, format_lines(text)))
def calculate_solution_1(data):
    """Part 1: total square feet of wrapping paper for all presents."""
    return total_area(data)
def calculate_solution_2(data):
    """Part 2: total feet of ribbon for all presents."""
    return total_wribbon(data)
@click.command()
@click.option('--source_file', default='data/02.txt',
              help='source data file for problem')
def main(source_file):
    """Simple solution to adventofcode problem 2."""
    with open(source_file) as source:
        data = source.read()
    paper = total_area(data)
    ribbon = total_wribbon(data)
    print('Total required wrapping paper for part 1 is {}'.format(paper))
    print('Total required wribbon for part 2 is {}'.format(ribbon))
# Entry point when run as a script; exit with main()'s return code.
if __name__ == "__main__":
    sys.exit(main())
| 35.148515 | 84 | 0.716338 | 601 | 3,550 | 4.174709 | 0.311148 | 0.026305 | 0.028697 | 0.035074 | 0.20845 | 0.198485 | 0.16102 | 0.129135 | 0.103627 | 0.025508 | 0 | 0.029236 | 0.200282 | 3,550 | 100 | 85 | 35.5 | 0.854526 | 0.618028 | 0 | 0 | 0 | 0 | 0.110355 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.242424 | false | 0 | 0.121212 | 0.090909 | 0.575758 | 0.060606 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
459164f40f31987a93c7b77cb184b573ba599b70 | 1,510 | py | Python | DockerFeed/Stores/FolderStore.py | DIPSAS/DockerFeed | 64ad9e491112b70c2a50ad8c62cc3865d4908d2e | [
"MIT"
] | 3 | 2019-11-18T14:23:42.000Z | 2020-04-29T08:24:25.000Z | DockerFeed/Stores/FolderStore.py | DIPSAS/DockerFeed | 64ad9e491112b70c2a50ad8c62cc3865d4908d2e | [
"MIT"
] | null | null | null | DockerFeed/Stores/FolderStore.py | DIPSAS/DockerFeed | 64ad9e491112b70c2a50ad8c62cc3865d4908d2e | [
"MIT"
] | null | null | null | import os
import glob
import shutil
from DockerFeed.Stores.AbstractStore import AbstractStore
class FolderStore(AbstractStore):
    """Artifact store backed by a plain folder on local disk."""

    def __init__(self, sourceFolder='stacks'):
        """Create the store, ensuring the backing folder exists."""
        self.__sourceFolder = sourceFolder
        os.makedirs(self.__sourceFolder, exist_ok=True)
    def GetSource(self):
        # Expose the backing folder path as this store's source identifier.
        return self.__sourceFolder
def Pull(self, artifactName, outputFolder):
sourcePath = self.__GetArtifactSourcePath(artifactName)
destinationPath = os.path.join(outputFolder, artifactName)
if not(os.path.isfile(sourcePath)):
raise FileNotFoundError("Artifact {0} does not exist in source folder {1}".format(artifactName, self.__sourceFolder))
os.makedirs(outputFolder, exist_ok=True)
shutil.copyfile(sourcePath, destinationPath)
def Push(self, artifactFile):
destinationPath = os.path.join(self.__sourceFolder, os.path.basename(artifactFile))
shutil.copyfile(artifactFile, destinationPath)
def Exists(self, artifactName):
sourcePath = self.__GetArtifactSourcePath(artifactName)
return os.path.isfile(sourcePath)
def Remove(self, artifactName):
sourcePath = self.__GetArtifactSourcePath(artifactName)
os.remove(sourcePath)
def List(self, searchPattern = '*'):
return glob.glob(os.path.join(self.__sourceFolder, searchPattern))
def __GetArtifactSourcePath(self, artifactName):
return os.path.join(self.__sourceFolder, artifactName) | 31.458333 | 129 | 0.711921 | 148 | 1,510 | 7.074324 | 0.337838 | 0.122254 | 0.038204 | 0.13467 | 0.194842 | 0.120344 | 0 | 0 | 0 | 0 | 0 | 0.001649 | 0.196689 | 1,510 | 48 | 130 | 31.458333 | 0.8615 | 0 | 0 | 0.096774 | 0 | 0 | 0.0364 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.258065 | false | 0 | 0.129032 | 0.096774 | 0.548387 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
4596cb527b6a0d794e9825856661f34d54238d4e | 4,702 | py | Python | HRI/TFVT_HRI/perception/common/visualize.py | WorldEditors/PaddleRobotics | d02efd74662c6f78dfb964e8beb93f1914dcb2f3 | [
"Apache-2.0"
] | 146 | 2020-12-08T11:51:38.000Z | 2022-03-23T12:58:43.000Z | HRI/TFVT_HRI/perception/common/visualize.py | WorldEditors/PaddleRobotics | d02efd74662c6f78dfb964e8beb93f1914dcb2f3 | [
"Apache-2.0"
] | 10 | 2020-12-23T03:00:31.000Z | 2022-03-23T09:55:30.000Z | HRI/TFVT_HRI/perception/common/visualize.py | WorldEditors/PaddleRobotics | d02efd74662c6f78dfb964e8beb93f1914dcb2f3 | [
"Apache-2.0"
] | 43 | 2020-12-21T09:40:39.000Z | 2022-03-31T06:41:32.000Z | import cv2
import numpy as np
from PIL import Image
from PIL import ImageDraw
from subprocess import Popen, PIPE
import pycocotools.mask as coco_mask_util
def draw_bboxes(image, bboxes, labels=None, output_file=None, fill='red'):
    """
    Draw bounding boxes on image.
    Return image with drawings as BGR ndarray.
    Args:
        image (string | ndarray): input image path or image BGR ndarray.
        bboxes (np.array): bounding boxes, each as (xmin, ymin, xmax, ymax).
        labels (list of string): the label names of bboxes.
        output_file (string): output image path.
        fill: outline color passed to PIL's line drawing.
    """
    # One label per bbox when labels are supplied.
    if labels:
        assert len(bboxes) == len(labels)
    if isinstance(image, str):
        image = Image.open(image)
    elif isinstance(image, np.ndarray):
        # Incoming ndarray is BGR; reverse the channel axis to get RGB for PIL.
        image = Image.fromarray(image[:, :, ::-1], mode='RGB')
    else:
        raise ValueError('`image` should be image path in string or '
                         'image ndarray.')
    draw = ImageDraw.Draw(image)
    for i in range(len(bboxes)):
        xmin, ymin, xmax, ymax = bboxes[i]
        left, right, top, bottom = xmin, xmax, ymin, ymax
        # Closed polyline tracing the rectangle back to its start point.
        lines = [(left, top), (left, bottom), (right, bottom),
                 (right, top), (left, top)]
        draw.line(lines, width=4, fill=fill)
        # Labels are only drawn on RGB images, in yellow.
        if labels and image.mode == 'RGB':
            draw.text((left, top), labels[i], (255, 255, 0))
    if output_file:
        print('The image with bbox is saved as {}'.format(output_file))
        image.save(output_file)
    # Convert back to a BGR ndarray for the caller.
    return np.array(image)[:, :, ::-1]
def save_as_gif(images, gif_file, fps=5):
    """
    Save numpy images as gif file using ffmpeg.
    Args:
        images (list|ndarray): a list of uint8 images or uint8 ndarray
            with shape [time, height, width, channels]. `channels` can
            be 1 or 3.
        gif_file (str): path to saved gif file.
        fps (int): frames per second of the animation.
    """
    # All frames are assumed to share the first frame's shape -- TODO confirm.
    h, w, c = images[0].shape
    # ffmpeg reads raw frames from stdin ('-i -') and writes the gif to stdout ('-').
    cmd = [
        'ffmpeg', '-y',
        '-f', 'rawvideo',
        '-vcodec', 'rawvideo',
        '-r', '%.02f' % fps,
        '-s', '%dx%d' % (w, h),
        '-pix_fmt', {1: 'gray', 3: 'rgb24'}[c],
        '-i', '-',
        '-filter_complex', '[0:v]split[x][z];[z]palettegen[y];[x][y]paletteuse',
        '-r', '%.02f' % fps,
        '-f', 'gif',
        '-']
    proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    for image in images:
        # Stream each frame's raw bytes into ffmpeg.
        proc.stdin.write(image.tostring())
    out, err = proc.communicate()
    if proc.returncode:
        # Include the full command line in the raised error for debugging.
        err = '\n'.join([' '.join(cmd), err.decode('utf8')])
        raise IOError(err)
    del proc
    with open(gif_file, 'wb') as f:
        f.write(out)
def colormap(rgb=False):
    """
    Get colormap
    Returns an (N, 3) float32 array of colors scaled to 0-255.
    Channel order is reversed (BGR) unless rgb=True.
    """
    # Flat list of color triples in the 0-1 range, grouped into rows of 3 below.
    color_list = np.array([
        0.000, 0.447, 0.741, 0.850, 0.325, 0.098, 0.929, 0.694, 0.125, 0.494,
        0.184, 0.556, 0.466, 0.674, 0.188, 0.301, 0.745, 0.933, 0.635, 0.078,
        0.184, 0.300, 0.300, 0.300, 0.600, 0.600, 0.600, 1.000, 0.000, 0.000,
        1.000, 0.500, 0.000, 0.749, 0.749, 0.000, 0.000, 1.000, 0.000, 0.000,
        0.000, 1.000, 0.667, 0.000, 1.000, 0.333, 0.333, 0.000, 0.333, 0.667,
        0.000, 0.333, 1.000, 0.000, 0.667, 0.333, 0.000, 0.667, 0.667, 0.000,
        0.667, 1.000, 0.000, 1.000, 0.333, 0.000, 1.000, 0.667, 0.000, 1.000,
        1.000, 0.000, 0.000, 0.333, 0.500, 0.000, 0.667, 0.500, 0.000, 1.000,
        0.500, 0.333, 0.000, 0.500, 0.333, 0.333, 0.500, 0.333, 0.667, 0.500,
        0.333, 1.000, 0.500, 0.667, 0.000, 0.500, 0.667, 0.333, 0.500, 0.667,
        0.667, 0.500, 0.667, 1.000, 0.500, 1.000, 0.000, 0.500, 1.000, 0.333,
        0.500, 1.000, 0.667, 0.500, 1.000, 1.000, 0.500, 0.000, 0.333, 1.000,
        0.000, 0.667, 1.000, 0.000, 1.000, 1.000, 0.333, 0.000, 1.000, 0.333,
        0.333, 1.000, 0.333, 0.667, 1.000, 0.333, 1.000, 1.000, 0.667, 0.000,
        1.000, 0.667, 0.333, 1.000, 0.667, 0.667, 1.000, 0.667, 1.000, 1.000,
        1.000, 0.000, 1.000, 1.000, 0.333, 1.000, 1.000, 0.667, 1.000, 0.167,
        0.000, 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000,
        0.000, 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000,
        0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000,
        0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000, 0.000,
        0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000, 0.833,
        0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.143, 0.143, 0.143, 0.286,
        0.286, 0.286, 0.429, 0.429, 0.429, 0.571, 0.571, 0.571, 0.714, 0.714,
        0.714, 0.857, 0.857, 0.857, 1.000, 1.000, 1.000
    ]).astype(np.float32)
    # Group into rows of 3 (one color per row) and scale to 0-255.
    color_list = color_list.reshape((-1, 3)) * 255
    if not rgb:
        # Reverse channel order RGB -> BGR.
        color_list = color_list[:, ::-1]
    return color_list
| 39.512605 | 78 | 0.547639 | 859 | 4,702 | 2.973225 | 0.221187 | 0.139389 | 0.107674 | 0.090838 | 0.326938 | 0.244714 | 0.210258 | 0.199295 | 0.17502 | 0.17502 | 0 | 0.28239 | 0.259677 | 4,702 | 118 | 79 | 39.847458 | 0.451307 | 0.127605 | 0 | 0.02381 | 0 | 0.011905 | 0.063124 | 0.012475 | 0 | 0 | 0 | 0 | 0.011905 | 1 | 0.035714 | false | 0 | 0.071429 | 0 | 0.130952 | 0.011905 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4599fd645dac58591b4888f8c302e038c819c9ec | 468 | py | Python | tests/server/execute_test.py | jwminton/voila | b003a7fc62023e5b4c8dab7dd64b94a920610c15 | [
"BSD-3-Clause"
] | 2,977 | 2019-09-27T04:51:38.000Z | 2022-03-31T12:02:41.000Z | tests/server/execute_test.py | sthagen/voila-dashboards-voila | 7613fbb95f39a93f874ea57a8ab4a31140ace394 | [
"BSD-3-Clause"
] | 735 | 2019-09-27T08:02:34.000Z | 2022-03-31T19:58:01.000Z | tests/server/execute_test.py | sthagen/voila-dashboards-voila | 7613fbb95f39a93f874ea57a8ab4a31140ace394 | [
"BSD-3-Clause"
] | 335 | 2019-10-06T05:23:29.000Z | 2022-03-23T21:35:00.000Z | # test basics of Voilà running a notebook
async def test_hello_world(http_server_client, print_notebook_url):
    """Smoke test: Voilà serves the notebook, strips source, uses default template."""
    response = await http_server_client.fetch(print_notebook_url)
    assert response.code == 200
    page = response.body.decode('utf-8')
    assert 'Hi Voilà' in page
    assert 'print(' not in page, 'by default the source code should be stripped'
    assert 'test_template.css' not in page, "test_template should not be the default"
| 42.545455 | 90 | 0.760684 | 73 | 468 | 4.657534 | 0.547945 | 0.094118 | 0.088235 | 0.076471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010256 | 0.166667 | 468 | 10 | 91 | 46.8 | 0.861538 | 0.083333 | 0 | 0 | 0 | 0 | 0.28103 | 0 | 0 | 0 | 0 | 0 | 0.571429 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.428571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
459f654e31e45c6e4ce99f9d39d667c41d0dd137 | 234 | py | Python | Chapter01/2_add.py | blep/Deep-Learning-for-Computer-Vision | c5f07d1af15e804cf238e7f9dbdb3c139d5696b2 | [
"MIT"
] | null | null | null | Chapter01/2_add.py | blep/Deep-Learning-for-Computer-Vision | c5f07d1af15e804cf238e7f9dbdb3c139d5696b2 | [
"MIT"
] | null | null | null | Chapter01/2_add.py | blep/Deep-Learning-for-Computer-Vision | c5f07d1af15e804cf238e7f9dbdb3c139d5696b2 | [
"MIT"
] | null | null | null | import tensorflow.compat.v1 as tf
# Run TensorFlow in TF1 graph mode (placeholders/sessions need v2 behavior off).
tf.disable_v2_behavior()
# Symbolic graph inputs, supplied at run time through a feed dict.
x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)
z = x + y  # graph node computing the sum of the two inputs
session = tf.Session()
values = {x: 5.0, y: 4.0}
# Evaluate z with the given feed values; result is a one-element list ([9.0]).
result = session.run([z], values)
print(result)
| 14.625 | 33 | 0.683761 | 40 | 234 | 3.95 | 0.55 | 0.164557 | 0.189873 | 0.278481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.050505 | 0.153846 | 234 | 15 | 34 | 15.6 | 0.747475 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0.111111 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
45b07a4470e8dc5546324d1f703f9a61ca2a74cd | 3,977 | py | Python | tests/test_paragraphs.py | atvaccaro/homer | c19b08bca6a783041b1e9f2ee8ab7d392ab4626b | [
"MIT"
] | 660 | 2019-08-11T08:16:29.000Z | 2022-03-08T08:03:01.000Z | tests/test_paragraphs.py | atvaccaro/homer | c19b08bca6a783041b1e9f2ee8ab7d392ab4626b | [
"MIT"
] | 8 | 2019-08-15T20:40:54.000Z | 2021-09-29T17:41:45.000Z | tests/test_paragraphs.py | atvaccaro/homer | c19b08bca6a783041b1e9f2ee8ab7d392ab4626b | [
"MIT"
] | 41 | 2019-08-15T18:33:00.000Z | 2022-03-24T19:28:39.000Z | import unittest
from homer import analyzer
class TestParagraphs(unittest.TestCase):
    def test_sentence_length(self):
        """Paragraph.longest_sentence should pick the longest sentence of each paragraph."""
        # Each case pairs a paragraph with its expected longest sentence.
        sentences = [
            {
                'para': """Big or small, Panda is adorable. Call me something. But let me share a very interesting story with you, which I am sure you have never heard of.""",
                'longest_sentence': """But let me share a very interesting story with you, which I am sure you have never heard of."""
            },
            {
                'para': 'I love winter. Summer is good too. Autumn is the best and my whole family likes it.',
                'longest_sentence': 'Autumn is the best and my whole family likes it.'
            },
            {
                'para': 'Do you like great books? I love books. They really get me going since they help me enhance my knowledge.',
                'longest_sentence': 'They really get me going since they help me enhance my knowledge.'
            }
        ]
        for data in sentences:
            paragraph_obj = analyzer.Paragraph(data['para'])
            # str() because longest_sentence may be a wrapper object -- TODO confirm.
            self.assertEqual(data['longest_sentence'], str(paragraph_obj.longest_sentence))
def test_word_count(self):
sentences = [
{
'para': '''Life is short, as everyone knows. When I was a kid I used to wonder about this. Is life actually short, or are we really complaining about its finiteness? Would we be just as likely to feel life was short if we lived 10 times as long?''',
'words': 47,
'sentences': 4
},
{
'para': '''Having kids showed me how to convert a continuous quantity, time, into discrete quantities. You only get 52 weekends with your 2 year old. If Christmas-as-magic lasts from say ages 3 to 10, you only get to watch your child experience it 8 times. And while it's impossible to say what is a lot or a little of a continuous quantity like time, 8 is not a lot of something. If you had a handful of 8 peanuts, or a shelf of 8 books to choose from, the quantity would definitely seem limited, no matter what your lifespan was.''',
'words': 98,
'sentences': 5
},
{
'para': '''Ok, so life actually is short. Does it make any difference to know that?''',
'words': 14,
'sentences': 2
},
{
'para': '''Call me Ishmael. Some years ago—never mind how long precisely— having little or no money in my purse, and nothing particular to interest me on shore, I thought I would sail about a little and see the watery part of the world. It is a way I have of driving off the spleen and regulating the circulation. Whenever I find myself growing grim about the mouth; whenever it is a damp, drizzly November in my soul; whenever I find myself involuntarily pausing before coffin warehouses, and bringing up the rear of every funeral I meet; and especially whenever my hypos get such an upper hand of me, that it requires a strong moral principle to prevent me from deliberately stepping into the street, and methodically knocking people's hats off—then, I account it high time to get to sea as soon as I can. This is my substitute for pistol and ball. With a philosophical flourish Cato throws himself upon his sword; I quietly take to the ship. There is nothing surprising in this. If they but knew it, almost all men in their degree, some time or other, cherish very nearly the same feelings towards the ocean with me.''',
'words': 201,
'sentences': 8
}
]
for data in sentences:
para = analyzer.Paragraph(data['para'])
# for p in para.sentences:
# print(p.words)
self.assertEqual(data['words'], para.total_words)
self.assertEqual(data['sentences'], len(para))
# Allow the suite to be executed directly: `python test_paragraphs.py`.
if __name__ == "__main__":
    unittest.main()
| 67.40678 | 1,139 | 0.638169 | 580 | 3,977 | 4.346552 | 0.47069 | 0.02975 | 0.02261 | 0.010313 | 0.129314 | 0.129314 | 0.129314 | 0.129314 | 0.129314 | 0.129314 | 0 | 0.008897 | 0.293437 | 3,977 | 58 | 1,140 | 68.568966 | 0.887189 | 0.010812 | 0 | 0.08 | 0 | 0.12 | 0.67362 | 0 | 0 | 0 | 0 | 0 | 0.06 | 1 | 0.04 | false | 0 | 0.04 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
45b54bbd9600917663b68af3a511dd2a5dd1390f | 11,380 | py | Python | buildscripts/linter/git.py | MartinNeupauer/mongo | 6cc2dfe7edd312b8596355edef454e15988e350e | [
"Apache-2.0"
] | 1 | 2019-05-15T03:41:50.000Z | 2019-05-15T03:41:50.000Z | buildscripts/linter/git.py | MartinNeupauer/mongo | 6cc2dfe7edd312b8596355edef454e15988e350e | [
"Apache-2.0"
] | 2 | 2021-03-26T00:01:11.000Z | 2021-03-26T00:02:19.000Z | buildscripts/linter/git.py | MartinNeupauer/mongo | 6cc2dfe7edd312b8596355edef454e15988e350e | [
"Apache-2.0"
] | null | null | null | """Git Utility functions."""
from __future__ import absolute_import
from __future__ import print_function
import itertools
import os
import re
import subprocess
from typing import Any, Callable, List, Tuple
from buildscripts import moduleconfig
from buildscripts.resmokelib.utils import globstar
# Path to the modules in the mongodb source tree, relative to the repo root.
# Has to match the string in SConstruct.
MODULE_DIR = "src/mongo/db/modules"
def get_base_dir():
    # type: () -> str
    """
    Get the base directory for mongo repo.

    Asks git for the repository top-level first; if that fails, fall back to
    deriving the base directory from this script's location. This script
    assumes that it is running in buildscripts/, and uses that to find the
    base directory.
    """
    try:
        return subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).rstrip()
    except (subprocess.CalledProcessError, OSError):
        # Either we are not in a valid git directory (CalledProcessError) or
        # the git binary itself is unavailable (OSError). Use the script path
        # instead.
        return os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
def get_repos():
    # type: () -> List[Repo]
    """Get a list of Repos to check linters for."""
    base_dir = get_base_dir()

    # Discover the enterprise-style module checkouts that live under
    # MODULE_DIR, then add the main repository itself at the end.
    # TODO: how do we filter rocks, does it matter?
    mongo_modules = moduleconfig.discover_module_directories(
        os.path.join(base_dir, MODULE_DIR), None)
    module_paths = [os.path.join(base_dir, MODULE_DIR, module) for module in mongo_modules]

    return [Repo(repo_path) for repo_path in module_paths + [base_dir]]
class Repo(object):
    """Class encapsulates all knowledge about a git repository, and its metadata to run linters."""

    def __init__(self, path):
        # type: (str) -> None
        """Construct a repo object rooted at the given checkout path."""
        self.path = path

    def _callgito(self, args):
        # type: (List[str]) -> str
        """Call git for this repository, and return the captured output."""
        # --git-dir/--work-tree are the equivalent of -C in newer git, but keep
        # compatibility with versions before 1.8.5; which of the two we pass
        # depends on the subcommand and the current directory.
        git_dir = os.path.join(self.path, ".git")
        if "ls-files" in args:
            # ls-files depends on the current directory and works better when
            # not run with --work-tree.
            command = ['git', '--git-dir', git_dir] + args
        else:
            command = ['git', '--git-dir', git_dir, '--work-tree', self.path] + args
        return subprocess.check_output(command)

    def _callgit(self, args):
        # type: (List[str]) -> int
        """
        Call git for this repository without capturing output.

        This is designed to be used when git returns non-zero exit codes.
        """
        # --git-dir instead of -C keeps compatibility with git < 1.8.5.
        command = ['git', '--git-dir', os.path.join(self.path, ".git")] + args
        return subprocess.call(command)

    def _get_local_dir(self, path):
        # type: (str) -> str
        """Get a directory path relative to the git root directory."""
        local_path = os.path.relpath(path, self.path) if os.path.isabs(path) else path
        # Git uses Unix-style separators on every platform, so normalize any
        # Windows-style path before handing it to git.
        return local_path.replace("\\", "/")

    def get_candidates(self, candidates, filter_function):
        # type: (List[str], Callable[[str], bool]) -> List[str]
        """
        Get the set of candidate files to check by querying the repository.

        Returns the full path to the file for clang-format to consume.
        """
        repo_files = self.get_candidate_files(filter_function)
        if candidates:
            # Restrict the caller's list to files git actually tracks.
            local_candidates = [self._get_local_dir(candidate) for candidate in candidates]
            valid_files = list(set(local_candidates).intersection(repo_files))
        else:
            valid_files = list(repo_files)

        # Expand every repo-relative name to a full, normalized path.
        return [os.path.normpath(os.path.join(self.path, valid)) for valid in valid_files]

    def _git_ls_files(self, cmd, filter_function):
        # type: (List[str], Callable[[str], bool]) -> List[str]
        """Run git-ls-files and filter the list of files to a valid candidate list."""
        output = self._callgito(cmd)
        # Keep only the file names the caller's filter accepts; this is what
        # picks the interesting files in the mongo and mongo-enterprise repos.
        return [line.rstrip() for line in output.splitlines() if filter_function(line.rstrip())]

    def get_candidate_files(self, filter_function):
        # type: (Callable[[str], bool]) -> List[str]
        """Query git to get a list of all files in the repo to consider for analysis."""
        return self._git_ls_files(["ls-files", "--cached"], filter_function)

    def get_working_tree_candidate_files(self, filter_function):
        # type: (Callable[[str], bool]) -> List[str]
        # pylint: disable=invalid-name
        """Query git to get a list of all files in the working tree to consider for analysis."""
        return self._git_ls_files(["ls-files", "--cached", "--others"], filter_function)

    def get_working_tree_candidates(self, filter_function):
        # type: (Callable[[str], bool]) -> List[str]
        """
        Get the set of candidate files to check by querying the repository.

        Returns the full path to the file for clang-format to consume.
        """
        candidates = self.get_working_tree_candidate_files(filter_function)
        full_paths = [os.path.normpath(os.path.join(self.path, name)) for name in candidates]
        # git may report files that were removed from disk; drop those.
        return [full_path for full_path in full_paths if os.path.exists(full_path)]

    def is_detached(self):
        # type: () -> bool
        """Return true if the current working tree in a detached HEAD state."""
        # symbolic-ref exits with 1 if the repo is in a detached HEAD state.
        return self._callgit(["symbolic-ref", "--quiet", "HEAD"]) == 1

    def is_ancestor(self, parent, child):
        # type: (str, str) -> bool
        """Return true if the specified parent hash an ancestor of child hash."""
        # merge-base --is-ancestor exits with 0 if parent is an ancestor of child.
        return not self._callgit(["merge-base", "--is-ancestor", parent, child])

    def is_commit(self, sha1):
        # type: (str) -> bool
        """Return true if the specified hash is a valid git commit."""
        # cat-file -e exits with 0 if the object is a valid commit.
        return not self._callgit(["cat-file", "-e", "%s^{commit}" % sha1])

    def is_working_tree_dirty(self):
        # type: () -> bool
        """Return true the current working tree have changes."""
        # diff --quiet exits with 1 if the working tree has local changes.
        return self._callgit(["diff", "--quiet"]) == 1

    def does_branch_exist(self, branch):
        # type: (str) -> bool
        """Return true if the branch exists."""
        # rev-parse --verify exits with 0 if the branch exists.
        return not self._callgit(["rev-parse", "--verify", branch])

    def get_merge_base(self, commit):
        # type: (str) -> str
        """Get the merge base between 'commit' and HEAD."""
        return self._callgito(["merge-base", "HEAD", commit]).rstrip()

    def get_branch_name(self):
        # type: () -> str
        """
        Get the current branch name, short form.

        This returns "master", not "refs/head/master".
        Will not work if the current branch is detached.
        """
        branch = self.rev_parse(["--abbrev-ref", "HEAD"])
        if branch == "HEAD":
            raise ValueError("Branch is currently detached")
        return branch

    def add(self, command):
        # type: (List[str]) -> str
        """Git add wrapper."""
        return self._callgito(["add"] + command)

    def checkout(self, command):
        # type: (List[str]) -> str
        """Git checkout wrapper."""
        return self._callgito(["checkout"] + command)

    def commit(self, command):
        # type: (List[str]) -> str
        """Git commit wrapper."""
        return self._callgito(["commit"] + command)

    def diff(self, command):
        # type: (List[str]) -> str
        """Git diff wrapper."""
        return self._callgito(["diff"] + command)

    def log(self, command):
        # type: (List[str]) -> str
        """Git log wrapper."""
        return self._callgito(["log"] + command)

    def rev_parse(self, command):
        # type: (List[str]) -> str
        """Git rev-parse wrapper."""
        return self._callgito(["rev-parse"] + command).rstrip()

    def rm(self, command):
        # type: (List[str]) -> str
        # pylint: disable=invalid-name
        """Git rm wrapper."""
        return self._callgito(["rm"] + command)

    def show(self, command):
        # type: (List[str]) -> str
        """Git show wrapper."""
        return self._callgito(["show"] + command)
def expand_file_string(glob_pattern):
    # type: (str) -> List[str]
    """Expand a string that represents a set of files."""
    matches = globstar.iglob(glob_pattern)
    return [os.path.abspath(match) for match in matches]
def get_files_to_check_working_tree(filter_function):
    # type: (Callable[[str], bool]) -> List[str]
    """
    Get a list of files to check from the working tree.

    This will pick up files not managed by git.
    """
    # Flatten the per-repo candidate lists into a single list.
    per_repo = (repo.get_working_tree_candidates(filter_function) for repo in get_repos())
    return list(itertools.chain.from_iterable(per_repo))
def get_files_to_check(files, filter_function):
    # type: (List[str], Callable[[str], bool]) -> List[str]
    """Get a list of files that need to be checked based on which files are managed by git."""
    # Expand every glob the caller passed in into concrete candidate files.
    candidates = list(itertools.chain.from_iterable(expand_file_string(f) for f in files))
    if files and not candidates:
        raise ValueError("Globs '%s' did not find any files with glob." % (files))

    # Keep only the candidates that git actually manages, across all repos.
    valid_files = list(
        itertools.chain.from_iterable(
            repo.get_candidates(candidates, filter_function) for repo in get_repos()))
    if files and not valid_files:
        raise ValueError("Globs '%s' did not find any files with glob in git." % (files))

    return valid_files
def get_files_to_check_from_patch(patches, filter_function):
    # type: (List[str], Callable[[str], bool]) -> List[str]
    """Take a patch file generated by git diff, and scan the patch for a list of files to check."""
    candidates = []  # type: List[str]

    # Get a list of candidate_files
    check = re.compile(r"^diff --git a\/([\w\/\.\-]+) b\/[\w\/\.\-]+")

    lines = []  # type: List[str]
    for patch in patches:
        # Read the patch as text: `check` is a str pattern, and matching it
        # against bytes lines (from "rb" mode) raises TypeError on Python 3.
        with open(patch, "r") as infile:
            lines += infile.readlines()

    candidates = [check.match(line).group(1) for line in lines if check.match(line)]

    repos = get_repos()

    valid_files = list(
        itertools.chain.from_iterable(
            [r.get_candidates(candidates, filter_function) for r in repos]))

    return valid_files
| 35.899054 | 99 | 0.618366 | 1,542 | 11,380 | 4.451362 | 0.180285 | 0.025495 | 0.025641 | 0.018357 | 0.427156 | 0.368444 | 0.33042 | 0.280886 | 0.27331 | 0.267483 | 0 | 0.002499 | 0.261424 | 11,380 | 316 | 100 | 36.012658 | 0.814158 | 0.369684 | 0 | 0.174242 | 0 | 0 | 0.072404 | 0 | 0 | 0 | 0 | 0.003165 | 0 | 1 | 0.227273 | false | 0 | 0.068182 | 0 | 0.537879 | 0.007576 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
45c4e52b6d11492b4ca4b3af39e648ac643280a0 | 382 | py | Python | Northq/Exercise1/Excercise1.py | Amr116/challenges | 49cd92af5bc9f01c73e4615b37facced60c7bc48 | [
"MIT"
] | null | null | null | Northq/Exercise1/Excercise1.py | Amr116/challenges | 49cd92af5bc9f01c73e4615b37facced60c7bc48 | [
"MIT"
] | null | null | null | Northq/Exercise1/Excercise1.py | Amr116/challenges | 49cd92af5bc9f01c73e4615b37facced60c7bc48 | [
"MIT"
] | null | null | null | def RemovesNthDupicate(array, n):
    # Running count of how many times each value has been seen since its
    # last removal.
    hashTable = {}
    i = 0
    while i < len(array):
        if array[i] in hashTable:
            hashTable[array[i]] += 1
            if hashTable[array[i]] == n:
                # n-th sighting of this value: reset its counter to 1 and
                # remove the element in place.
                # NOTE(review): `i` is still incremented below after the pop,
                # so the element shifted into position `i` is skipped on this
                # pass — confirm that is the intended behaviour.
                hashTable[array[i]] = 1
                array.pop(i)
        else:
            # First sighting of this value.
            hashTable[array[i]] = 1
        i += 1
    # The list is modified in place, but also returned for convenience.
    return array
| 23.875 | 55 | 0.426702 | 41 | 382 | 3.97561 | 0.365854 | 0.184049 | 0.368098 | 0.294479 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024155 | 0.458115 | 382 | 15 | 56 | 25.466667 | 0.763285 | 0 | 0 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
afd6e14df45a089eecfeaf82446faf4e34332e75 | 1,806 | py | Python | adminator/manager.py | techlib/adminator | f7f6695778dc5fb741f118fe2e0358bdda632c7b | [
"MIT"
] | 1 | 2019-04-27T22:51:56.000Z | 2019-04-27T22:51:56.000Z | adminator/manager.py | techlib/adminator | f7f6695778dc5fb741f118fe2e0358bdda632c7b | [
"MIT"
] | 2 | 2016-11-25T10:02:15.000Z | 2017-05-31T08:29:03.000Z | adminator/manager.py | techlib/adminator | f7f6695778dc5fb741f118fe2e0358bdda632c7b | [
"MIT"
] | 1 | 2021-12-01T05:26:43.000Z | 2021-12-01T05:26:43.000Z | #!/usr/bin/python3 -tt
# -*- coding: utf-8 -*-
__all__ = ['Manager']
from twisted.internet.threads import deferToThread
from twisted.internet import task, reactor
from twisted.python import log
from adminator.device import Device
from adminator.dhcp_value import DhcpOptionValue
from adminator.dhcp_option import DhcpOption
from adminator.user import User
from adminator.network import Network
from adminator.network_pool import NetworkPool
from adminator.network_acl import NetworkAcl
from adminator.domain import Domain
from adminator.record import Record
from adminator.lease4 import Lease4
from adminator.lease6 import Lease6
from adminator.interface import Interface
from adminator.port import Port
from adminator.connection import Connection
from adminator.switch import Switch
from adminator.mac_history import MacHistory
from adminator.switch_interface import SwitchInterface
from adminator.config_pattern import ConfigPatttern
class Manager(object):
    """Facade that wires every model object to a shared database handle."""

    def __init__(self, db):
        self.db = db

        # Something like models: instantiate one model object per concept and
        # hang it off this manager. Every model receives the manager itself so
        # it can reach the database handle and its sibling models.
        model_classes = [
            ('user', User),
            ('device', Device),
            ('interface', Interface),
            ('dhcp_option', DhcpOption),
            ('dhcp_option_value', DhcpOptionValue),
            ('network', Network),
            ('network_pool', NetworkPool),
            ('network_acl', NetworkAcl),
            ('record', Record),
            ('domain', Domain),
            ('lease4', Lease4),
            ('lease6', Lease6),
            ('port', Port),
            ('connection', Connection),
            ('switch', Switch),
            ('mac_history', MacHistory),
            ('switch_interface', SwitchInterface),
            ('config_pattern', ConfigPatttern),
        ]
        for attr_name, model_cls in model_classes:
            setattr(self, attr_name, model_cls(self))
# vim:set sw=4 ts=4 et:
| 30.610169 | 54 | 0.739203 | 223 | 1,806 | 5.883408 | 0.278027 | 0.178354 | 0.045732 | 0.027439 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008191 | 0.188815 | 1,806 | 58 | 55 | 31.137931 | 0.887372 | 0.048173 | 0 | 0 | 0 | 0 | 0.004082 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023256 | false | 0 | 0.488372 | 0 | 0.534884 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
afdffc8efe1f06e1eb63a23331a16b36f464f053 | 12,605 | py | Python | test/test_web_bag.py | funkyeah/tiddlyweb | 2346e6c05aa03ae9c8f2687d9ef9e46103267a8e | [
"BSD-3-Clause"
] | null | null | null | test/test_web_bag.py | funkyeah/tiddlyweb | 2346e6c05aa03ae9c8f2687d9ef9e46103267a8e | [
"BSD-3-Clause"
] | null | null | null | test/test_web_bag.py | funkyeah/tiddlyweb | 2346e6c05aa03ae9c8f2687d9ef9e46103267a8e | [
"BSD-3-Clause"
] | null | null | null | """
Test that GETting a bag can list the tiddlers.
"""
import httplib2
import urllib
import simplejson
from fixtures import muchdata, reset_textstore, _teststore, initialize_app
from tiddlyweb.model.bag import Bag
from tiddlyweb.stores import StorageInterface
# Bag policy used by the PUT tests: chris owns the bag and holds the only
# delete right; chris and jeremy may write/create; GUEST may also read.
policy_dict = dict(
        read=[u'chris',u'jeremy',u'GUEST'],
        write=[u'chris',u'jeremy'],
        create=[u'chris',u'jeremy'],
        delete=[u'chris'],
        manage=[],
        owner=u'chris')
def setup_module(module):
    """Initialize the app, reset the store, and fill it with test data, once per module."""
    initialize_app()
    reset_textstore()
    module.store = _teststore()
    muchdata(module.store)
def test_get_bag_tiddler_list_default():
    """GETting a bag's tiddlers with no extension returns an HTML listing."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html;charset=UTF-8 is %s' % response['content-type']
    # The fixture bag carries 10 tiddlers, each rendered as a list item.
    assert content.count('<li>') == 10


def test_get_bag_tiddler_list_404():
    """
    A request for the tiddlers in a non existent bag gives a 404.
    """
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag99/tiddlers',
            method='GET')

    assert response['status'] == '404'
    # The error body must be clean text, not a serialized Python tuple.
    assert '(' not in content


def test_get_bag_tiddler_list_text():
    """The .txt extension produces a plain text list, one tiddler per line."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers.txt',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/plain; charset=UTF-8', 'response content-type should be text/plain; charset=UTF-8 is %s' % response['content-type']
    assert len(content.rstrip().split('\n')) == 10, 'len tiddlers should be 10 is %s' % len(content.split('\n'))


def test_get_bag_tiddler_list_html():
    """The .html extension explicitly requests the HTML listing."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers.html',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html;charset=UTF-8 is %s' % response['content-type']
    assert content.count('<li>') == 10


def test_get_bag_tiddler_list_415():
    """An unsupported serialization extension (.gif) is refused with 415."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers.gif',
            method='GET')

    assert response['status'] == '415', 'response status should be 415 is %s' % response['status']


def test_get_bag_tiddler_list_html_default():
    """Content negotiation via the Accept header selects the HTML listing."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers',
            method='GET', headers={'Accept': 'text/html'})

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html;charset=UTF-8 is %s' % response['content-type']
    assert content.count('<li>') == 10


def test_get_bag_tiddler_list_filtered():
    """A select filter in the query string narrows the listing to one tiddler."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers.txt?select=title:tiddler8',
            method='GET')

    assert response['status'] == '200'
    # Presumably the fixture's fixed modification time — confirm against muchdata().
    assert response['last-modified'] == 'Fri, 23 May 2008 03:03:00 GMT'
    assert len(content.rstrip().split('\n')) == 1, 'len tiddlers should be 1 is %s' % len(content.rstrip().split('\n'))


def test_get_bag_tiddler_list_bogus_filter():
    """An unparsable filter expression is rejected with a 400."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers.txt?sort=-monkey',
            method='GET')

    assert response['status'] == '400'
    assert 'malformed filter' in content
def test_get_bags_default():
    """GET /bags with no extension defaults to an HTML listing of all bags."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html;charset=UTF-8 is %s' % response['content-type']
    # The fixture data provides 30 bags, each rendered as a linked list item.
    assert content.count('<li>') == 30
    assert content.count('bags/') == 30
def test_get_bags_txt():
    """GET /bags.txt lists all 30 bags as plain text, one per line."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.txt',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/plain; charset=UTF-8', 'response content-type should be text/plain; charset=UTF-8 is %s' % response['content-type']
    # The fixture data provides 30 bags (the failure message used to claim 32,
    # contradicting the asserted value and the sibling tests).
    assert len(content.rstrip().split('\n')) == 30, 'len bags should be 30 is %s' % len(content.rstrip().split('\n'))
def test_get_bags_html():
    """The .html extension explicitly requests the HTML listing of bags."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.html',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html;charset=UTF-8 is %s' % response['content-type']
    assert content.count('<li>') == 30
    assert content.count('bags/') == 30


def test_get_bags_unsupported_neg_format():
    """An unsupported extension (.gif) with no Accept header yields 415."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.gif',
            method='GET')

    assert response['status'] == '415', 'response status should be 415 is %s' % response['status']


def test_get_bags_unsupported_format():
    """Another unsupported extension (.jpeg) also yields 415."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.jpeg',
            method='GET')

    assert response['status'] == '415', 'response status should be 415 is %s' % response['status']


def test_get_bags_json():
    """
    GET the list of bags serialized as JSON.

    Uses extension.
    """
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.json',
            method='GET')

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'application/json; charset=UTF-8', \
            'response content-type should be application/json; charset=UTF-8 is %s' % response['content-type']
    info = simplejson.loads(content)
    assert type(info) == list
    assert len(info) == 30


def test_get_bags_wiki():
    """
    The bags collection doesn't support wiki serialization.
    """
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.wiki',
            method='GET')
    assert response['status'] == '415'


def test_get_bags_unsupported_neg_format_with_accept():
    """A usable Accept header rescues a request with an unsupported extension."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags.gif',
            method='GET', headers={'Accept': 'text/html'})

    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html;charset=UTF-8 is %s' % response['content-type']
def test_get_bag_tiddler_list_empty():
    """
    A request for the tiddlers in an empty bag gives a 200, empty page.
    """
    bag = Bag('bagempty')
    store.put(bag)
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bagempty/tiddlers.json',
            method='GET')
    assert response['status'] == '200'
    results = simplejson.loads(content)
    assert len(results) == 0

    response, content = http.request('http://our_test_domain:8001/bags/bagempty/tiddlers.html',
            method='GET')
    # The HTML listing of an empty bag must also succeed; previously this
    # response was fetched but never checked.
    assert response['status'] == '200'
def test_put_bag():
    """
    PUT a new bag to the server, then GET it back and check etag handling.
    """
    json_string = simplejson.dumps(dict(policy=policy_dict))

    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bagpuss',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json_string)

    location = response['location']
    assert response['status'] == '204'
    assert location == 'http://our_test_domain:8001/bags/bagpuss'

    # GET the bag back and confirm the policy round-tripped.
    response, content = http.request(location, method='GET',
            headers={'Accept': 'application/json'})

    assert response['status'] == '200'
    assert 'etag' in response
    etag = response['etag']
    info = simplejson.loads(content)
    assert info['policy']['delete'] == policy_dict['delete']

    # A matching If-None-Match etag yields 304 Not Modified...
    response, content = http.request('http://our_test_domain:8001/bags/bagpuss.json',
            method='GET', headers={'if-none-match': etag})
    assert response['status'] == '304', content

    # ...while a non-matching one serves the full representation.
    response, content = http.request('http://our_test_domain:8001/bags/bagpuss.json',
            method='GET', headers={'if-none-match': etag + 'foo'})
    assert response['status'] == '200', content
def test_put_bag_bad_json():
    """
    PUT a bag whose body is truncated JSON; the server must reject it.
    """
    json_string = simplejson.dumps(dict(policy=policy_dict))
    # Chop off the final character to make the JSON invalid.
    json_string = json_string[0:-1]

    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bagpuss',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json_string)

    assert response['status'] == '400'
    assert 'unable to put bag' in content
    assert 'unable to make json into' in content
def test_delete_bag():
    """
    PUT a new bag to the server and then DELETE it.
    """
    json_string = simplejson.dumps(dict(policy={}))

    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/deleteme',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json_string)

    location = response['location']
    assert response['status'] == '204'
    assert location == 'http://our_test_domain:8001/bags/deleteme'

    response, content = http.request(location, method='DELETE')
    assert response['status'] == '204'

    # Once deleted, the bag can no longer be retrieved.
    response, content = http.request(location, method='GET', headers={'Accept':'application/json'})
    assert response['status'] == '404'


def test_put_bag_wrong_type():
    """
    PUT a bag with a non-JSON content type; the server must refuse with 415.
    """
    json_string = simplejson.dumps(dict(policy=policy_dict))

    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bagpuss',
            method='PUT', headers={'Content-Type': 'text/plain'}, body=json_string)

    assert response['status'] == '415'
def test_get_bag_tiddlers_constraints():
    """A bag's read policy is enforced when listing its tiddlers."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers',
            method='GET')
    assert response['status'] == '200'

    # Restrict reads to the user NONE, locking out the anonymous GUEST.
    _put_policy('bag0', dict(policy=dict(read=['NONE'])))

    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers',
            method='GET')
    assert response['status'] == '403'
    assert 'may not read' in content


def test_roundtrip_unicode_bag():
    """A bag whose name contains non-ASCII characters round-trips intact."""
    http = httplib2.Http()
    encoded_bag_name = '%E3%81%86%E3%81%8F%E3%81%99'
    # Percent-decode the URL form into the unicode name the store will use.
    bag_name = urllib.unquote(encoded_bag_name).decode('utf-8')
    bag_content = {'policy':{'read':['a','b','c','GUEST']}}
    body = simplejson.dumps(bag_content)
    response, content = http.request('http://our_test_domain:8001/bags/%s' % encoded_bag_name,
            method='PUT', body=body, headers={'Content-Type': 'application/json'})
    assert response['status'] == '204'

    # The store must see the decoded unicode name.
    bag = Bag(bag_name)
    bag = store.get(bag)
    assert bag.name == bag_name

    response, content = http.request('http://our_test_domain:8001/bags/%s.json' % encoded_bag_name,
            method='GET')
    bag_data = simplejson.loads(content)
    assert response['status'] == '200'
    assert bag_data['policy']['read'] == ['a','b','c','GUEST']
def test_no_delete_store():
    """
    XXX: Not sure how to test this. We want to test for
    StoreMethodNotImplemented raising HTTP400. But
    it is hard to inject in a false store.
    """
    pass


def _put_policy(bag_name, policy_dict):
    """
    Helper: PUT the given policy dict onto the named bag.

    XXX: This is duplicated from test_web_tiddler. Clean up!
    """
    json = simplejson.dumps(policy_dict)

    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/%s' % bag_name,
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204'
| 38.784615 | 160 | 0.659897 | 1,671 | 12,605 | 4.852783 | 0.114303 | 0.109138 | 0.074978 | 0.102602 | 0.781971 | 0.742632 | 0.69688 | 0.674066 | 0.656554 | 0.650388 | 0 | 0.036765 | 0.180008 | 12,605 | 324 | 161 | 38.904321 | 0.747775 | 0.043237 | 0 | 0.5 | 0 | 0 | 0.328737 | 0.013873 | 0 | 0 | 0 | 0 | 0.298165 | 1 | 0.119266 | false | 0.004587 | 0.027523 | 0 | 0.146789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
afea4b536e618cb520246e81772f2873c30233cf | 1,605 | py | Python | examples/tensorflow/inference/tf-serving/inception/inference.py | FinchZHU/uai-sdk | 78e06bebba2d18233ce6dcb5be619e940f7a7ef3 | [
"Apache-2.0"
] | 38 | 2017-04-26T04:00:09.000Z | 2022-02-10T02:51:05.000Z | examples/tensorflow/inference/tf-serving/inception/inference.py | FinchZHU/uai-sdk | 78e06bebba2d18233ce6dcb5be619e940f7a7ef3 | [
"Apache-2.0"
] | 17 | 2017-11-20T20:47:09.000Z | 2022-02-09T23:48:46.000Z | examples/tensorflow/inference/tf-serving/inception/inference.py | FinchZHU/uai-sdk | 78e06bebba2d18233ce6dcb5be619e940f7a7ef3 | [
"Apache-2.0"
] | 28 | 2017-07-08T05:23:13.000Z | 2020-08-18T03:12:27.000Z | # Copyright 2017 The UAI-SDK Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
""" A very simple Inception-v3 inferencer.
The model that loaded was saved by SavedModelBuilder.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from PIL import Image
import json
from uai.arch.tf_serving import TFServingAiUcloudModel
class InceptionModel(TFServingAiUcloudModel):
""" Inception-v3 example model
"""
def __init__(self, conf):
super(InceptionModel, self).__init__(conf)
def load_model(self):
super(InceptionModel, self).load_model()
def execute(self, data, batch_size):
output_tensor = super(InceptionModel, self).execute(data, batch_size)
ret = []
for i in range(batch_size):
classes_arr = output_tensor[0]
ret.append(classes_arr[i][0])
return ret | 34.891304 | 81 | 0.67352 | 204 | 1,605 | 5.142157 | 0.573529 | 0.057197 | 0.045758 | 0.030505 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009456 | 0.209346 | 1,605 | 46 | 82 | 34.891304 | 0.817179 | 0.488474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15 | false | 0 | 0.4 | 0 | 0.65 | 0.05 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
afedcadb49ea41d5c13a82e8c5e90b2add6d0187 | 4,132 | py | Python | mundiapi/models/get_period_response.py | hugocpolos/MundiAPI-PYTHON | 164545cc58bf18c946d5456e9ba4d55a378a339a | [
"MIT"
] | 10 | 2017-08-30T15:53:00.000Z | 2021-02-11T18:06:56.000Z | mundiapi/models/get_period_response.py | hugocpolos/MundiAPI-PYTHON | 164545cc58bf18c946d5456e9ba4d55a378a339a | [
"MIT"
] | 4 | 2018-05-05T15:15:09.000Z | 2021-12-22T00:52:41.000Z | mundiapi/models/get_period_response.py | hugocpolos/MundiAPI-PYTHON | 164545cc58bf18c946d5456e9ba4d55a378a339a | [
"MIT"
] | 7 | 2017-04-27T13:46:52.000Z | 2021-04-14T13:44:23.000Z | # -*- coding: utf-8 -*-
"""
mundiapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
from mundiapi.api_helper import APIHelper
import mundiapi.models.get_subscription_response
class GetPeriodResponse(object):
    """Implementation of the 'GetPeriodResponse' model.

    Response object describing a subscription billing period. Attribute
    names are mapped to API (wire) property names through ``_names``.

    Attributes:
        start_at (datetime): period start.
        end_at (datetime): period end.
        id (string): period identifier.
        billing_at (datetime): billing timestamp.
        subscription (GetSubscriptionResponse): owning subscription.
        status (string): period status.
        duration (int): period duration.
        created_at (string): creation timestamp.
        updated_at (string): last-update timestamp.
        cycle (int): cycle number.
    """

    # Model property name -> API property name
    _names = {
        "start_at": 'start_at',
        "end_at": 'end_at',
        "id": 'id',
        "billing_at": 'billing_at',
        "subscription": 'subscription',
        "status": 'status',
        "duration": 'duration',
        "created_at": 'created_at',
        "updated_at": 'updated_at',
        "cycle": 'cycle'
    }

    def __init__(self, start_at=None, end_at=None, id=None, billing_at=None,
                 subscription=None, status=None, duration=None,
                 created_at=None, updated_at=None, cycle=None):
        """Constructor for the GetPeriodResponse class"""
        # Datetime-like fields are wrapped in the RFC3339 helper only when
        # a value is actually present; falsy input stays None.
        self.start_at = APIHelper.RFC3339DateTime(start_at) if start_at else None
        self.end_at = APIHelper.RFC3339DateTime(end_at) if end_at else None
        self.billing_at = APIHelper.RFC3339DateTime(billing_at) if billing_at else None
        # Remaining fields are stored verbatim.
        self.id = id
        self.subscription = subscription
        self.status = status
        self.duration = duration
        self.created_at = created_at
        self.updated_at = updated_at
        self.cycle = cycle

    @classmethod
    def from_dictionary(cls, dictionary):
        """Creates an instance of this model from a dictionary

        Args:
            dictionary (dictionary): A dictionary representation of the
                object as obtained from the deserialization of the server's
                response. The keys MUST match property names in the API
                description.

        Returns:
            object: An instance of this structure class, or None when the
                input dictionary is None.
        """
        if dictionary is None:
            return None

        def _rfc3339(key):
            # Parse an RFC3339 timestamp field; missing/falsy -> None.
            raw = dictionary.get(key)
            return (APIHelper.RFC3339DateTime.from_value(raw).datetime
                    if raw else None)

        raw_subscription = dictionary.get('subscription')
        subscription = (
            mundiapi.models.get_subscription_response.GetSubscriptionResponse
            .from_dictionary(raw_subscription)
            if raw_subscription else None)

        # Build the model with keyword arguments for clarity.
        return cls(start_at=_rfc3339("start_at"),
                   end_at=_rfc3339("end_at"),
                   id=dictionary.get('id'),
                   billing_at=_rfc3339("billing_at"),
                   subscription=subscription,
                   status=dictionary.get('status'),
                   duration=dictionary.get('duration'),
                   created_at=dictionary.get('created_at'),
                   updated_at=dictionary.get('updated_at'),
                   cycle=dictionary.get('cycle'))
| 36.245614 | 181 | 0.595837 | 435 | 4,132 | 5.508046 | 0.213793 | 0.07596 | 0.079299 | 0.095993 | 0.235392 | 0.127295 | 0.0601 | 0 | 0 | 0 | 0 | 0.009612 | 0.320184 | 4,132 | 113 | 182 | 36.566372 | 0.843361 | 0.308567 | 0 | 0 | 1 | 0 | 0.10289 | 0 | 0 | 0 | 0 | 0.088496 | 0 | 1 | 0.032787 | false | 0 | 0.032787 | 0 | 0.131148 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
aff58bae84d31bed4ab97ea8f2d9fc9f4fd59352 | 2,250 | py | Python | thbase/thrift2/cell.py | YutSean/pythbase | add955b74c442a1eb2d11438b9a89fd3e0afe0b2 | [
"Apache-2.0"
] | 3 | 2021-12-16T02:47:23.000Z | 2022-01-28T09:03:54.000Z | thbase/thrift2/cell.py | YutSean/thbase | add955b74c442a1eb2d11438b9a89fd3e0afe0b2 | [
"Apache-2.0"
] | 1 | 2021-01-21T01:10:18.000Z | 2021-01-21T01:10:18.000Z | thbase/thrift2/cell.py | YutSean/thbase | add955b74c442a1eb2d11438b9a89fd3e0afe0b2 | [
"Apache-2.0"
] | null | null | null | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Union
from thbase.util.bytes import to_str
class Cell(object):
    """One HBase cell loaded over thrift2.

    If no cell matched or the server reported an error, every attribute
    may be None. In python2 bytes are represented by str, so the value is
    typed accordingly.
    """

    def __init__(self, table_name, row, family, qualifier, value, timestamp):
        """Store the cell coordinates and payload.

        Args:
            table_name: name of the table (bytes or None).
            row: the row key (bytes or None).
            family: the column family (bytes or None).
            qualifier: the column qualifier (bytes or None).
            value: the bytes stored in the cell (bytes or None).
            timestamp: a long int (or None).
        """
        self._table_name = table_name
        self._row = row
        self._family = family
        self._qualifier = qualifier
        self._value = value
        self._timestamp = timestamp

    # All attributes are exposed read-only through properties.
    @property
    def table_name(self):
        return self._table_name

    @property
    def row(self):
        return self._row

    @property
    def family(self):
        return self._family

    @property
    def qualifier(self):
        return self._qualifier

    @property
    def value(self):
        return self._value

    @property
    def timestamp(self):
        return self._timestamp

    def __str__(self):
        # "table:row:family:qualifier => value"
        coordinate = ":".join([to_str(self.table_name), to_str(self.row),
                               to_str(self.family), to_str(self.qualifier)])
        return coordinate + ' => ' + to_str(self.value)
| 31.25 | 117 | 0.624444 | 294 | 2,250 | 4.666667 | 0.377551 | 0.045918 | 0.056851 | 0.065598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003159 | 0.296444 | 2,250 | 71 | 118 | 31.690141 | 0.86355 | 0.480889 | 0 | 0.162162 | 0 | 0 | 0.004682 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.216216 | false | 0 | 0.054054 | 0.189189 | 0.486486 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
aff8524799a2e94f4cc924fe90fee3d383128306 | 245 | py | Python | tesseract.py | Swarnava-Sadhukhan/Vehicle-Reg.-Details-using-Number-Plate | 71a11a1c348bd4707ec286cef690d197c877a864 | [
"MIT"
] | null | null | null | tesseract.py | Swarnava-Sadhukhan/Vehicle-Reg.-Details-using-Number-Plate | 71a11a1c348bd4707ec286cef690d197c877a864 | [
"MIT"
] | null | null | null | tesseract.py | Swarnava-Sadhukhan/Vehicle-Reg.-Details-using-Number-Plate | 71a11a1c348bd4707ec286cef690d197c877a864 | [
"MIT"
] | null | null | null | from PIL import Image
import pytesseract
pytesseract.pytesseract.tesseract_cmd = r"C:\Program Files\Tesseract-OCR\tesseract.exe"
image = Image.open(your_image)
image_to_text = pytesseract.image_to_string(image, lang='eng')
print(image_to_text)
| 30.625 | 87 | 0.820408 | 37 | 245 | 5.216216 | 0.567568 | 0.108808 | 0.11399 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.077551 | 245 | 7 | 88 | 35 | 0.853982 | 0 | 0 | 0 | 0 | 0 | 0.191837 | 0.134694 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
b300b8069a759a4c33c0e1f442dc7c9290118408 | 971 | py | Python | pupy/cheese.py | jessekrubin/pup | 2cab5da7b1b39453c44be556b691db83442b0565 | [
"BSD-2-Clause"
] | 2 | 2019-03-07T09:26:36.000Z | 2019-07-31T17:24:23.000Z | pupy/cheese.py | jessekrubin/pup | 2cab5da7b1b39453c44be556b691db83442b0565 | [
"BSD-2-Clause"
] | 2 | 2019-10-26T02:29:54.000Z | 2021-06-25T15:28:12.000Z | pupy/cheese.py | jessekrubin/pup | 2cab5da7b1b39453c44be556b691db83442b0565 | [
"BSD-2-Clause"
] | 1 | 2019-07-31T17:24:32.000Z | 2019-07-31T17:24:32.000Z | # -*- coding: utf-8 -*-
# Pretty ~ Useful ~ Python
"""
String Methods
"""
def string_score(strang: str) -> int:
    """Sum of letter values where a==1 and z == 26

    :param strang: string to be scored
    :type strang: str
    :returns: -> score of the string
    :rtype: int

    .. doctest:: python

        >>> string_score('me')
        18
        >>> string_score('poooood')
        95
        >>> string_score('gregory')
        95
    """
    # ord('a') - 1 == 96, so lowercase letters map to 1..26.
    total = 0
    for character in strang.lower():
        total += ord(character) - 96
    return total
def is_palindrome(string: str) -> bool:
    """True a string is a palindrome; False if string is not a palindrome.

    :param string:

    .. doctest::python

        >>> is_palindrome("racecar")
        True
        >>> is_palindrome("greg")
        False
    """
    # A palindrome reads the same forwards and backwards.
    return string == string[::-1]
if __name__ == "__main__":
    # Run this module's doctests when executed directly.
    from doctest import testmod
    testmod()
| 19.039216 | 81 | 0.569516 | 115 | 971 | 4.678261 | 0.530435 | 0.081784 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019006 | 0.295572 | 971 | 50 | 82 | 19.42 | 0.767544 | 0.542739 | 0 | 0 | 0 | 0 | 0.023188 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b304a8a0ec5b6c10876a973d320e1f0fc9420547 | 12,361 | py | Python | pysnmp/CISCO-SYS-INFO-LOG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-SYS-INFO-LOG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-SYS-INFO-LOG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-SYS-INFO-LOG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-SYS-INFO-LOG-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:57:12 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
IpAddress, MibIdentifier, TimeTicks, Integer32, ModuleIdentity, Bits, Gauge32, ObjectIdentity, NotificationType, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, iso, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "MibIdentifier", "TimeTicks", "Integer32", "ModuleIdentity", "Bits", "Gauge32", "ObjectIdentity", "NotificationType", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "iso", "Counter32")
DisplayString, TextualConvention, RowStatus, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus", "TruthValue")
ciscoSysInfoLogMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 330))
ciscoSysInfoLogMIB.setRevisions(('2005-08-12 10:00', '2003-01-24 10:00',))
if mibBuilder.loadTexts: ciscoSysInfoLogMIB.setLastUpdated('200508121000Z')
if mibBuilder.loadTexts: ciscoSysInfoLogMIB.setOrganization('Cisco System, Inc.')
ciscoSysInfoLogMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 0))
ciscoSysInfoLogMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 1))
ciscoSysInfoLogMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 2))
csilGlobalConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 1))
csilServerConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2))
csilCommandConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3))
csilSysInfoLogEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: csilSysInfoLogEnabled.setStatus('current')
csilSysInfoLogNotifEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: csilSysInfoLogNotifEnabled.setStatus('current')
csilMaxServerAllowed = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setUnits('servers').setMaxAccess("readwrite")
if mibBuilder.loadTexts: csilMaxServerAllowed.setStatus('current')
csilMaxProfilePerServerAllowed = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 2), Unsigned32()).setUnits('profiles').setMaxAccess("readonly")
if mibBuilder.loadTexts: csilMaxProfilePerServerAllowed.setStatus('current')
csilServerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3), )
if mibBuilder.loadTexts: csilServerTable.setStatus('current')
csilServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1), ).setIndexNames((0, "CISCO-SYS-INFO-LOG-MIB", "csilServerIndex"))
if mibBuilder.loadTexts: csilServerEntry.setStatus('current')
csilServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 1), Unsigned32())
if mibBuilder.loadTexts: csilServerIndex.setStatus('current')
csilServerAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 2), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerAddressType.setStatus('current')
csilServerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerAddress.setStatus('current')
csilServerProfileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 4), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerProfileIndex.setStatus('current')
csilServerProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("tftp", 1), ("rcp", 2), ("ftp", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerProtocol.setStatus('current')
csilServerRcpUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 6), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerRcpUserName.setStatus('current')
csilServerInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 7), Unsigned32().clone(1440)).setUnits('minutes').setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerInterval.setStatus('current')
csilServerLoggingFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 8), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerLoggingFileName.setStatus('current')
csilServerLastStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("none", 1), ("noLogFile", 2), ("noCommand", 3), ("linkBlock", 4), ("authError", 5), ("addrError", 6), ("copying", 7), ("writeError", 8), ("success", 9), ("ftpError", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: csilServerLastStatus.setStatus('current')
csilServerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 2, 3, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilServerRowStatus.setStatus('current')
csilMaxCommandPerProfile = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 1), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: csilMaxCommandPerProfile.setStatus('current')
csilCommandsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2), )
if mibBuilder.loadTexts: csilCommandsTable.setStatus('current')
csilCommandsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2, 1), ).setIndexNames((0, "CISCO-SYS-INFO-LOG-MIB", "csilCommandProfileIndex"), (0, "CISCO-SYS-INFO-LOG-MIB", "csilCommandIndex"))
if mibBuilder.loadTexts: csilCommandsEntry.setStatus('current')
csilCommandProfileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2, 1, 1), Unsigned32())
if mibBuilder.loadTexts: csilCommandProfileIndex.setStatus('current')
csilCommandIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2, 1, 2), Unsigned32())
if mibBuilder.loadTexts: csilCommandIndex.setStatus('current')
csilCommandString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2, 1, 3), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilCommandString.setStatus('current')
csilCommandExecOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2, 1, 4), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilCommandExecOrder.setStatus('current')
csilCommandStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 330, 1, 3, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: csilCommandStatus.setStatus('current')
csilLoggingFailNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 330, 0, 1)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilServerLastStatus"))
if mibBuilder.loadTexts: csilLoggingFailNotif.setStatus('current')
ciscoSysInfoLogMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 1))
ciscoSysInfoLogMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 2))
ciscoSysInfoLogMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 1, 1)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilGlobalConfigGroup"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerConfigGroup"), ("CISCO-SYS-INFO-LOG-MIB", "csilCommandConfigGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoSysInfoLogMIBCompliance = ciscoSysInfoLogMIBCompliance.setStatus('current')
csilGlobalConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 2, 1)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilSysInfoLogEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
csilGlobalConfigGroup = csilGlobalConfigGroup.setStatus('current')
csilServerConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 2, 2)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilMaxServerAllowed"), ("CISCO-SYS-INFO-LOG-MIB", "csilMaxProfilePerServerAllowed"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerAddress"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerAddressType"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerProfileIndex"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerProtocol"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerInterval"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerLoggingFileName"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerRcpUserName"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerLastStatus"), ("CISCO-SYS-INFO-LOG-MIB", "csilServerRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
csilServerConfigGroup = csilServerConfigGroup.setStatus('current')
csilCommandConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 2, 3)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilMaxCommandPerProfile"), ("CISCO-SYS-INFO-LOG-MIB", "csilCommandString"), ("CISCO-SYS-INFO-LOG-MIB", "csilCommandExecOrder"), ("CISCO-SYS-INFO-LOG-MIB", "csilCommandStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
csilCommandConfigGroup = csilCommandConfigGroup.setStatus('current')
csilNotifControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 2, 4)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilSysInfoLogNotifEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
csilNotifControlGroup = csilNotifControlGroup.setStatus('current')
csilLoggingFailNotifGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 330, 2, 2, 5)).setObjects(("CISCO-SYS-INFO-LOG-MIB", "csilLoggingFailNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
csilLoggingFailNotifGroup = csilLoggingFailNotifGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-SYS-INFO-LOG-MIB", csilCommandIndex=csilCommandIndex, csilServerProfileIndex=csilServerProfileIndex, csilCommandConfig=csilCommandConfig, csilServerEntry=csilServerEntry, csilCommandProfileIndex=csilCommandProfileIndex, csilCommandConfigGroup=csilCommandConfigGroup, csilGlobalConfigGroup=csilGlobalConfigGroup, ciscoSysInfoLogMIBObjects=ciscoSysInfoLogMIBObjects, csilLoggingFailNotifGroup=csilLoggingFailNotifGroup, csilGlobalConfig=csilGlobalConfig, csilCommandString=csilCommandString, ciscoSysInfoLogMIBCompliance=ciscoSysInfoLogMIBCompliance, ciscoSysInfoLogMIB=ciscoSysInfoLogMIB, ciscoSysInfoLogMIBCompliances=ciscoSysInfoLogMIBCompliances, csilMaxProfilePerServerAllowed=csilMaxProfilePerServerAllowed, csilServerRcpUserName=csilServerRcpUserName, csilServerRowStatus=csilServerRowStatus, csilServerConfigGroup=csilServerConfigGroup, csilServerInterval=csilServerInterval, csilSysInfoLogNotifEnabled=csilSysInfoLogNotifEnabled, csilServerIndex=csilServerIndex, ciscoSysInfoLogMIBNotifs=ciscoSysInfoLogMIBNotifs, csilNotifControlGroup=csilNotifControlGroup, csilServerAddressType=csilServerAddressType, csilCommandStatus=csilCommandStatus, csilLoggingFailNotif=csilLoggingFailNotif, PYSNMP_MODULE_ID=ciscoSysInfoLogMIB, csilServerLastStatus=csilServerLastStatus, csilCommandExecOrder=csilCommandExecOrder, ciscoSysInfoLogMIBGroups=ciscoSysInfoLogMIBGroups, csilServerProtocol=csilServerProtocol, csilSysInfoLogEnabled=csilSysInfoLogEnabled, csilServerConfig=csilServerConfig, ciscoSysInfoLogMIBConform=ciscoSysInfoLogMIBConform, csilCommandsTable=csilCommandsTable, csilServerAddress=csilServerAddress, csilServerLoggingFileName=csilServerLoggingFileName, csilMaxServerAllowed=csilMaxServerAllowed, csilCommandsEntry=csilCommandsEntry, csilMaxCommandPerProfile=csilMaxCommandPerProfile, csilServerTable=csilServerTable)
| 124.858586 | 1,859 | 0.75633 | 1,395 | 12,361 | 6.700358 | 0.139785 | 0.010913 | 0.012838 | 0.017118 | 0.364395 | 0.271638 | 0.204558 | 0.19525 | 0.162512 | 0.161335 | 0 | 0.06949 | 0.084945 | 12,361 | 98 | 1,860 | 126.132653 | 0.756874 | 0.027344 | 0 | 0.066667 | 0 | 0 | 0.186949 | 0.07433 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b313b854dc4d32c2411e1c8ff84dd9293d4bdd28 | 7,335 | py | Python | api/core/models.py | Latiftanga/twysis-api | efec6164bb9b4e46647b55f03f29287418451896 | [
"MIT"
] | null | null | null | api/core/models.py | Latiftanga/twysis-api | efec6164bb9b4e46647b55f03f29287418451896 | [
"MIT"
] | null | null | null | api/core/models.py | Latiftanga/twysis-api | efec6164bb9b4e46647b55f03f29287418451896 | [
"MIT"
] | null | null | null | import os
from datetime import datetime
from django.db import models
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
PermissionsMixin
)
def school_logo_file_path(instance, filename):
    """Generate file path for new school logo.

    The uploaded file keeps its extension but is renamed to
    ``<school name>_<school id>.<ext>`` under ``uploads/school/``.
    """
    extension = filename.rsplit('.', 1)[-1]  # text after the last dot
    new_name = f'{instance.name}_{instance.id}.{extension}'
    return os.path.join('uploads/school/', new_name)
class School(models.Model):
    """School object: identity, contact details and offered programmes."""
    # (stored value, human-readable label) pairs for the ``level`` field.
    SCHOOL_LEVELS = (
        ('PRIMARY', 'Primary'),
        ('JHS', 'Junior High'),
        ('SHS', 'Senior High')
    )
    name = models.CharField(max_length=255, unique=True)
    level = models.CharField(max_length=8, choices=SCHOOL_LEVELS)
    motto = models.CharField(max_length=255, blank=True)
    code = models.CharField(max_length=20, blank=True)
    address = models.CharField(max_length=255)
    city = models.CharField(max_length=255)
    region = models.CharField(max_length=255)
    phone = models.CharField(max_length=20, blank=True)
    postal_code = models.CharField(max_length=20, blank=True)
    email = models.EmailField(max_length=255, blank=True)
    # Audit trail: timestamps are automatic; *_by fields are free text.
    created = models.DateTimeField(auto_now_add=True)
    created_by = models.CharField(max_length=255, blank=True)
    updated = models.DateTimeField(auto_now=True)
    updated_by = models.CharField(max_length=255, blank=True)
    # Programmes offered; reverse accessor is ``programme.schools``.
    programmes = models.ManyToManyField(
        'Programme',
        related_name='schools',
        blank=True,
    )

    def __str__(self):
        return self.name
class Programme(models.Model):
    """Students programme object (e.g. a course of study), with optional code."""
    name = models.CharField(max_length=255, unique=True)
    code = models.CharField(max_length=255, blank=True)

    def __str__(self):
        return self.name
class Grade(models.Model):
    """Students grade levels (school years 1-12)."""
    # (value, label) pairs; the integer doubles as the primary key below.
    YEAR_CHOICES = (
        (1, 1),
        (2, 2),
        (3, 3),
        (4, 4),
        (5, 5),
        (6, 6),
        (7, 7),
        (8, 8),
        (9, 9),
        (10, 10),
        (11, 11),
        (12, 12)
    )
    name = models.CharField(max_length=16, unique=True)
    # ``year`` is the PK, so each year can exist at most once.
    year = models.PositiveSmallIntegerField(
        primary_key=True,
        choices=YEAR_CHOICES
    )

    def __str__(self):
        return self.name
class Class(models.Model):
    """Student class: a division of a grade, optionally tied to a programme."""
    division = models.CharField(max_length=16)
    grade = models.ForeignKey('Grade', on_delete=models.CASCADE)
    # Programme is optional (e.g. primary-level classes have none).
    programme = models.ForeignKey(
        'Programme',
        on_delete=models.CASCADE,
        blank=True, null=True
    )
    school = models.ForeignKey(
        School,
        on_delete=models.CASCADE,
        related_name='classes',
    )
    # Audit trail fields.
    created = models.DateTimeField(auto_now_add=True)
    created_by = models.CharField(max_length=32, blank=True)
    updated = models.DateTimeField(auto_now=True)
    updated_by = models.CharField(max_length=32, blank=True)

    @property
    def name(self):
        # Display name is grade name + division, e.g. "JHS1A".
        return f'{self.grade.name}{self.division}'

    def __str__(self):
        return self.name
class Room(models.Model):
    """Classrooms belonging to a school, with optional capacity."""
    name = models.CharField(max_length=16, unique=True)
    capacity = models.PositiveIntegerField(blank=True, null=True)
    description = models.CharField(max_length=64, blank=True)
    # Audit trail fields.
    created = models.DateTimeField(auto_now_add=True)
    created_by = models.CharField(max_length=255, blank=True)
    updated = models.DateTimeField(auto_now=True)
    updated_by = models.CharField(max_length=255, blank=True)
    school = models.ForeignKey(
        'School',
        on_delete=models.CASCADE,
        related_name='rooms'
    )

    def __str__(self):
        return self.name
class Period(models.Model):
    """School periods: a named time slot within the school day."""
    name = models.CharField(max_length=16, unique=True)
    start_time = models.TimeField()
    end_time = models.TimeField()
    use_for_attendance = models.BooleanField(default=True)
    # Audit trail fields.
    created = models.DateTimeField(auto_now_add=True)
    created_by = models.CharField(max_length=255, blank=True)
    updated = models.DateTimeField(auto_now=True)
    updated_by = models.CharField(max_length=255, blank=True)
    # NOTE(review): 'peroids' is a typo, but renaming the related_name
    # would change the reverse accessor callers may rely on — left as-is.
    school = models.ForeignKey(
        'School',
        on_delete=models.CASCADE,
        related_name='peroids'
    )

    @property
    def length(self):
        """Length of the period in minutes (float).

        Bug fix: the original divided a ``timedelta`` by 60, which yields
        another ``timedelta`` rather than a number of minutes.
        """
        FMT = '%H:%M:%S'
        delta = (datetime.strptime(str(self.end_time), FMT) -
                 datetime.strptime(str(self.start_time), FMT))
        return delta.total_seconds() / 60

    def __str__(self):
        return self.name
class House(models.Model):
    """Students house of affilation (boarding/sports house) within a school."""
    name = models.CharField(max_length=255, unique=True)
    school = models.ForeignKey(
        'School',
        on_delete=models.CASCADE,
        related_name='houses'
    )
    # Audit trail fields.
    created = models.DateTimeField(auto_now_add=True)
    created_by = models.CharField(max_length=255, blank=True)
    updated = models.DateTimeField(auto_now=True)
    updated_by = models.CharField(max_length=255, blank=True)

    def __str__(self):
        return self.name
class UserManager(BaseUserManager):
    """Manager that creates users keyed by email instead of username."""

    def create_user(self, email, password=None, **extra_fields):
        """Create and save a new user.

        Raises:
            ValueError: if ``email`` is falsy.
        """
        if not email:
            raise ValueError('Users must have a valid email address')
        # normalize_email lowercases the domain part of the address.
        user = self.model(email=self.normalize_email(email), **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def _create_with_flags(self, email, password, **flags):
        # Shared helper for the role-specific constructors below:
        # create a base user, set the given boolean role flags, re-save.
        user = self.create_user(email, password)
        for attr, value in flags.items():
            setattr(user, attr, value)
        user.save(using=self._db)
        return user

    def create_superuser(self, email, password):
        """Create and save a new super user"""
        return self._create_with_flags(email, password,
                                       is_superuser=True, is_staff=True)

    def create_staff(self, email, password):
        """Create and save a new staff(admin) user"""
        return self._create_with_flags(email, password, is_staff=True)

    def create_teacher(self, email, password):
        """Create and save a new teacher user"""
        return self._create_with_flags(email, password, is_teacher=True)

    def create_student(self, email, password):
        """Create and save a new student user"""
        return self._create_with_flags(email, password, is_student=True)

    def create_parent(self, email, password):
        """Create and save a new parent user"""
        return self._create_with_flags(email, password, is_parent=True)
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model that supports using email instead of username"""
    email = models.EmailField(max_length=255, unique=True)
    # Optional link to a school; reverse accessor is ``school.users``.
    school = models.ForeignKey(
        'School',
        on_delete=models.CASCADE,
        related_name='users',
        blank=True,
        null=True
    )
    is_active = models.BooleanField(default=True)
    # Role flags set by the corresponding UserManager.create_* methods.
    is_staff = models.BooleanField(default=False)
    is_teacher = models.BooleanField(default=False)
    is_student = models.BooleanField(default=False)
    is_parent = models.BooleanField(default=False)
    objects = UserManager()
    # Authenticate with the email field instead of a username.
    USERNAME_FIELD = 'email'
| 29.457831 | 76 | 0.649489 | 893 | 7,335 | 5.173572 | 0.191489 | 0.056494 | 0.105195 | 0.14026 | 0.590693 | 0.543074 | 0.530519 | 0.490043 | 0.378571 | 0.319048 | 0 | 0.019405 | 0.23422 | 7,335 | 248 | 77 | 29.576613 | 0.803098 | 0.065167 | 0 | 0.412698 | 0 | 0 | 0.038234 | 0.009891 | 0 | 0 | 0 | 0 | 0 | 1 | 0.084656 | false | 0.063492 | 0.021164 | 0.042328 | 0.560847 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
b3181b33b117a8ade8ea2aca94d2bffe254da2c0 | 1,750 | py | Python | src/vegas_test_results.py | arunmarria/DSCI522_Group413_WhatHappensInVegas | c7ab569cad8a453701d5218bbaf29cb5bc062cdd | [
"MIT"
] | 1 | 2020-02-04T19:03:17.000Z | 2020-02-04T19:03:17.000Z | src/vegas_test_results.py | arunmarria/DSCI522_Group413_WhatHappensInVegas | c7ab569cad8a453701d5218bbaf29cb5bc062cdd | [
"MIT"
] | 17 | 2020-01-18T00:52:23.000Z | 2020-02-06T04:37:25.000Z | src/vegas_test_results.py | arunmarria/DSCI522_Group413_WhatHappensInVegas | c7ab569cad8a453701d5218bbaf29cb5bc062cdd | [
"MIT"
] | 4 | 2020-01-18T00:08:32.000Z | 2020-01-29T23:00:01.000Z | # authors: Arun, Bronwyn, Manish
# date: 2020-01-23
"""Calculates MSE error for test set
Usage: src/vegas_test_results.py --test=<test> --out_dir=<out_dir>
Options:
--test=<test> Path (including filename) to training data
--out_dir=<out_dir> Path to directory where model results on test set need to be saved
"""
# importing required libraries
from docopt import docopt
import os
import matplotlib.pyplot as plt
from pandas.plotting import table
import numpy as np
import selenium
import pickle
import pandas as pd
# regressors / models
from sklearn.linear_model import LinearRegression, LogisticRegression, Lasso, Ridge
from sklearn.svm import SVR
from sklearn.ensemble import RandomForestRegressor
# Feature selection
from sklearn.feature_selection import RFE
# other
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score
from sklearn.feature_extraction.text import CountVectorizer
import warnings
# Silence FutureWarnings (sklearn/pandas deprecation chatter) for cleaner CLI output.
warnings.simplefilter(action='ignore', category=FutureWarning)
import altair as alt
# Parse command-line arguments against the usage spec in the module docstring.
opt = docopt(__doc__)
def main(test, out_dir):
    """Evaluate the trained model on the test set and report its MSE.

    Parameters
    ----------
    test : str
        Path (including filename) to the test-set CSV; must contain a
        'score' column (the target) alongside the feature columns.
    out_dir : str
        Path to the directory where results should be saved.
        NOTE(review): currently unused — the usage text promises results are
        written here, but this function only prints the MSE. TODO: confirm
        intent and either persist the result or drop the option.
    """
    test_data = pd.read_csv(test)
    X = test_data.drop('score', axis=1)
    y = test_data['score']

    # loading required features based on training
    cols_to_consider = np.load("results/features_to_use.npy", allow_pickle=True)
    X = X[cols_to_consider]

    # Fetch the trained model and predict. Use a context manager so the file
    # handle is closed even if unpickling fails (the original left it open).
    with open("results/finalized_model.sav", 'rb') as model_file:
        model = pickle.load(model_file)
    y_pred = model.predict(X)
    print("Model evaluated successfully on test data, MSE error - ",
          round(mean_squared_error(y, y_pred), 3))
#
# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main(opt["--test"], opt["--out_dir"])
| 26.515152 | 110 | 0.749143 | 246 | 1,750 | 5.142276 | 0.520325 | 0.06087 | 0.01581 | 0.018972 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006807 | 0.160571 | 1,750 | 65 | 111 | 26.923077 | 0.854323 | 0.271429 | 0 | 0 | 0 | 0 | 0.119142 | 0.042891 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | false | 0 | 0.566667 | 0 | 0.6 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
b31f81845434d0fd36fd45a38fa1af84404bd392 | 895 | py | Python | Better4Industry/luigi_pipeline1/Feature_Generation/generate_features.py | hanhanwu/Hanhan_Data_Science_Practice | ac48401121062d03f9983f6a8c7f7adb0ab9b616 | [
"MIT"
] | 24 | 2016-05-20T00:50:57.000Z | 2020-10-01T15:42:16.000Z | Better4Industry/luigi_pipeline1/Feature_Generation/generate_features.py | hanhanwu/Hanhan_Data_Science_Practice | ac48401121062d03f9983f6a8c7f7adb0ab9b616 | [
"MIT"
] | null | null | null | Better4Industry/luigi_pipeline1/Feature_Generation/generate_features.py | hanhanwu/Hanhan_Data_Science_Practice | ac48401121062d03f9983f6a8c7f7adb0ab9b616 | [
"MIT"
] | 14 | 2018-01-29T06:08:59.000Z | 2020-10-03T08:08:31.000Z | import luigi
import pandas as pd
from Data_Prep.generate_base_data import GenerateBase
# Generate Features
class GenerateFeatures(luigi.Task):
    """Luigi task that derives model features from the base data set.

    Reads the CSV produced by ``GenerateBase``, keeps the columns listed in
    ``config['origin_cols']``, adds two engineered columns, and writes the
    result to ``config['feature_file']`` under ``current_dir``.
    """

    current_dir = luigi.Parameter()   # directory containing input/output files
    config = luigi.DictParameter()    # expects keys: 'feature_file', 'origin_cols'

    def requires(self):
        """Upstream dependency: the base data must be generated first."""
        return GenerateBase(self.current_dir, self.config)

    def output(self):
        """Target CSV whose existence marks this task complete."""
        return luigi.LocalTarget(self.current_dir + self.config['feature_file'])

    def is_zero(self, val):
        """Return 1 if ``val`` equals zero, else 0 (binary indicator)."""
        return 1 if val == 0 else 0

    def run(self):
        df = pd.read_csv(self.input().path)
        print(df.columns)
        # FIX: .copy() — slicing returns a view, and the original then assigned
        # new columns into it, triggering pandas' SettingWithCopyWarning and
        # potentially failing to set the columns.
        new_df = df[list(self.config['origin_cols'])].copy()
        new_df['is_zero_duration'] = df.apply(lambda r: self.is_zero(r['duration']), axis=1)
        new_df['avg_vol_per_ct'] = df['avg_vol'] / df['avg_ct']
        new_df.to_csv(self.output().path, index=False)
        return new_df
| 30.862069 | 93 | 0.632402 | 123 | 895 | 4.406504 | 0.471545 | 0.046125 | 0.051661 | 0.066421 | 0.088561 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005944 | 0.248045 | 895 | 28 | 94 | 31.964286 | 0.799406 | 0.018994 | 0 | 0 | 1 | 0 | 0.087264 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.136364 | 0.090909 | 0.681818 | 0.045455 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b331f45ac0f8b9b280cbeead735fd1c4261a32c5 | 449 | py | Python | app/models.py | nferiel/shareabook | 3b8834d4203533e6afcc92b66d0fb44e9a52fee1 | [
"MIT"
] | null | null | null | app/models.py | nferiel/shareabook | 3b8834d4203533e6afcc92b66d0fb44e9a52fee1 | [
"MIT"
] | null | null | null | app/models.py | nferiel/shareabook | 3b8834d4203533e6afcc92b66d0fb44e9a52fee1 | [
"MIT"
] | 1 | 2018-04-22T23:13:25.000Z | 2018-04-22T23:13:25.000Z | from app import db
# NOTE(review): the original file defined an empty ``class Book(object): pass``
# here that was immediately shadowed by the ``Book`` document class below, so
# the dead stub has been removed; the module-level ``Book`` name is unchanged.

class User(db.DynamicDocument):
    """Application user stored in the ``users`` MongoDB collection."""
    email = db.StringField(required=True, unique=True)
    # hash_pw = db.BinaryField(required=True)
    password = db.BinaryField()  # binary payload — presumably a password hash; confirm with callers
    firstname = db.StringField(default='')
    lastname = db.StringField(default='')
    meta = {'collection': 'users'}


class Book(db.DynamicDocument):
    """Book stored in the ``books`` MongoDB collection, keyed by ISBN."""
    isbn = db.StringField(required=True)
    meta = {'collection': 'books'}
| 20.409091 | 54 | 0.674833 | 51 | 449 | 5.921569 | 0.529412 | 0.172185 | 0.139073 | 0.165563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184855 | 449 | 21 | 55 | 21.380952 | 0.825137 | 0.08686 | 0 | 0 | 0 | 0 | 0.073529 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.166667 | 0.083333 | 0 | 0.916667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
b332fd69b37397305a12fadff2c3f282ba18f273 | 8,527 | py | Python | src/copy.py | swerwath/monitor_mapper | 59a1f2977212e68abd6bc4906cd6d10b0430529e | [
"BSD-2-Clause"
] | null | null | null | src/copy.py | swerwath/monitor_mapper | 59a1f2977212e68abd6bc4906cd6d10b0430529e | [
"BSD-2-Clause"
] | null | null | null | src/copy.py | swerwath/monitor_mapper | 59a1f2977212e68abd6bc4906cd6d10b0430529e | [
"BSD-2-Clause"
] | null | null | null | from flask import Markup
# Display names (HTML markup) for each supported pollutant key.
CHEM_NAMES = {
    "PM2.5" : "PM<sub>2.5</sub>",
    "SO2" : "SO<sub>2</sub>",
    "NO2" : "NO<sub>2</sub>",
    "OZONE" : "ozone",
}
# Per-pollutant health-risk copy (HTML with citation links), appended into INFO below.
# NOTE(review): "exporsure" in the PM2.5 entry is a user-facing typo; fix deliberately if desired.
HEALTH_RISKS = {
    "PM2.5" : "Long-term exposure increases the risk of death from <a href=\"https://www.ncbi.nlm.nih.gov/pubmed/20458016\">heart disease</a> and <a href=\"https://www.ncbi.nlm.nih.gov/pubmed/11879110\">lung cancer</a>. Prolonged exporsure can also lead to <a href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3637008/\">quicker rate of artery thickening</a>, which increases the lifetime risk of developing cardiovascular disease.",
    "SO2" : "Acute short-term exposure to SO<sub>2</sub> can cause breathing difficulty, especially for people with asthma or other vulnerable populations.",
    "NO2" : "Long-term exposure to NO<sub>2</sub> is suspected to contribute to the development of asthma. Acute exposure can cause <a href=\"https://www.epa.gov/no2-pollution/basic-information-about-no2#Effects\" target=\"_blank\">irritation of the lungs and aggravate existing respiratory diseases</a>. NO<sub>2</sub> and other NO<sub>x</sub> compounds also lead to the development of acid rain, which can damage important ecosystems.",
    "OZONE" : "Long-term exposure to ozone increases the chance of lung infection and can <a href=\"https://www.epa.gov/ozone-pollution/health-effects-ozone-pollution\" target=\"_blank\">lead to the development of asthma</a>, especially in children. Acute short-term exposure can trigger asthma attacks or aggravate chronic bronchitis in people who already have those diseases. It can also cause difficulty of breathing and coughing, even in healthy people.",
}
# Environmental-justice copy per pollutant; empty string when no EJ note applies
# (callers concatenate these, so the empty entries are intentional).
EJ_EVIDENCE = {
    "PM2.5" : "Additionally, PM<sub>2.5</sub> carries environmental justice concerns, since people of color in the US <a href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3137995/\" target=\"_blank\">are more likely to live in areas with high PM<sub>2.5</sub> levels</a>. ",
    "SO2" : "",
    "NO2" : "",
    "OZONE" : "Additionally, ozone carries environmental justice concerns, since people of color in the US <a href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3137995/\" target=\"_blank\">are more likely to live in areas with high ozone levels</a>. ",
}
# Full introductory copy per pollutant, assembled from CHEM_NAMES, HEALTH_RISKS
# and EJ_EVIDENCE above; consumed by get_copy().
INFO = {
    "PM2.5" : CHEM_NAMES["PM2.5"] + " refers to particulate matter that has a diameter of less than 2.5 micrometers. These fine particles can be emitted from a number of sources, including power plants, motor vehicles, and forest/residential fires. Because " + CHEM_NAMES["PM2.5"] + " particles are so small, they stay in the air for longer than their heavier counterparts, increasing the odds that a human breathes them in. Once they enter the body, the fine particles can penetrate into the lungs and circulatory system. " + HEALTH_RISKS["PM2.5"] + " " + EJ_EVIDENCE["PM2.5"],
    "SO2" : "Sulfur Dioxide (SO<sub>2</sub>) is an air pollutant released primarily from the burning of fossil fuels at power plants and large industrial facilities. In addition to carrying its own health risks, SO<sub>2</sub> can react with other chemicals in the air to form PM<sub>2.5</sub> particulate matter (see above). " + HEALTH_RISKS["SO2"],
    "NO2" : "Nitrogen Dioxide (NO<sub>2</sub>) is a highly reactive gas emitted primarily from the burning of fuel, both from motor vehicles and power plants. NO<sub>2</sub> is a member of and an indicator for a group of chemicals called nitrogen oxides (NO<sub>x</sub>). " + HEALTH_RISKS["NO2"],
    "OZONE" : "Ozone (O<sub>3</sub>) is a gas found both in the upper atmosphere and at ground level. Ground level ozone is not released directly into the air; rather, it is created as a product of chemical reactions between other air pollutants. These chemical reactions are accelerated on hot days, leading to increased ozone levels. " + HEALTH_RISKS["OZONE"] + " " + EJ_EVIDENCE["OZONE"],
}
# "Learn more" link per pollutant; "#" entries are placeholders not yet filled in.
LINK = {
    "PM2.5" : "#",
    "SO2" : "#",
    "NO2" : "#",
    "OZONE" : "https://www.epa.gov/ozone-pollution/health-effects-ozone-pollution",
}
# Spatial/temporal-variability caveat per pollutant, appended by dist_copy();
# empty for SO2 (no caveat text).
DIST_VAR = {
    "PM2.5" : "While PM<sub>2.5</sub> levels at various stations within a single city tend to be highly correlated, <a href=\"https://doi.org/10.1080/10473289.2004.10470919\">there is still variation</a> in measurements.",
    "SO2" : "",
    "NO2" : "Since one of main sources of NO<sub>2</sub> is motor vehicles, <a href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5348563/\">proximity to traffic</a> is a highly predictive of pollution levels. Since motor vehicle traffic varies over time (e.g. \"rush hour\") and air monitoring has low temporal resolution, it's important to keep in mind that NO<sub>2</sub> levels may higher than measured at certain times of day.",
    "OZONE" : "While ozone has <a href=\"https://www.sciencedirect.com/science/article/pii/0004698180900529\">relatively low spatial variation</a> in comparison to other pollutants, its distribution is still heavily dependent on nearby emission sources. If an ozone monitor is upwind of a large emission source, its reading may not be representative of the surrounding area.",
}
def dist_copy(dist, chem):
    """Return HTML copy describing how far the nearest *chem* monitor is.

    Distance bands: < 6.21 miles confident, < 9.3 miles moderately confident,
    otherwise not representative (presumably ~10 km / ~15 km thresholds —
    confirm with the original authors). A chemical-specific variability note
    from DIST_VAR is always appended.
    """
    copy = f"The nearest {CHEM_NAMES[chem]} monitoring station is {dist:.1f} miles away from you"
    if dist < 6.21:
        copy += ", meaning that this monitor is likely close enough to give you an accurate estimate of the air pollution where you are."
    elif dist < 9.3:
        copy += (
            ". While this station is not very close to you, it provides a moderately confident estimate of "
            f"{CHEM_NAMES[chem]} levels near you. However, certain factors like wind and topological features may affect this accuracy."
        )
    else:
        copy += (
            ". Given how far away the monitor is, it's unlikely that its measurements are representative of "
            f"{CHEM_NAMES[chem]} levels near you."
        )
    copy += " " + DIST_VAR[chem]
    return copy
def aqi_copy(aqi, chem_name):
    """Return HTML copy explaining the given AQI value for *chem_name*.

    EPA bands used here: <51 good, 51-100 moderate, 101-150 unhealthy for
    sensitive groups, 151+ very unhealthy (the 151-200 'unhealthy' band is
    collapsed into the final message).
    """
    copy = (
        "The <b>Air Quality Index</b> (AQI) is a number that tells you how much of a certain "
        "chemical is in the air, and if that level of pollution carries any potential health concerns. "
        f"Based on the nearest available monitoring station, the estimated AQI for {chem_name} near you is {aqi}, "
    )
    if aqi < 51:
        copy += f'which the EPA classifies as <font color="green">good</font>. This means that levels of {chem_name} are low, and air pollution poses little to no health risks for long periods of exposure.'
    elif aqi < 101:
        copy += f'which the EPA classifies as <font color="orange">moderate</font>. This means that levels of {chem_name} are within regulatory limits, but a very small number of sensitive people may experience health effects.'
    elif aqi < 151:
        copy += f'which the EPA classifies as <font color="red">unhealthy for sensitive groups</font>. This means that levels of {chem_name} are at high levels. People with heart or lung disease, children, and older adults may begin to experience greater health risk.'
    else:
        copy += f'which the EPA classifies as <font color="maroon">very unhealthy</font>. This means that levels of {chem_name} are at very high levels. All people may begin to experience health effects, and members of sensitive groups may experience very serious health risks.'
    copy += ' For more information on the Air Quality Index, click <a href="https://airnow.gov/index.cfm?action=aqibasics.aqi" target="_blank">here</a>.'
    return copy
def get_copy(chem, nearest_monitors):
    """Build the three HTML snippets for *chem*: (info, distance, AQI reading).

    ``nearest_monitors`` maps chemical keys to sequences whose element 0 is a
    reading dict containing at least 'AQI' and element 1 is the distance in
    miles. When *chem* has no entry, the distance snippet explains the
    monitoring gap and the reading snippet is empty.

    Returns a tuple of Markup-wrapped strings.
    """
    info = INFO[chem] + "<br /><br />"
    if chem not in nearest_monitors:
        # No monitor nearby: explain why the gap matters (the double space
        # after "matter?" reproduces the original concatenation exactly).
        dist = (
            f"It looks like <b>there are no {CHEM_NAMES[chem]} monitors in your area!</b> Why does this matter? "
            f" Since you don't live by any {CHEM_NAMES[chem]}"
            " monitoring stations, there is no way for public health officials to estimate the exposure of people in your community. "
            f"{EJ_EVIDENCE[chem]}If you're concerned about the lack of {CHEM_NAMES[chem]} monitoring in your community, you can "
            f"learn more about {CHEM_NAMES[chem]} using the link below, or keep scrolling to get involved with community air monitoring efforts and your local air district."
        )
        read = ""
    else:
        distance = nearest_monitors[chem][1]
        aqi = nearest_monitors[chem][0]['AQI']
        dist = dist_copy(distance, chem) + " <br /><br />"
        read = aqi_copy(aqi, CHEM_NAMES[chem])
    return (Markup(info), Markup(dist), Markup(read))
| 92.684783 | 578 | 0.705406 | 1,318 | 8,527 | 4.531866 | 0.320182 | 0.010715 | 0.012891 | 0.019588 | 0.197556 | 0.164072 | 0.150678 | 0.141303 | 0.117194 | 0.094425 | 0 | 0.023116 | 0.17814 | 8,527 | 91 | 579 | 93.703297 | 0.829195 | 0 | 0 | 0.092105 | 0 | 0.289474 | 0.706931 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.039474 | false | 0 | 0.039474 | 0 | 0.118421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b33dc8d5e5e1e8c41ea27f07fd94c0b2c8d6ad02 | 878 | py | Python | awards/myusers/forms.py | dan-mutua/djangowk3 | 9af2651ade9b7cd9c02ee1f93dc60c4f9ea1adeb | [
"MIT"
] | null | null | null | awards/myusers/forms.py | dan-mutua/djangowk3 | 9af2651ade9b7cd9c02ee1f93dc60c4f9ea1adeb | [
"MIT"
] | null | null | null | awards/myusers/forms.py | dan-mutua/djangowk3 | 9af2651ade9b7cd9c02ee1f93dc60c4f9ea1adeb | [
"MIT"
] | null | null | null | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
class RegistrationForm(UserCreationForm):
    """Sign-up form: Django's UserCreationForm plus email and bio fields."""
    email = forms.EmailField()
    bio = forms.CharField()

    class Meta:
        model = User
        fields = ['username', 'email', 'bio', 'password1', 'password2']

    def __init__(self, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # BUG FIX: the original used slice syntax —
        # ``self.fields[...].widget.attrs['class':'form-control']`` — which
        # builds a slice object and raises TypeError ("unhashable type:
        # 'slice'") instead of setting the CSS class. Assign the key instead.
        for field_name in ('username', 'email', 'password1', 'password2'):
            self.fields[field_name].widget.attrs['class'] = 'form-control'
class EditProfile(UserChangeForm):
    """Profile-editing form exposing the user's username, email and bio."""
    email = forms.EmailField()
    bio = forms.CharField()

    class Meta:
        # Edits the User model; only these three columns are exposed.
        model = User
        fields=['username','email','bio']
| 31.357143 | 70 | 0.690205 | 98 | 878 | 6.102041 | 0.336735 | 0.06689 | 0.107023 | 0.133779 | 0.488294 | 0.443144 | 0.443144 | 0.257525 | 0.257525 | 0.257525 | 0 | 0.005369 | 0.151481 | 878 | 28 | 71 | 31.357143 | 0.797315 | 0 | 0 | 0.380952 | 0 | 0 | 0.169511 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0.142857 | 0.142857 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
b35c59192b369f27f56811705c7cf55171b138aa | 10,807 | py | Python | clients/python/pqstream_pb2.py | backwardn/pqstream | c9027a0afa15cb7c5412d82eabf12a1dc3f96bd2 | [
"MIT"
] | 491 | 2017-09-04T05:19:47.000Z | 2021-12-17T14:47:43.000Z | clients/python/pqstream_pb2.py | backwardn/pqstream | c9027a0afa15cb7c5412d82eabf12a1dc3f96bd2 | [
"MIT"
] | 72 | 2017-09-04T05:19:52.000Z | 2021-07-05T22:32:37.000Z | clients/python/pqstream_pb2.py | backwardn/pqstream | c9027a0afa15cb7c5412d82eabf12a1dc3f96bd2 | [
"MIT"
] | 26 | 2017-09-04T10:50:18.000Z | 2021-11-09T04:42:37.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pqstream.proto
import sys
# Py2/Py3 shim: identity on Python 2, latin-1 encode on Python 3, so the
# serialized descriptor literal below becomes bytes on both versions.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

# Default symbol database; every descriptor/message below is registered here.
_sym_db = _symbol_database.Default()


from github.com.golang.protobuf.ptypes.struct import struct_pb2 as github_dot_com_dot_golang_dot_protobuf_dot_ptypes_dot_struct_dot_struct__pb2

# File descriptor for pqstream.proto (package 'pqs', proto3 syntax).
# serialized_pb is the compiled .proto blob — regenerate with protoc
# rather than editing this file by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='pqstream.proto',
  package='pqs',
  syntax='proto3',
  serialized_pb=_b('\n\x0epqstream.proto\x12\x03pqs\x1a\x35github.com/golang/protobuf/ptypes/struct/struct.proto\"%\n\rListenRequest\x12\x14\n\x0ctable_regexp\x18\x01 \x01(\t\"\xa6\x01\n\x08RawEvent\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\r\n\x05table\x18\x02 \x01(\t\x12\x1a\n\x02op\x18\x03 \x01(\x0e\x32\x0e.pqs.Operation\x12\n\n\x02id\x18\x04 \x01(\t\x12(\n\x07payload\x18\x05 \x01(\x0b\x32\x17.google.protobuf.Struct\x12)\n\x08previous\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct\"\xa2\x01\n\x05\x45vent\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\r\n\x05table\x18\x02 \x01(\t\x12\x1a\n\x02op\x18\x03 \x01(\x0e\x32\x0e.pqs.Operation\x12\n\n\x02id\x18\x04 \x01(\t\x12(\n\x07payload\x18\x05 \x01(\x0b\x32\x17.google.protobuf.Struct\x12(\n\x07\x63hanges\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct*J\n\tOperation\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06INSERT\x10\x01\x12\n\n\x06UPDATE\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\x0c\n\x08TRUNCATE\x10\x04\x32\x38\n\x08PQStream\x12,\n\x06Listen\x12\x12.pqs.ListenRequest\x1a\n.pqs.Event\"\x00\x30\x01\x62\x06proto3')
  ,
  dependencies=[github_dot_com_dot_golang_dot_protobuf_dot_ptypes_dot_struct_dot_struct__pb2.DESCRIPTOR,])
# Descriptor for the pqs.Operation enum (the kind of row change an event reports).
_OPERATION = _descriptor.EnumDescriptor(
  name='Operation',
  full_name='pqs.Operation',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNKNOWN', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='INSERT', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UPDATE', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DELETE', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TRUNCATE', index=4, number=4,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=451,
  serialized_end=525,
)
_sym_db.RegisterEnumDescriptor(_OPERATION)

Operation = enum_type_wrapper.EnumTypeWrapper(_OPERATION)
# Module-level aliases for the enum values, mirroring the .proto names.
UNKNOWN = 0
INSERT = 1
UPDATE = 2
DELETE = 3
TRUNCATE = 4
# Descriptor for pqs.ListenRequest: a single string field, table_regexp,
# selecting which tables to stream events for.
_LISTENREQUEST = _descriptor.Descriptor(
  name='ListenRequest',
  full_name='pqs.ListenRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='table_regexp', full_name='pqs.ListenRequest.table_regexp', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=78,
  serialized_end=115,
)
# Descriptor for pqs.RawEvent: schema/table/op/id plus full 'payload' and
# 'previous' row snapshots as google.protobuf.Struct messages.
_RAWEVENT = _descriptor.Descriptor(
  name='RawEvent',
  full_name='pqs.RawEvent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='schema', full_name='pqs.RawEvent.schema', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='table', full_name='pqs.RawEvent.table', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='op', full_name='pqs.RawEvent.op', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='id', full_name='pqs.RawEvent.id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payload', full_name='pqs.RawEvent.payload', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='previous', full_name='pqs.RawEvent.previous', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=118,
  serialized_end=284,
)
# Descriptor for pqs.Event: like RawEvent, but the sixth field is 'changes'
# (a Struct diff) instead of the full 'previous' snapshot.
_EVENT = _descriptor.Descriptor(
  name='Event',
  full_name='pqs.Event',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='schema', full_name='pqs.Event.schema', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='table', full_name='pqs.Event.table', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='op', full_name='pqs.Event.op', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='id', full_name='pqs.Event.id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payload', full_name='pqs.Event.payload', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='changes', full_name='pqs.Event.changes', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=287,
  serialized_end=449,
)
# Link cross-references that couldn't be resolved at descriptor construction:
# enum-typed and message-typed fields point at their descriptors.
_RAWEVENT.fields_by_name['op'].enum_type = _OPERATION
_RAWEVENT.fields_by_name['payload'].message_type = github_dot_com_dot_golang_dot_protobuf_dot_ptypes_dot_struct_dot_struct__pb2._STRUCT
_RAWEVENT.fields_by_name['previous'].message_type = github_dot_com_dot_golang_dot_protobuf_dot_ptypes_dot_struct_dot_struct__pb2._STRUCT
_EVENT.fields_by_name['op'].enum_type = _OPERATION
_EVENT.fields_by_name['payload'].message_type = github_dot_com_dot_golang_dot_protobuf_dot_ptypes_dot_struct_dot_struct__pb2._STRUCT
_EVENT.fields_by_name['changes'].message_type = github_dot_com_dot_golang_dot_protobuf_dot_ptypes_dot_struct_dot_struct__pb2._STRUCT
DESCRIPTOR.message_types_by_name['ListenRequest'] = _LISTENREQUEST
DESCRIPTOR.message_types_by_name['RawEvent'] = _RAWEVENT
DESCRIPTOR.message_types_by_name['Event'] = _EVENT
DESCRIPTOR.enum_types_by_name['Operation'] = _OPERATION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes generated from the descriptors above.
ListenRequest = _reflection.GeneratedProtocolMessageType('ListenRequest', (_message.Message,), dict(
  DESCRIPTOR = _LISTENREQUEST,
  __module__ = 'pqstream_pb2'
  # @@protoc_insertion_point(class_scope:pqs.ListenRequest)
  ))
_sym_db.RegisterMessage(ListenRequest)

RawEvent = _reflection.GeneratedProtocolMessageType('RawEvent', (_message.Message,), dict(
  DESCRIPTOR = _RAWEVENT,
  __module__ = 'pqstream_pb2'
  # @@protoc_insertion_point(class_scope:pqs.RawEvent)
  ))
_sym_db.RegisterMessage(RawEvent)

Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), dict(
  DESCRIPTOR = _EVENT,
  __module__ = 'pqstream_pb2'
  # @@protoc_insertion_point(class_scope:pqs.Event)
  ))
_sym_db.RegisterMessage(Event)


# Descriptor for the PQStream service: one server-streaming RPC,
# Listen(ListenRequest) -> stream Event.
_PQSTREAM = _descriptor.ServiceDescriptor(
  name='PQStream',
  full_name='pqs.PQStream',
  file=DESCRIPTOR,
  index=0,
  options=None,
  serialized_start=527,
  serialized_end=583,
  methods=[
  _descriptor.MethodDescriptor(
    name='Listen',
    full_name='pqs.PQStream.Listen',
    index=0,
    containing_service=None,
    input_type=_LISTENREQUEST,
    output_type=_EVENT,
    options=None,
  ),
])
_sym_db.RegisterServiceDescriptor(_PQSTREAM)

DESCRIPTOR.services_by_name['PQStream'] = _PQSTREAM

# @@protoc_insertion_point(module_scope)
| 37.010274 | 1,073 | 0.737207 | 1,451 | 10,807 | 5.206065 | 0.135768 | 0.050834 | 0.027667 | 0.040773 | 0.651046 | 0.619804 | 0.619804 | 0.58565 | 0.58565 | 0.555732 | 0 | 0.043168 | 0.131859 | 10,807 | 291 | 1,074 | 37.137457 | 0.761991 | 0.028408 | 0 | 0.607692 | 1 | 0.003846 | 0.165173 | 0.103793 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.030769 | 0 | 0.030769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2fa13ee457543f3924333a36c7c6e4d2244025ed | 803 | py | Python | incident/migrations/0008_auto_20200528_1307.py | BuildForSDG/team-271-backend | db2bd8eb5f4d9f46bd6baff05e0e705aba883a83 | [
"MIT"
] | 1 | 2020-08-20T01:24:46.000Z | 2020-08-20T01:24:46.000Z | incident/migrations/0008_auto_20200528_1307.py | BuildForSDG/team-271-backend | db2bd8eb5f4d9f46bd6baff05e0e705aba883a83 | [
"MIT"
] | 12 | 2020-05-13T04:40:32.000Z | 2022-03-12T00:39:09.000Z | incident/migrations/0008_auto_20200528_1307.py | BuildForSDG/team-271-backend | db2bd8eb5f4d9f46bd6baff05e0e705aba883a83 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-05-28 10:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the incident app; regenerate with
    ``makemigrations`` rather than hand-editing."""

    dependencies = [
        ('incident', '0007_incident_reporter'),
    ]

    operations = [
        # plateNumber becomes a nullable positive integer.
        migrations.AlterField(
            model_name='incident',
            name='plateNumber',
            field=models.PositiveIntegerField(null=True),
        ),
        # postPlateCharacter: single optional character. preserve_default=False
        # means default=1 is only used to populate existing rows during this
        # migration, not kept on the model afterwards.
        migrations.AlterField(
            model_name='incident',
            name='postPlateCharacter',
            field=models.CharField(blank=True, default=1, max_length=1),
            preserve_default=False,
        ),
        # prePlateCharacters: optional, up to three characters.
        migrations.AlterField(
            model_name='incident',
            name='prePlateCharacters',
            field=models.CharField(blank=True, max_length=3),
        ),
    ]
| 26.766667 | 72 | 0.590286 | 75 | 803 | 6.213333 | 0.56 | 0.128755 | 0.160944 | 0.186695 | 0.388412 | 0.263949 | 0 | 0 | 0 | 0 | 0 | 0.039146 | 0.300125 | 803 | 29 | 73 | 27.689655 | 0.790036 | 0.05604 | 0 | 0.391304 | 1 | 0 | 0.133598 | 0.029101 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.173913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2faff3758198e47eb0efab51b5ceb7515ba56700 | 1,345 | py | Python | sync_repo.py | ewhitesides/pulp_operations | b6a3541559e48c717926b245bbbf2dd87638e093 | [
"MIT"
] | null | null | null | sync_repo.py | ewhitesides/pulp_operations | b6a3541559e48c717926b245bbbf2dd87638e093 | [
"MIT"
] | 1 | 2021-06-17T04:35:05.000Z | 2021-06-17T04:35:05.000Z | sync_repo.py | ewhitesides/pulp_operations | b6a3541559e48c717926b245bbbf2dd87638e093 | [
"MIT"
] | null | null | null | """
script to sync repos from repo_data.py
"""
import urllib3
import pulp_operations
from repo_data import repo_data
# disable ssl warnings for now
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Sync every (os, repo, source) combination declared in repo_data.
# The loop variable was originally named ``os``, shadowing the builtin
# module name; renamed to ``os_name``.
for os_name in repo_data:
    for repo in repo_data[os_name]:
        for source_name, source_url in repo_data[os_name][repo].items():
            # BUG FIX: the original had an ``except Exception:`` clause with
            # no matching ``try:`` (a SyntaxError as written); restore the
            # intended try/except so one failing source is skipped and the
            # remaining sources still sync.
            try:
                repo_name = f"{os_name}-{repo}"
                remote_name = f"{os_name}-{repo}-{source_name}"
                remote_url = source_url
                pulp_operations.sync(repo_name, remote_name, remote_url)
            except Exception:
                # if a particular source fails, continue to the next one
                continue
#optional example for use with a configured signing service
# SIGNSERVICE_NAME = 'sign-metadata'
# for os in repo_data:
# for repo in repo_data[os]:
# for source_name, source_url in repo_data[os][repo].items():
# #try syncing
# repo_name = f"{os}-{repo}"
# remote_name = f"{os}-{repo}-{source_name}"
# remote_url = source_url
# pulp_operations.sync(repo_name, remote_name, remote_url, SIGNSERVICE_NAME)
# #if particular source fails, continue to next one
# except Exception:
# continue
| 32.02381 | 88 | 0.620074 | 170 | 1,345 | 4.694118 | 0.282353 | 0.100251 | 0.075188 | 0.06015 | 0.639098 | 0.639098 | 0.639098 | 0.639098 | 0.639098 | 0.639098 | 0 | 0.003158 | 0.29368 | 1,345 | 41 | 89 | 32.804878 | 0.836842 | 0.507063 | 0 | 0 | 0 | 0 | 0.060201 | 0.041806 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.230769 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2fb3c3428f697abdd7be188f6220ff2f03a32b0d | 666 | py | Python | scanner/dummy_GPIO.py | mcecchi/SuperScanTest | 63aeed26ccb6a80f4edd58ab523037d45f67625b | [
"Apache-2.0"
] | 81 | 2017-05-17T02:10:59.000Z | 2022-02-03T08:22:44.000Z | scanner/dummy_GPIO.py | smilejx/sweep-3d-scanner | 2d245517b04ca49ea3d1238d1625fa0c993274c0 | [
"MIT"
] | 43 | 2017-05-16T19:57:48.000Z | 2020-01-13T01:16:37.000Z | scanner/dummy_GPIO.py | smilejx/sweep-3d-scanner | 2d245517b04ca49ea3d1238d1625fa0c993274c0 | [
"MIT"
] | 53 | 2017-05-17T02:24:45.000Z | 2022-02-06T08:46:04.000Z | """ Dummy version of GPIO """
from random import randint
# Stand-ins for the RPi.GPIO module-level constants; the concrete values are
# arbitrary distinct integers, only identity with these names matters.
BCM = 1
IN = 2
PUD_DOWN = 3
FALLING = 4
RELEASE = 5
def setmode(mode=None):
    """No-op stand-in for RPi.GPIO.setmode; *mode* is accepted and ignored."""
    return None
def setup(pin=None, IO=None, pull_up_down=None):
    """No-op stand-in for RPi.GPIO.setup; all arguments are ignored."""
    return None
def add_event_detect(pin=None, edge=None, callback=None, bouncetime=None):
    """No-op stand-in for RPi.GPIO.add_event_detect; nothing is registered."""
    return None
def input(pin=None):
    """Simulate a GPIO read: True roughly 10% of the time, else False.

    Shadows the ``input`` builtin on purpose, matching the RPi.GPIO API.
    """
    # FIX: randint(0, 10) spans 11 values, so ``< 1`` fired with probability
    # 1/11 (~9.1%), not the documented 10%; randint(0, 9) gives exactly 1/10.
    return randint(0, 9) < 1
def event_detected(pin=None):
    """Simulate event detection: True roughly 10% of the time, else False."""
    # FIX: randint(0, 10) gave probability 1/11 (~9.1%) instead of the
    # documented 10%; randint(0, 9) gives exactly 1/10.
    return randint(0, 9) < 1
def remove_event_detect(pin=None):
""" Docstring """
pass
| 16.243902 | 74 | 0.621622 | 94 | 666 | 4.319149 | 0.489362 | 0.192118 | 0.167488 | 0.147783 | 0.310345 | 0.310345 | 0.310345 | 0.310345 | 0.310345 | 0.310345 | 0 | 0.033531 | 0.238739 | 666 | 40 | 75 | 16.65 | 0.767258 | 0.238739 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.222222 | 0.055556 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
2fb4e2d871819b3c8343ec0beb98ef31b9d2ee82 | 547 | py | Python | contrib/tornado/test/curl_httpclient_test.py | loggly/alertbirds-community-edition | b35f0ffbe80049dfa74d79e9e45b4cce4cdbf47a | [
"Apache-2.0"
] | 2 | 2015-10-28T23:14:47.000Z | 2015-11-27T18:00:12.000Z | tornado/test/curl_httpclient_test.py | joetyson/tornado | 02ce53b1fd8b4acc4721e6616b73d11bf6c6a4fb | [
"Apache-2.0"
] | null | null | null | tornado/test/curl_httpclient_test.py | joetyson/tornado | 02ce53b1fd8b4acc4721e6616b73d11bf6c6a4fb | [
"Apache-2.0"
] | null | null | null | from tornado.test.httpclient_test import HTTPClientCommonTestCase
try:
    import pycurl
except ImportError:
    # pycurl is optional; mark it missing so the curl-backed test case
    # can be removed below instead of failing at import time.
    pycurl = None

if pycurl is not None:
    from tornado.curl_httpclient import CurlAsyncHTTPClient


class CurlHTTPClientCommonTestCase(HTTPClientCommonTestCase):
    def get_http_client(self):
        # Re-run the shared HTTP client suite against the curl-based client.
        return CurlAsyncHTTPClient(io_loop=self.io_loop)

# Remove the base class from our namespace so the unittest module doesn't
# try to run it again.
del HTTPClientCommonTestCase

if pycurl is None:
    # Without pycurl the subclass would fail at runtime (CurlAsyncHTTPClient
    # was never imported), so drop it from the module namespace too.
    del CurlHTTPClientCommonTestCase
| 26.047619 | 73 | 0.800731 | 66 | 547 | 6.545455 | 0.621212 | 0.050926 | 0.046296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.159049 | 547 | 20 | 74 | 27.35 | 0.93913 | 0.16819 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.307692 | 0.076923 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
2fc46d8307244629aad2c8b5503e2a8f6924a734 | 384 | py | Python | art_gallery/point.py | chaitan94/2d-visibility-optimization | 2d1cc4aee18265814b072d89560284086680965d | [
"MIT"
] | null | null | null | art_gallery/point.py | chaitan94/2d-visibility-optimization | 2d1cc4aee18265814b072d89560284086680965d | [
"MIT"
] | null | null | null | art_gallery/point.py | chaitan94/2d-visibility-optimization | 2d1cc4aee18265814b072d89560284086680965d | [
"MIT"
] | null | null | null | class Point:
def __init__(self, x, y):
self.x = float(x)
self.y = float(y)
def collinear(self, p, q):
if p.x - self.x == 0 and q.x - self.x == 0:
return True
if p.x - self.x == 0 or q.x - self.x == 0:
return False
m1 = (p.y-self.y)/(p.x-self.x)
m2 = (q.y-self.y)/(q.x-self.x)
return m1 == m2
def __str__(self):
return "Point [%f, %f]" % (self.x, self.y)
| 22.588235 | 46 | 0.541667 | 80 | 384 | 2.5 | 0.275 | 0.225 | 0.18 | 0.14 | 0.24 | 0.24 | 0 | 0 | 0 | 0 | 0 | 0.027491 | 0.242188 | 384 | 16 | 47 | 24 | 0.659794 | 0 | 0 | 0 | 0 | 0 | 0.036458 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0 | 0.071429 | 0.571429 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2fd6324d0f091b31d84d2f540ed44c97d23a000a | 3,509 | py | Python | common/bin/namespace_manager.py | frankovacevich/aleph | 9b01dcabf3c074e8617e50fffd35c9ee1960eab6 | [
"MIT"
] | null | null | null | common/bin/namespace_manager.py | frankovacevich/aleph | 9b01dcabf3c074e8617e50fffd35c9ee1960eab6 | [
"MIT"
] | null | null | null | common/bin/namespace_manager.py | frankovacevich/aleph | 9b01dcabf3c074e8617e50fffd35c9ee1960eab6 | [
"MIT"
] | null | null | null | """
Namespace Manager
-----------------
The namespace manager is the interface between data and the databases. Use the
namespace manager to save data to the database and perform simple queries.
The namespace manager can handle many DBMS. See the db_connections folder to
see the files that connect different types of databases.
Modify the namespace manager to use the database system you want. By default,
the namespace manager uses an SQLite connection.
"""
import traceback
import json
import datetime
import os
from dateutil.tz import tzutc, tzlocal
from dateutil import parser
from .logger import Log
from .root_folder import aleph_root_folder
from .db_connections import functions as fn
from .db_connections.sqlite import SqliteConnection
class NamespaceManager:
    """Interface between data and the database backend.

    Wraps a single DB connection (SQLite by default; see the db_connections
    folder for other backends) and exposes save/get/delete operations plus
    key, field and metadata management.
    """

    # Column names reserved by the storage layer; user-defined fields must
    # never collide with them. Shared by set_metadata / remove_field /
    # rename_field (the original repeated the literal list three times).
    RESERVED_FIELDS = ("t", "id", "id_", "t_")

    def __init__(self):
        # Backing store: an SQLite file under the aleph local backup folder.
        self.conn = SqliteConnection(os.path.join(aleph_root_folder, "local", "backup", "msql.db"))
        self.log = Log("namespace_manager.log")

    def _require_user_field(self, field):
        """Raise Exception if *field* is one of the reserved column names."""
        if field in self.RESERVED_FIELDS:
            raise Exception("Invalid field")

    # ==========================================================================
    # Connect and close
    # ==========================================================================
    def connect(self):
        """Open the underlying database connection."""
        self.conn.connect()

    def close(self):
        """Close the underlying database connection."""
        self.conn.close()

    # ==========================================================================
    # Operations (save, get, delete)
    # ==========================================================================
    def save_data(self, key, data):
        """Normalize *data* for storage and persist it under *key*."""
        data = fn.__format_data_for_saving__(data)
        self.conn.save_data(key, data)

    def get_data(self, key, field="*", since=365, until=0, count=100000):
        """Query up to *count* records for *key*.

        *since*/*until* accept whatever fn.__parse_date__ understands
        (presumably day offsets or date strings — confirm in db_connections.functions).
        """
        since = fn.__parse_date__(since)
        until = fn.__parse_date__(until, True)
        return self.conn.get_data(key, field, since, until, count)

    def get_data_by_id(self, key, id_):
        """Fetch a single record of *key* by its id."""
        return self.conn.get_data_by_id(key, id_)

    def delete_data(self, key, since, until):
        """Delete records of *key* in the [since, until] range."""
        since = fn.__parse_date__(since)
        until = fn.__parse_date__(until, True)
        return self.conn.delete_data(key, since, until)

    def delete_data_by_id(self, key, id_):
        """Delete a single record of *key* by its id."""
        return self.conn.delete_data_by_id(key, id_)

    # ==========================================================================
    # Get keys and fields. Get and set metadata
    # ==========================================================================
    def get_keys(self):
        """Return all keys known to the backend."""
        return self.conn.get_keys()

    def get_fields(self, key):
        """Return the fields stored under *key*."""
        return self.conn.get_fields(key)

    def set_metadata(self, key, field, alias, description=""):
        """Attach an alias/description to *field*; reserved fields rejected."""
        self._require_user_field(field)
        self.conn.set_metadata(key, field, str(alias), str(description))

    def get_metadata(self, key):
        """Return the metadata stored for *key*."""
        return self.conn.get_metadata(key)

    # ==========================================================================
    # Remove and rename keys and fields
    # ==========================================================================
    def remove_key(self, key):
        """Drop *key* and all its data."""
        self.conn.remove_key(key)

    def remove_field(self, key, field):
        """Drop *field* from *key*; reserved fields rejected."""
        self._require_user_field(field)
        self.conn.remove_field(key, field)

    def rename_key(self, key, new_key):
        """Rename *key* to *new_key*."""
        self.conn.rename_key(key, new_key)

    def rename_field(self, key, field, new_field):
        """Rename *field* of *key*; reserved fields rejected."""
        self._require_user_field(field)
        self.conn.rename_field(key, field, new_field)
| 34.742574 | 99 | 0.556854 | 414 | 3,509 | 4.507246 | 0.256039 | 0.068596 | 0.052519 | 0.045552 | 0.235263 | 0.207396 | 0.181672 | 0.181672 | 0.181672 | 0.148446 | 0 | 0.003487 | 0.182673 | 3,509 | 100 | 100 | 35.09 | 0.647141 | 0.335993 | 0 | 0.134615 | 0 | 0 | 0.044569 | 0.009087 | 0 | 0 | 0 | 0 | 0 | 1 | 0.307692 | false | 0 | 0.192308 | 0.096154 | 0.653846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
2fe94e879912d14ca18912d6024587424d28ccef | 260 | py | Python | tests/tests_integration/test_three.py | drewverlee/prims-algorithm | 2d3e14dbf45b211e77d3e7aab6b5b9c5ccdc1e4c | [
"MIT"
] | null | null | null | tests/tests_integration/test_three.py | drewverlee/prims-algorithm | 2d3e14dbf45b211e77d3e7aab6b5b9c5ccdc1e4c | [
"MIT"
] | null | null | null | tests/tests_integration/test_three.py | drewverlee/prims-algorithm | 2d3e14dbf45b211e77d3e7aab6b5b9c5ccdc1e4c | [
"MIT"
] | null | null | null | """
6 11
1 2 10
1 5 -3
1 4 5
1 3 4
2 6 6
2 3 7
3 6 -10
3 4 -1
4 6 2
4 5 -8
5 6 1
"""
from prims import prims
def test_prims_should_find_cost_of_neg_16(GF3, simple_que):
    # GF3 and simple_que are presumably pytest fixtures (graph + queue)
    # defined in a conftest — verify there. The module docstring above
    # lists the edge set; its MST total weight is expected to be -16.
    v = GF3.any_vertice()
    mst = prims(GF3, simple_que, v)
    assert mst.cost == -16
| 11.818182 | 59 | 0.626923 | 65 | 260 | 2.353846 | 0.446154 | 0.026144 | 0.156863 | 0.169935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.236842 | 0.269231 | 260 | 21 | 60 | 12.380952 | 0.568421 | 0.292308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.2 | false | 0 | 0.2 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2fee63db0e7d3a89bce8bac3805ad091e75a4cfd | 352 | py | Python | common/decorators.py | invinst/CPDB | c2d8ae8888b13d956cc1068742f18d45736d4121 | [
"Apache-2.0"
] | 16 | 2016-05-20T09:03:32.000Z | 2020-09-13T14:23:06.000Z | common/decorators.py | invinst/CPDB | c2d8ae8888b13d956cc1068742f18d45736d4121 | [
"Apache-2.0"
] | 2 | 2016-05-24T01:44:14.000Z | 2016-06-17T22:19:45.000Z | common/decorators.py | invinst/CPDB | c2d8ae8888b13d956cc1068742f18d45736d4121 | [
"Apache-2.0"
] | 2 | 2016-10-10T16:14:19.000Z | 2020-10-26T00:17:02.000Z | from tqdm import tqdm
def apply_with_progress_bar(desc=None):
    """Decorator factory: wrap a one-argument function so that applying it
    to every element of an iterable displays a tqdm progress bar.

    Parameters
    ----------
    desc : str, optional
        Label displayed next to the progress bar.

    The wrapped function is called once per element; its return values are
    discarded (the wrapper returns None).
    """
    from functools import wraps

    def decorator(key_func):
        # wraps() preserves key_func's __name__/__doc__ on the wrapper —
        # the original decorator discarded them.
        @wraps(key_func)
        def func_wrapper(iterable):
            pbar = tqdm(total=len(iterable), desc=desc)
            for obj in iterable:
                pbar.update()
                key_func(obj)
            pbar.close()
        return func_wrapper
    return decorator
| 25.142857 | 55 | 0.585227 | 42 | 352 | 4.738095 | 0.571429 | 0.070352 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.338068 | 352 | 13 | 56 | 27.076923 | 0.854077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0.090909 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
2ff8b9a3f0d7b6e5bfe207c2912cea483c8c70a1 | 763 | py | Python | app.py | iAbdullahMughal/d-scan | 4cebc74e410dde556cb95fdd7dd49e1d5a6fe9bf | [
"MIT"
] | 6 | 2019-03-27T08:20:10.000Z | 2020-01-19T11:44:59.000Z | app.py | iAbdullahMughal/dscan | 4cebc74e410dde556cb95fdd7dd49e1d5a6fe9bf | [
"MIT"
] | null | null | null | app.py | iAbdullahMughal/dscan | 4cebc74e410dde556cb95fdd7dd49e1d5a6fe9bf | [
"MIT"
] | 2 | 2019-05-14T17:32:42.000Z | 2019-09-19T03:45:30.000Z | __author__ = 'Muhammad Abdullah Mughal'
__website__ = 'https://www.iabdullahmughal.com'
__twitter__ = '@iabdullahmughal'

from flask import Flask
from view.ui import index_page
from view.ui import analysis_report
from view.ui import project_settings
from view.ajax.upload_samples import ajax_sample_upload
from view.ajax.load_reports import ajax_reports
from view.ajax.update_config import ajax_update_config

app = Flask(__name__)

# Url
# Page-rendering blueprints.
app.register_blueprint(index_page)
app.register_blueprint(analysis_report)
app.register_blueprint(project_settings)

# Ajax Calls
# JSON/endpoint blueprints used by the front-end.
app.register_blueprint(ajax_sample_upload)
app.register_blueprint(ajax_reports)
app.register_blueprint(ajax_update_config)

if __name__ == '__main__':
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug debugger to the whole network — disable debug in production.
    app.run(debug=True, host='0.0.0.0')
| 25.433333 | 55 | 0.824377 | 109 | 763 | 5.321101 | 0.385321 | 0.082759 | 0.206897 | 0.082759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00578 | 0.093054 | 763 | 29 | 56 | 26.310345 | 0.83237 | 0.018349 | 0 | 0 | 0 | 0 | 0.115282 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.368421 | 0 | 0.368421 | 0.315789 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
2ffe7b0d97625dde8b31f2f082c39a509a37a416 | 326 | py | Python | Tests/image_tests/renderpasses/test_CameraAnimation.py | Nuclearfossil/Falcor | 667dc68a51bbaf87a2a063f4f0ef8928990ed203 | [
"BSD-3-Clause"
] | 62 | 2022-02-04T10:34:29.000Z | 2022-03-31T19:41:20.000Z | Tests/image_tests/renderpasses/test_CameraAnimation.py | Nuclearfossil/Falcor | 667dc68a51bbaf87a2a063f4f0ef8928990ed203 | [
"BSD-3-Clause"
] | 1 | 2021-02-18T16:38:38.000Z | 2021-02-18T16:38:38.000Z | Tests/image_tests/renderpasses/test_CameraAnimation.py | fromasmtodisasm/Falcor | 300aee1d7a9609e427f07e8887fd9bcb377426b0 | [
"BSD-3-Clause"
] | 4 | 2022-02-04T16:08:30.000Z | 2022-03-09T09:39:41.000Z | from helpers import render_frames
from graphs.ForwardRendering import ForwardRendering as g
from falcor import *

# Drop the motion-vectors output from the render graph before capturing
# (presumably so only the color targets are compared — confirm against the
# render_frames helper).
g.unmarkOutput("ForwardLightingPass.motionVecs")
# NOTE(review): `m` is expected to come from falcor's star import.
m.addGraph(g)
m.loadScene("grey_and_white_room/grey_and_white_room.fbx")
ctx = locals()

# default
# Sample a spread of frames so the camera animation is exercised over time.
render_frames(ctx, 'default', frames=[1,16,64,128,256])

exit()
| 23.285714 | 58 | 0.797546 | 47 | 326 | 5.361702 | 0.638298 | 0.095238 | 0.095238 | 0.126984 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037037 | 0.088957 | 326 | 13 | 59 | 25.076923 | 0.811448 | 0.021472 | 0 | 0 | 0 | 0 | 0.252366 | 0.230284 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.111111 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
640bc386a0295f1d1abcfe26406c2da2728aa0b4 | 235 | py | Python | python/tests/conftest.py | b-z/Tango | 8950b51ccccaef0b6ffc1575b8f2bdfb8a09d484 | [
"MIT"
] | null | null | null | python/tests/conftest.py | b-z/Tango | 8950b51ccccaef0b6ffc1575b8f2bdfb8a09d484 | [
"MIT"
] | null | null | null | python/tests/conftest.py | b-z/Tango | 8950b51ccccaef0b6ffc1575b8f2bdfb8a09d484 | [
"MIT"
] | null | null | null | import pytest
from application import create_app
@pytest.fixture()
def testapp(request):
    """Provide a Flask test client for the application under test."""
    application = create_app()
    test_client = application.test_client()

    def _teardown():
        # Nothing to clean up yet; placeholder finalizer.
        pass

    request.addfinalizer(_teardown)
    return test_client
| 13.823529 | 34 | 0.680851 | 27 | 235 | 5.814815 | 0.592593 | 0.11465 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.234043 | 235 | 16 | 35 | 14.6875 | 0.872222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0.1 | 0.2 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
641081af4bdafa1ec5ff97703b7c7bc6ce439430 | 3,165 | py | Python | get_archive.py | DarkElement75/object-detection-experiments | fc638f361f76d7bbb6e5cde9a3480c656b486ad6 | [
"MIT"
] | null | null | null | get_archive.py | DarkElement75/object-detection-experiments | fc638f361f76d7bbb6e5cde9a3480c656b486ad6 | [
"MIT"
] | null | null | null | get_archive.py | DarkElement75/object-detection-experiments | fc638f361f76d7bbb6e5cde9a3480c656b486ad6 | [
"MIT"
] | null | null | null | """
Generate an h5py file for our X and Y labelled data to use for training, and testing.
(not doing validation dataset due to the small number of optimizations on hyperparameters)
We do this by looping through our negative and positive dirs, and setting the Y label as 0 if negative, and 1 if positive
This is a really small script, and I may only use it once due to the extremely problem-specific nature of it.
-Blake Edwards / Dark Element
"""
import os, sys
import numpy as np
import cv2
import h5py
def recursive_get_paths(img_dir):
    """
    Arguments:
        img_dir : directory to recursively traverse. Should be a string.

    Returns:
        A list of tuples, where each tuple is of the form
        (path and filename str, filename str), e.g.
        ("/home/darkelement/test.txt", "test.txt")
    """
    collected = []
    for root, _, filenames in os.walk(img_dir):
        collected.extend((os.path.join(root, name), name) for name in filenames)
    return collected
"""
Directories to get our negative and positive samples from
"""
negatives_dir = "data/negatives/"
positives_dir = "data/positives/"
"""
H5py file directory to store samples
"""
archive_dir = "samples.h5"
"""
Dimensions of one x sample,
and if we have our samples as rgb or not
"""
sample_dims = [128, 128, 3]
rgb = True
"""
Get our filepaths
"""
negative_fpaths = recursive_get_paths(negatives_dir)
positive_fpaths = recursive_get_paths(positives_dir)
"""
Get the total number of samples via the total number of filepaths
"""
sample_n = len(negative_fpaths) + len(positive_fpaths)
"""
Create our x so that we can directly insert each image into it, we will wait to reshape it until they have all been added.
"""
X_dims = [sample_n]
X_dims.extend(sample_dims)
"""
Initialize X and Y with known data numbers
"""
#X = np.zeros(X_dims)
X = np.memmap("x.dat", dtype="uint8", mode="r+", shape=tuple(X_dims))
Y = np.zeros(sample_n,)
print "Getting Negative Samples..."
for i, negative_fpath_info in enumerate(negative_fpaths):
"""
Loop through our paths and get our negative samples,
setting their y value as 0
"""
negative_fpath, negative_fname = negative_fpath_info
if rgb:
X[i] = cv2.imread(negative_fpath)
else:
X[i] = cv2.imread(negative_fpath, 0)
Y[i] = 0
print "Getting Positive Samples..."
for i, positive_fpath_info in enumerate(positive_fpaths):
"""
Loop through our paths and get our positive samples,
setting their y value as 1
"""
positive_fpath, positive_fname = positive_fpath_info
if rgb:
X[i] = cv2.imread(positive_fpath)
else:
X[i] = cv2.imread(positive_fpath, 0)
Y[i] = 1
"""
Reshape accordingly, flattening our h and w of our image
"""
if rgb:
X = np.reshape(X, (sample_n, -1, 3))
else:
X = np.reshape(X, (sample_n, -1))
"""
Create our archive
"""
print "Creating Archive..."
with h5py.File(archive_dir, "w", chunks=True, compression="gzip") as hf:
hf.create_dataset("x", data=X)
hf.create_dataset("x_shape", data=X.shape)
hf.create_dataset("y", data=Y)
hf.create_dataset("y_shape", data=Y.shape)
| 26.596639 | 122 | 0.68278 | 491 | 3,165 | 4.291242 | 0.352342 | 0.016611 | 0.009492 | 0.020883 | 0.138586 | 0.138586 | 0.074039 | 0.056004 | 0 | 0 | 0 | 0.011591 | 0.209479 | 3,165 | 118 | 123 | 26.822034 | 0.830536 | 0.006319 | 0 | 0.125 | 1 | 0 | 0.088808 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.083333 | null | null | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6414b2bb81f916f42d0de106b6b344bee2274d8c | 67 | py | Python | epson_projector/version.py | xWizard360/epson_projector | 059cfa77b04cc31da6b191facf594788304ffaeb | [
"MIT"
] | null | null | null | epson_projector/version.py | xWizard360/epson_projector | 059cfa77b04cc31da6b191facf594788304ffaeb | [
"MIT"
] | null | null | null | epson_projector/version.py | xWizard360/epson_projector | 059cfa77b04cc31da6b191facf594788304ffaeb | [
"MIT"
] | null | null | null | """Version of Epson projector module."""
__version__ = '0.2.3.500'
| 22.333333 | 40 | 0.686567 | 10 | 67 | 4.2 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101695 | 0.119403 | 67 | 2 | 41 | 33.5 | 0.610169 | 0.507463 | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
643396d210afea6f4d0a557beafcca53eb48f6ef | 861 | py | Python | config/configuration_yaml.py | Surbeivol/PythonMatchingEngine | f94150294a85d7b415ca4518590b5a661d6f9958 | [
"MIT"
] | 52 | 2019-05-30T20:02:05.000Z | 2022-03-23T02:57:10.000Z | config/configuration_yaml.py | Surbeivol/PythonMatchingEngine | f94150294a85d7b415ca4518590b5a661d6f9958 | [
"MIT"
] | 2 | 2021-12-24T16:43:46.000Z | 2021-12-24T16:43:59.000Z | config/configuration_yaml.py | Surbeivol/PythonMatchingEngine | f94150294a85d7b415ca4518590b5a661d6f9958 | [
"MIT"
] | 19 | 2019-06-18T14:35:22.000Z | 2022-03-17T21:28:18.000Z | import os
import os.path
import yaml
class Configuration:
    """Loads YAML configuration files that live next to this module."""

    def __init__(self):
        # Config files sit in the same directory as this module.
        self.path = os.path.dirname(__file__)

    def get_liq_bands(self):
        """Return the parsed liquidity-bands configuration."""
        return self.__load_config(os.path.join(self.path, 'liq_bands.yml'))

    def get_trades_bands(self):
        """Return the parsed trades-bands configuration."""
        return self.__load_config(os.path.join(self.path, 'trades_bands.yml'))

    def __load_config(self, file_path):
        """Parse *file_path* as YAML, raising if it is missing or malformed."""
        if not os.path.exists(file_path):
            raise AttributeError(f"Config file not found:{file_path}")
        # TODO: assert with a template
        with open(file_path, 'r') as stream:
            try:
                return yaml.load(stream, Loader=yaml.SafeLoader)
            except yaml.YAMLError as exc:
                raise Exception(f'Error loading '
                                f'configuration file {file_path}: {exc}')
| 30.75 | 77 | 0.623693 | 114 | 861 | 4.447368 | 0.403509 | 0.059172 | 0.059172 | 0.074951 | 0.185404 | 0.185404 | 0.185404 | 0.185404 | 0.185404 | 0.185404 | 0 | 0 | 0.276423 | 861 | 27 | 78 | 31.888889 | 0.813804 | 0.03252 | 0 | 0 | 0 | 0 | 0.137184 | 0 | 0 | 0 | 0 | 0.037037 | 0 | 1 | 0.2 | false | 0 | 0.15 | 0.1 | 0.55 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
ff2b905e2aa8b8f34ba3c44fae5c1c032482b216 | 17,011 | py | Python | callmail_project/mail/views.py | q8groups/callnmail | e3f4c01050bee1442545c8e82eba3fb1efb5b3ed | [
"MIT"
] | null | null | null | callmail_project/mail/views.py | q8groups/callnmail | e3f4c01050bee1442545c8e82eba3fb1efb5b3ed | [
"MIT"
] | null | null | null | callmail_project/mail/views.py | q8groups/callnmail | e3f4c01050bee1442545c8e82eba3fb1efb5b3ed | [
"MIT"
] | null | null | null | import os
from django.contrib import messages
from django.db import IntegrityError
from django.core.mail import EmailMultiAlternatives
from django.shortcuts import render, HttpResponseRedirect, HttpResponse, get_object_or_404
from django.views import generic
from django.contrib.auth.models import User
from django.contrib.auth import login, logout, authenticate
from django.core.urlresolvers import reverse, reverse_lazy
from django.conf import settings
from django.contrib.messages.views import SuccessMessageMixin
from dateutil import parser
from braces.views import LoginRequiredMixin
from .forms import RegistrationForm, LoginForm, PasswordResetRequestForm, PasswordResetForm, ActivateForm, \
ChangePasswordForm, ProfileChangeForm, ContactUsForm,MailForwardForm
from .utils import send_sms, determine_mime_type, generate_random_number
from .models import Mail, MailAttachment, ForgotPasswordToken, MailForward, AccountActivation
from advertisement.models import UserProfile
from api.models import Country
class HomePage(generic.View):
    """Landing page; authenticated users go straight to their forward list."""

    def get(self, request):
        if not request.user.is_authenticated():
            return render(request, 'index.html')
        return HttpResponseRedirect(reverse('mail:mailforward_list'))
class RegistrationView(generic.View):
    """Register a user keyed by phone number; an activation code is sent by SMS."""

    def get(self, request):
        # Logged-in users don't need to register again.
        if request.user.is_authenticated():
            return HttpResponseRedirect(reverse('mail:mailforward_list'))
        rform = RegistrationForm()
        form = LoginForm()
        return render(request, 'login2.html', {'form': form, 'rform': rform})

    def post(self, request):
        rform = RegistrationForm(request.POST, request.FILES or None)
        if rform.is_valid():
            country_codes = request.POST.get('country_code')
            phone_number = request.POST.get('phone_number')
            # Usernames are stored as "+<country code><number>".
            phone_number = "+" + country_codes + phone_number
            # Remembered for the activation step (ActivateUser reads it).
            request.session['phone_number'] = phone_number
            # An *active* account with this number blocks re-registration;
            # an inactive one is re-provisioned in the try branch below.
            if User.objects.filter(username=phone_number, is_active=True).exists():
                return render(request, 'login2.html', {'form': LoginForm(), 'rform': rform,
                                                       'number_error': 'User with that phone number already exists.'})
            password = request.POST.get('password1')
            first_name = request.POST.get('first_name')
            last_name = request.POST.get('last_name')
            avatar = request.FILES.get('avatar')
            activation_code = generate_random_number()
            message = settings.SMS_MSG_ACTIVATION.format(activation_code)
            try:
                # An (inactive) account already exists: update it in place.
                user = User.objects.get(username=phone_number)
                user.set_password(password)
                if first_name:
                    user.first_name = first_name
                if last_name:
                    user.last_name = last_name
                user.save()
                send_sms(phone_number, message)
                profile = UserProfile.objects.get(user=user)
                profile.avatar = avatar
                profile.save()
            except User.DoesNotExist:
                # Fresh registration: the account stays inactive until the
                # SMS code is confirmed in ActivateUser.
                user = User.objects.create_user(username=phone_number, password=password)
                if first_name:
                    user.first_name = first_name
                if last_name:
                    user.last_name = last_name
                user.is_active = False
                user.save()
                try:
                    UserProfile.objects.create(user=user, avatar=avatar)
                except IntegrityError:
                    # Profile already exists (e.g. from a partial earlier signup).
                    pass
                send_sms(phone_number, message)
            # Replace any stale activation code with the freshly generated one.
            activation = AccountActivation.objects.filter(user=user)
            if activation.exists():
                activation.delete()
            AccountActivation.objects.create(user=user, activation_code=activation_code)
            return HttpResponseRedirect(reverse('mail:activate_user'))
        else:
            return render(request, 'login2.html', {'form': LoginForm(), 'rform': rform})
class LoginView(generic.View):
    """Phone-number + password login."""

    def get(self, request):
        # Already-authenticated users skip the form entirely.
        if request.user.is_authenticated():
            return HttpResponseRedirect(reverse('mail:mailforward_list'))
        form = LoginForm()
        return render(request, 'login.html', {'form': form, 'rform': RegistrationForm()})

    def post(self, request):
        country_codes = request.POST.get('country_codes', '')
        phone_number = request.POST.get('phone_number', '')
        # Remember the raw values so the form can be re-filled on error.
        request.session['phone_number'] = phone_number
        request.session['country_codes'] = country_codes
        # Usernames are stored as "+<country code><number>".
        phone_number = "+" + country_codes + phone_number
        request.POST = request.POST.copy()
        request.POST['phone_number'] = phone_number
        form = LoginForm(request.POST or None)
        if form.is_valid():
            username = phone_number
            # Removed a leftover Python-2 `print username` debug statement:
            # it logged usernames to stdout and is a SyntaxError on Python 3.
            password = request.POST.get('password')
            user = authenticate(username=username, password=password)
            if user is not None and user.is_active:
                login(request, user)
                return HttpResponseRedirect('/')
            else:
                return render(request, 'login.html', {'form': form, 'error': 'Username or password not correct.'})
        else:
            return render(request, 'login.html', {'form': form})
class LogoutView(generic.View):
    """Log the current user out and return to the landing page."""

    def get(self, request):
        logout(request)
        return HttpResponseRedirect('/')
class ProfileView(LoginRequiredMixin, generic.View):
    """View and edit the logged-in user's profile (User + UserProfile)."""

    def get(self, request):
        # Pre-fill the form from both the auth User and the UserProfile.
        data = {'first_name': request.user.first_name, 'last_name': request.user.last_name,
                'gender': request.user.userprofile.gender, 'age': request.user.userprofile.age,
                'country': request.user.userprofile.country}
        form = ProfileChangeForm(initial=data)
        return render(request, 'profile.html', {'form': form})

    def post(self, request):
        form = ProfileChangeForm(request.POST, request.FILES)
        if form.is_valid():
            gender = request.POST.get('gender')
            birthday = request.POST.get('age')
            country = request.POST.get('country')
            first_name = request.POST.get('first_name')
            last_name = request.POST.get('last_name')
            profile = get_object_or_404(UserProfile, user=request.user)
            # Only overwrite fields the user actually submitted.
            if first_name:
                request.user.first_name = first_name
            if last_name:
                request.user.last_name = last_name
            request.user.save()
            if gender:
                profile.gender = gender
            if birthday:
                # 'age' arrives as a date string; dateutil parses it.
                profile.age = parser.parse(birthday)
            if country:
                profile.country = country
            profile.save()
            messages.success(request, 'Profile successfully updated.')
            return HttpResponseRedirect(reverse('mail:profile'))
        else:
            return render(request, 'profile.html', {'form': form})
class ActivateUser(generic.View):
    """Confirm the SMS activation code and activate the pending account."""

    def get(self, request):
        form = ActivateForm()
        return render(request, 'activate_account.html', {'form': form})

    def post(self, request):
        form = ActivateForm(request.POST or None)
        try:
            # Set by RegistrationView; missing means the user skipped registration.
            phone_number = request.session['phone_number']
        except KeyError:
            return render(request, 'activate_account.html', {'form': form, 'error': 'Please register first.'})
        if form.is_valid():
            username = phone_number
            #del request.session['phone_number']
            activation_code = request.POST.get('activation_code')
            activation = AccountActivation.objects.filter(user__username=username, activation_code=activation_code)
            if activation.exists():
                user = User.objects.get(username=username)
                user.is_active = True
                user.save()
                # Activation codes are single-use.
                AccountActivation.objects.filter(user=user).delete()
                messages.success(request, 'Account successfully activated.')
                # login() needs a backend when authenticate() wasn't called.
                user.backend = 'django.contrib.auth.backends.ModelBackend'
                login(request, user)
                return HttpResponseRedirect('/')
            else:
                return render(request, 'activate_account.html', {'form': form, 'error': 'Invalid Activation Code.'})
        else:
            return render(request, 'activate_account.html', {'form': form})
class MailListView(LoginRequiredMixin, generic.ListView):
    """Paginated list of the current user's received mails."""

    model = Mail
    template_name = 'mail_list.html'
    context_object_name = 'mails'
    paginate_by = 20

    def get_queryset(self):
        # Restrict the listing to mails owned by the requesting user.
        return Mail.objects.filter(user=self.request.user)
class MailDetailView(LoginRequiredMixin, generic.DetailView):
    """Detail page for a single mail."""

    model = Mail
    template_name = 'mail_detail.html'
    context_object_name = 'mail'
class PasswordResetRequestView(generic.View):
    """Step 1 of password reset: send a secret token to the user by SMS."""

    def get(self, request):
        return render(request, 'password_reset_request_form.html', {'form': PasswordResetRequestForm()})

    def post(self, request):
        country_codes = request.POST.get('country_codes', '')
        phone_number = request.POST.get('phone_number', '')
        # Usernames are stored as "+<country code><number>".
        phone_number = "+"+country_codes + phone_number
        request.POST = request.POST.copy()
        request.POST['phone_number'] = phone_number
        form = PasswordResetRequestForm(request.POST)
        random_number = generate_random_number()
        if form.is_valid():
            user_obj = get_object_or_404(User, username=phone_number)
            # Only one outstanding token per user: drop any previous one.
            token_check = ForgotPasswordToken.objects.filter(user=user_obj)
            if token_check:
                token_check.delete()
            ForgotPasswordToken.objects.create(user=user_obj, secret_token=random_number)
            send_sms(phone_number, message=settings.SMS_MSG_PASSWORD.format(random_number))
            # Remembered so the next two steps know which account is resetting.
            request.session['username'] = phone_number
            return HttpResponseRedirect(reverse('mail:validate_token'))
        else:
            return render(request, 'password_reset_request_form.html', {'form': form})
class PasswordResetValidateToken(generic.View):
    """Step 2 of password reset: verify the SMS token before the new password."""

    def get(self, request):
        form = ActivateForm()
        return render(request, 'activate_account.html', {'form': form})

    def post(self, request):
        form = ActivateForm(request.POST or None)
        if form.is_valid():
            # Set by PasswordResetRequestView; a KeyError here means step 1
            # was skipped.
            username = request.session['username']
            activation_code = request.POST.get('activation_code')
            activation = ForgotPasswordToken.objects.filter(user__username=username, secret_token=activation_code)
            if activation.exists():
                instance = get_object_or_404(ForgotPasswordToken, user__username=username, secret_token=activation_code)
                # Marking the token done is what allows PasswordResetView to
                # accept the new password.
                instance.is_done = True
                instance.save()
                return HttpResponseRedirect(reverse('mail:reset_password'))
            else:
                return render(request, 'activate_account.html', {'form': form, 'error': 'Invalid Activation Code.'})
        else:
            return render(request, 'activate_account.html', {'form': form})
class PasswordResetView(generic.View):
    """Step 3 of password reset: set the new password once the token is validated."""

    def get(self, request):
        return render(request, 'password_reset_form.html', {'form': PasswordResetForm()})

    def post(self, request):
        # Set by PasswordResetRequestView earlier in the flow.
        phone_number = request.session['username']
        request.POST = request.POST.copy()
        request.POST['phone_number'] = phone_number
        form = PasswordResetForm(request.POST)
        #Check if user activated the code or not
        if not ForgotPasswordToken.objects.filter(user__username=phone_number, is_done=True):
            return render(request, 'password_reset_form.html', {'form': form, 'error': 'Please use activation code first.'})
        if not User.objects.filter(username=phone_number, is_active=True):
            return render(request, 'password_reset_form.html', {'form': form, 'error': 'Phone number is not registered.'})
        if form.is_valid():
            password = request.POST.get('new_password1')
            user_obj = get_object_or_404(User, username=phone_number)
            user_obj.set_password(password)
            user_obj.save()
            # Tokens are single-use: remove them after a successful reset.
            ForgotPasswordToken.objects.filter(user=user_obj).delete()
            #send_sms(phone_number, message='Password Successfully Reset.')
            return HttpResponseRedirect(reverse('mail:login'))
        else:
            return render(request, 'password_reset_form.html', {'form': form})
class PasswordChangeView(LoginRequiredMixin, generic.View):
    """Let a logged-in user change their password (old password required)."""

    def get(self, request):
        return render(request, 'change_password.html', {'form': ChangePasswordForm()})

    def post(self, request):
        form = ChangePasswordForm(request.POST)
        if form.is_valid():
            password = request.POST.get('new_password1')
            old_password = request.POST.get('old_password')
            # Re-authenticate with the old password before changing it.
            if not request.user.check_password(old_password):
                return render(request, 'change_password.html', {'form': form, 'error': 'Invalid old password.'})
            request.user.set_password(password)
            request.user.save()
            messages.success(request, 'Password successfully changed')
            return HttpResponseRedirect(reverse('mail:profile'))
        else:
            return render(request, 'change_password.html', {'form': form})
def download_attachment(request, pk):
    """Serve a stored mail attachment as a file download.

    Args:
        request: Incoming HTTP request (only used for routing).
        pk: Primary key of the ``MailAttachment`` to serve.

    Returns:
        ``HttpResponse`` carrying the attachment bytes with a
        ``Content-Disposition: attachment`` header.

    Raises:
        Http404: If no ``MailAttachment`` with ``pk`` exists.
    """
    obj = get_object_or_404(MailAttachment, pk=pk)
    file_path = obj.attachment.name
    # Django storage names use '/' separators regardless of platform.
    filename = file_path.split('/')[-1]
    mime_type = determine_mime_type(os.path.join(settings.MEDIA_ROOT, file_path))
    response = HttpResponse(obj.attachment, content_type=mime_type)
    # Quote the filename (escaping backslashes and quotes) so names with
    # spaces or special characters survive, per RFC 6266; the original
    # unquoted form broke for such filenames.
    quoted = filename.replace('\\', '\\\\').replace('"', '\\"')
    response['Content-Disposition'] = 'attachment; filename="%s"' % quoted
    return response
class MailForwardListView(LoginRequiredMixin, generic.ListView):
    """List the mail-forwarding rules owned by the requesting user."""

    model = MailForward
    template_name = 'mail_forward_list.html'
    context_object_name = 'emails'

    def get_queryset(self):
        # Run the parent hook first, then narrow to the current user's rules.
        super(MailForwardListView, self).get_queryset()
        return MailForward.objects.filter(user=self.request.user)
def delete_mail_forward(request, pk):
    """Delete the ``MailForward`` rule ``pk`` and return to the rules list.

    Raises:
        Http404: If the rule does not exist.
    """
    # NOTE(review): there is no ownership/permission check here, so any
    # caller who reaches this URL can delete any rule -- confirm intended.
    get_object_or_404(MailForward, pk=pk).delete()
    return HttpResponseRedirect(reverse_lazy('mail:mailforward_list'))
class MailForwardCreateView(LoginRequiredMixin, generic.CreateView):
    """Create a new mail-forwarding rule owned by the current user."""

    model = MailForward
    form_class = MailForwardForm
    template_name = 'mail_forward_create.html'
    success_url = reverse_lazy('mail:mailforward_list')

    def form_valid(self, form):
        # Stamp the rule with its owner before the default save runs.
        form.instance.user = self.request.user
        return super(MailForwardCreateView, self).form_valid(form)
class MailForwardEditView(LoginRequiredMixin, generic.UpdateView):
    """Edit an existing mail-forwarding rule, re-stamping its owner on save."""

    model = MailForward
    form_class = MailForwardForm
    template_name = 'mail_forward_create.html'
    success_url = reverse_lazy('mail:mailforward_list')

    def form_valid(self, form):
        # Keep the rule bound to the requesting user even after edits.
        form.instance.user = self.request.user
        return super(MailForwardEditView, self).form_valid(form)
class AboutView(generic.TemplateView):
    """Render the static "about" page."""

    template_name = 'about.html'
class ContactView(SuccessMessageMixin, generic.CreateView):
    """Persist a contact-us submission and relay it to the site mailbox."""

    template_name = 'contact.html'
    form_class = ContactUsForm
    success_url = reverse_lazy('mail:contact')
    success_message = 'Thank you for contacting us. We will get back to you shortly.'

    def form_valid(self, form):
        data = form.cleaned_data
        # Replies should go straight back to the visitor's address.
        headers = {'Reply-To': data.get('contact_email')}
        msg = EmailMultiAlternatives(
            data.get('subject').strip(),
            data.get('message'),
            data.get('contact_email'),
            ['contact@callnmail.com'],
            headers=headers,
        )
        msg.attach_alternative(data.get('message'), "text/html")
        msg.send()
        return super(ContactView, self).form_valid(form)
class FAQView(generic.TemplateView):
    """Render the static FAQ page."""

    template_name = 'faq.html'
class PrivacyView(generic.TemplateView):
    """Render the static privacy-policy page."""

    template_name = 'privacy.html'
class ChangeAvatar(LoginRequiredMixin, generic.View):
    """Update the authenticated user's profile picture."""

    def get(self, request):
        # Avatar changes are POST-only; bounce GETs back to the profile page.
        return HttpResponseRedirect(reverse('mail:profile'))

    def post(self, request):
        uploaded = request.FILES.get('avatar')
        redirect_to = request.POST.get('path')
        if uploaded:
            profile = get_object_or_404(UserProfile, user=request.user)
            profile.avatar = uploaded
            profile.save()
            messages.success(request, 'Profile Avatar successfully changed.')
        # NOTE(review): 'path' is client-supplied and may be absent (None) --
        # confirm every caller posts it.
        return HttpResponseRedirect(redirect_to)
class TestTemplate(generic.TemplateView):
# Scratch/test view: renders test.html with every Country ordered by
# priority; exposes each country's dialling prefix minus its first
# character as `prefix_val` -- presumably stripping a leading '+',
# TODO confirm the prefix format.
template_name = "test.html"
def get_context_data(self, **kwargs):
context = super(TestTemplate, self).get_context_data(**kwargs)
country = Country.objects.all().order_by('priority')
for c in country:
c.prefix_val = c.prefix[1:]
context['codes'] = country
return context | 40.891827 | 196 | 0.654812 | 1,817 | 17,011 | 5.968079 | 0.137589 | 0.042604 | 0.043803 | 0.017245 | 0.500922 | 0.41931 | 0.366562 | 0.340557 | 0.29952 | 0.225194 | 0 | 0.002783 | 0.239433 | 17,011 | 416 | 197 | 40.891827 | 0.835369 | 0.012874 | 0 | 0.433333 | 0 | 0 | 0.119781 | 0.033653 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.124242 | 0.054545 | null | null | 0.00303 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
ff408e6ec0e521a42b4fc02a10360ca004f9194f | 347 | py | Python | ubuntu_requirement_fix.py | amitjoshi9627/WolfTube | fee0ca0f18dcfdec4d04b4145a80cafbd98f2174 | [
"MIT"
] | 6 | 2019-11-29T16:55:44.000Z | 2020-01-23T13:24:18.000Z | ubuntu_requirement_fix.py | amitjoshi9627/WolfTube | fee0ca0f18dcfdec4d04b4145a80cafbd98f2174 | [
"MIT"
] | 2 | 2019-11-29T11:37:49.000Z | 2020-01-23T06:04:17.000Z | ubuntu_requirement_fix.py | amitjoshi9627/WolfTube | fee0ca0f18dcfdec4d04b4145a80cafbd98f2174 | [
"MIT"
] | null | null | null | import os
import platform
def ubuntu_req_fix():
# Install ffmpeg on Ubuntu: 18.x has it in the default repositories,
# older releases get Jonathon F's ffmpeg-4 PPA added first.
# NOTE(review): the substring test '18.' against platform.version() is
# fragile -- it matches any version string containing "18." -- confirm.
os_version = platform.version()
if '18.' in os_version:
os.system('sudo apt install ffmpeg')
else:
os.system('sudo add-apt-repository ppa:jonathonf/ffmpeg-4')
os.system('sudo apt update')
os.system('sudo apt remove ffmpeg && sudo apt install -y ffmpeg') | 31.545455 | 73 | 0.657061 | 50 | 347 | 4.48 | 0.5 | 0.142857 | 0.214286 | 0.200893 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011194 | 0.227666 | 347 | 11 | 73 | 31.545455 | 0.824627 | 0 | 0 | 0 | 0 | 0 | 0.399425 | 0.063218 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.2 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ff4320db5bd1ccf8a20540f20fece0a392103ce4 | 203 | py | Python | exo2/test_file.py | natoutatou/seatech-poo-python-robotics | 711988b3a0affcf9aa1234e97a35654d563b18d1 | [
"MIT"
] | null | null | null | exo2/test_file.py | natoutatou/seatech-poo-python-robotics | 711988b3a0affcf9aa1234e97a35654d563b18d1 | [
"MIT"
] | null | null | null | exo2/test_file.py | natoutatou/seatech-poo-python-robotics | 711988b3a0affcf9aa1234e97a35654d563b18d1 | [
"MIT"
] | null | null | null | from exo2_starter_template import Cyborg
from robot import Robot
from human import Human
# Exercise driver: build a Human and feed it. eat() presumably records
# the given foods on the instance's `estomac` -- behaviour defined in
# the `human` module, confirm there.
h = Human('femme')
h.eat(['banane', 'chocolaaat', 'petit ecolier'])
# Show the stomach contents accumulated so far.
print(h.estomac)
h.eat(['pizza', 'pizza']) | 22.555556 | 48 | 0.724138 | 30 | 203 | 4.833333 | 0.6 | 0.055172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005618 | 0.123153 | 203 | 9 | 49 | 22.555556 | 0.808989 | 0 | 0 | 0 | 0 | 0 | 0.215686 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 0.428571 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
ff497d7c839a46ddc9d1b2e37453ea4aee8683e0 | 949 | py | Python | tests/b901.py | admdev8/flake8-bugbear | 55533c95c6f30d8f88db847fb3f01d7ddb57b280 | [
"MIT"
] | 1 | 2020-09-04T17:13:23.000Z | 2020-09-04T17:13:23.000Z | tests/b901.py | admdev8/flake8-bugbear | 55533c95c6f30d8f88db847fb3f01d7ddb57b280 | [
"MIT"
] | 4 | 2020-09-04T17:13:36.000Z | 2020-09-04T17:32:57.000Z | tests/b901.py | admdev8/flake8-bugbear | 55533c95c6f30d8f88db847fb3f01d7ddb57b280 | [
"MIT"
] | null | null | null | """
Should emit:
B901 - on lines 9, 36
"""
def broken():
# B901 positive fixture: a generator (it contains yields) whose early
# `return` carries a value -- the mix the check must flag.
if True:
return [1, 2, 3]
yield 3
yield 2
yield 1
def not_broken():
# OK fixture: a bare `return` inside a generator just stops iteration.
if True:
return
yield 3
yield 2
yield 1
def not_broken2():
# OK fixture: a plain function returning a generator object -- no yield here.
return not_broken()
def not_broken3():
# OK fixture: bare `return` before `yield from`; no value is returned.
return
yield from not_broken()
def broken2():
# B901 positive fixture: returns a value from a generator (`yield from` below).
return [3, 2, 1]
yield from not_broken()
async def not_broken4():
# OK fixture: an async def without yield is a coroutine, not a
# generator, so returning a value is fine.
import asyncio
await asyncio.sleep(1)
return 1
def actually_not_broken():
# Fixture explicitly silenced with a `noqa` comment on the return.
yield 2
return 1 # noqa
def not_broken5():
# OK fixture: the value-returning `return` belongs to the nested plain
# function, not to the enclosing generator.
def inner():
return 2
yield inner()
def not_broken6():
# OK fixture: `return (yield from ...)` surfaces a sub-generator's
# return value, the form sanctioned by PEP 380.
return (yield from [])
def not_broken7():
# OK fixture: returning the value captured from `yield from` (PEP 380).
x = yield from []
return x
def not_broken8():
# OK fixture: the generator returns a value that a nested closure set
# (via `nonlocal`) from the `yield from` result.
x = None
def inner(ex):
nonlocal x
x = ex
inner((yield from []))
return x
class NotBroken9(object):
# OK fixture: `__await__` implementations legitimately `return` a value
# after delegating with `yield from`.
# NOTE(review): `function` is not defined in this file -- presumably the
# checker only parses, never executes, this fixture; confirm.
def __await__(self):
yield from function()
return 42
| 11.297619 | 29 | 0.563751 | 129 | 949 | 4.015504 | 0.333333 | 0.092664 | 0.046332 | 0.069498 | 0.092664 | 0.092664 | 0.092664 | 0.092664 | 0 | 0 | 0 | 0.05414 | 0.338251 | 949 | 83 | 30 | 11.433735 | 0.770701 | 0.04215 | 0 | 0.361702 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.276596 | false | 0 | 0.021277 | 0.06383 | 0.574468 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
ff5292a4cf5d4e050555109631f6aa34f2ae5737 | 866 | py | Python | WEEKS/CD_Sata-Structures/_MISC/algorithms/map/is_anagram.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | null | null | null | WEEKS/CD_Sata-Structures/_MISC/algorithms/map/is_anagram.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | null | null | null | WEEKS/CD_Sata-Structures/_MISC/algorithms/map/is_anagram.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | null | null | null | ##-------------------------------------------------------------------
"""
Given two strings s and t , write a function to determine if t is an anagram of s.
Example 1:
Input: s = "anagram", t = "nagaram"
Output: true
Example 2:
Input: s = "rat", t = "car"
Output: false
Note:
You may assume the string contains only lowercase alphabets.
Reference: https://leetcode.com/problems/valid-anagram/description/
##-------------------------------------------------------------------
"""
def is_anagram(s, t):
    """Return True if *t* is an anagram of *s*.

    Compares character frequencies, so it runs in O(len(s) + len(t)).
    The original source assumed lowercase-only strings; the comparison
    itself works for any characters.

    :type s: str
    :type t: str
    :rtype: bool
    """
    # Strings of different lengths can never be anagrams -- cheap early exit
    # (behaviour-equivalent: unequal lengths imply unequal count maps).
    if len(s) != len(t):
        return False
    counts_s = {}
    counts_t = {}
    for ch in s:
        counts_s[ch] = counts_s.get(ch, 0) + 1
    for ch in t:
        counts_t[ch] = counts_t.get(ch, 0) + 1
    return counts_s == counts_t
| 24.055556 | 82 | 0.394919 | 91 | 866 | 3.747253 | 0.615385 | 0.035191 | 0.035191 | 0.035191 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008523 | 0.187067 | 866 | 35 | 83 | 24.742857 | 0.475852 | 0.232102 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ff5ff5b92e277867f0dafddc28fdde3023cdf3a1 | 3,486 | py | Python | python/src/shipping_allocation/envs/inventory_generators.py | jotaporras/ts_mcfrl | c8c77a8fbd58e80e926e6705320ca8bc1979efdd | [
"MIT"
] | null | null | null | python/src/shipping_allocation/envs/inventory_generators.py | jotaporras/ts_mcfrl | c8c77a8fbd58e80e926e6705320ca8bc1979efdd | [
"MIT"
] | 5 | 2020-09-26T01:26:21.000Z | 2022-02-10T02:45:51.000Z | python/src/shipping_allocation/envs/inventory_generators.py | jotaporras/ts_mcfrl | c8c77a8fbd58e80e926e6705320ca8bc1979efdd | [
"MIT"
] | null | null | null | from abc import ABC
from typing import List
import numpy as np
from network import physical_network
from experiment_utils.Order import Order
class InventoryGenerator(ABC):
    """Strategy interface for creating and distributing new inventory.

    Implementations return a numpy array of shape (num_dcs, num_commodities)
    describing how much extra inventory appears at each distribution centre,
    keeping the network balanced for the given open orders.
    """

    def generate_new_inventory(
        self, network: physical_network, open_orders: List[Order]
    ):  # todo add type when it works.
        pass
class NaiveInventoryGenerator(InventoryGenerator):
    """Splits new inventory evenly across all distribution centres."""

    def generate_new_inventory(
        self, network: physical_network, open_orders: List[Order]
    ):
        """Divide total open demand evenly over the DCs.

        The integer-division remainder is assigned to the first DC so the
        generated supply exactly matches total demand.
        """
        total_inventory = sum(order.demand for order in open_orders)  # TODO support many commodities.
        per_dc = total_inventory // network.num_dcs
        # Reshape keeps the (dc, commodity) layout expected by callers.
        dc_inv = np.array([per_dc] * network.num_dcs).reshape(network.num_dcs, -1)
        # Whatever the floor division dropped goes to DC 0.
        imbalance = total_inventory - np.sum(dc_inv, axis=0)
        dc_inv[0, :] = dc_inv[0, :] + imbalance
        # Sanity check: generated supply must equal open demand exactly.
        if (np.sum(dc_inv, axis=0) != total_inventory).any():
            raise Exception("np.sum(dc_inv) != total_inventory")
        return dc_inv
class DirichletInventoryGenerator(InventoryGenerator):
    """Distributes new inventory across DCs using fixed Dirichlet-sampled weights.

    The weights are drawn once at construction time, so the same skewed
    DC split is reused for every call to :meth:`generate_new_inventory`.
    """

    def __init__(self, network: physical_network):
        num_dcs = network.num_dcs
        num_commodities = network.num_commodities
        # Skewed concentration parameters (num_dcs / 1, num_dcs / 2, ...)
        # shuffled so which DCs get large shares is randomised.
        self.alpha = np.random.permutation(
            num_dcs / np.arange(1, num_dcs + 1)
        )
        # Shape (num_commodities, num_dcs): np.random.dirichlet returns one
        # sample row per commodity. (The original comment claimed the
        # transposed shape, which was misleading.)
        self.inventory_generation_distribution = np.random.dirichlet(
            self.alpha, num_commodities
        )

    def generate_new_inventory(
        self, network: physical_network, open_orders: List[Order]
    ):
        """Allocate total open demand to DCs with the stored Dirichlet weights.

        Returns a (num_dcs, num_commodities) array whose per-commodity sums
        equal the open demand; rounding slack from np.floor is assigned to
        the first DC so totals balance exactly.

        Raises:
            RuntimeError: If the balancing sanity check fails.
        """
        total_inventory = sum(
            map(lambda o: o.demand, open_orders)
        )  # TODO rename and do for many commmodities.
        inventory_distribution = self.inventory_generation_distribution
        supply_per_dc = np.floor(
            total_inventory.reshape(-1, 1) * inventory_distribution
        )
        # np.floor loses fractional units; give the shortfall to DC 0.
        imbalance = total_inventory - np.sum(supply_per_dc, axis=1)
        supply_per_dc[:, 0] = supply_per_dc[:, 0] + imbalance
        if not np.isclose(np.sum(np.sum(supply_per_dc, axis=1) - total_inventory), 0.0):
            raise RuntimeError("Demand was not correctly balanced")
        return supply_per_dc.transpose()
| 42.512195 | 120 | 0.662651 | 445 | 3,486 | 4.986517 | 0.249438 | 0.042812 | 0.058585 | 0.031546 | 0.477693 | 0.456963 | 0.443443 | 0.420009 | 0.420009 | 0.420009 | 0 | 0.00565 | 0.238382 | 3,486 | 81 | 121 | 43.037037 | 0.830132 | 0.346242 | 0 | 0.254902 | 1 | 0 | 0.029307 | 0 | 0 | 0 | 0 | 0.012346 | 0 | 1 | 0.078431 | false | 0.019608 | 0.098039 | 0 | 0.27451 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ff9c1018217422bd2104e55c553d20f3cfdbfde1 | 371 | py | Python | cdn_static_website/settings/components/caches.py | soulraven/cdn_small | e2415b39089cb761ddfe1748e443abd9c4080e83 | [
"MIT"
] | null | null | null | cdn_static_website/settings/components/caches.py | soulraven/cdn_small | e2415b39089cb761ddfe1748e443abd9c4080e83 | [
"MIT"
] | null | null | null | cdn_static_website/settings/components/caches.py | soulraven/cdn_small | e2415b39089cb761ddfe1748e443abd9c4080e83 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Caching
# https://docs.djangoproject.com/en/3.2/topics/cache/
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake',
},
'staticfiles': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake',
}
}
| 23.1875 | 67 | 0.595687 | 36 | 371 | 6.138889 | 0.694444 | 0.117647 | 0.153846 | 0.199095 | 0.633484 | 0.633484 | 0.633484 | 0.633484 | 0.633484 | 0.633484 | 0 | 0.010239 | 0.210243 | 371 | 15 | 68 | 24.733333 | 0.744027 | 0.218329 | 0 | 0.4 | 0 | 0 | 0.596491 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4409dd247c3663721017d03477767acda24e995b | 459 | py | Python | release.py | Flexlolo/spelunky-diffscale | f66e9d6e07ae98f10017406520b852540ebc7fca | [
"MIT"
] | null | null | null | release.py | Flexlolo/spelunky-diffscale | f66e9d6e07ae98f10017406520b852540ebc7fca | [
"MIT"
] | null | null | null | release.py | Flexlolo/spelunky-diffscale | f66e9d6e07ae98f10017406520b852540ebc7fca | [
"MIT"
] | null | null | null | import os
import tarfile
from diffscale import generate, scale_dict
basedir = os.path.dirname(os.path.abspath(__file__))
release = os.path.join(basedir, 'release')
os.makedirs(release, exist_ok=True)
os.chdir(release)
for i in (2, 3, 4, 5, 8, 10, 16):
scale = scale_dict(i)
target = generate(scale, f'release/x{i}')
try:
tar = tarfile.open(target + '.tar.xz', 'x:xz')
tar.add(target.rsplit('/', 1)[1])
tar.close()
except FileExistsError:
pass | 21.857143 | 52 | 0.690632 | 74 | 459 | 4.189189 | 0.594595 | 0.058065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027919 | 0.141612 | 459 | 21 | 53 | 21.857143 | 0.758883 | 0 | 0 | 0 | 1 | 0 | 0.067391 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.0625 | 0.1875 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
44251827d345f8dbc8087275d48a4f3983a579e2 | 760 | py | Python | nhgis_to_postal.py | dcvanriper/census_dicts | c0ff87a9877cedeb79794de232dabd1140cc8ea3 | [
"MIT"
] | null | null | null | nhgis_to_postal.py | dcvanriper/census_dicts | c0ff87a9877cedeb79794de232dabd1140cc8ea3 | [
"MIT"
] | null | null | null | nhgis_to_postal.py | dcvanriper/census_dicts | c0ff87a9877cedeb79794de232dabd1140cc8ea3 | [
"MIT"
] | null | null | null | #Three character NHGIS codes to postal abbreviations
state_codes = {
'530':'WA',
'100':'DE',
'110':'DC',
'550':'WI',
'540':'WV',
'150':'HI',
'120':'FL',
'560':'WY',
'720':'PR',
'340':'NJ',
'350':'NM',
'480':'TX',
'220':'LA',
'370':'NC',
'380':'ND',
'310':'NE',
'470':'TN',
'360':'NY',
'420':'PA',
'020':'AK',
'320':'NV',
'330':'NH',
'510':'VA',
'080':'CO',
'060':'CA',
'010':'AL',
'050':'AR',
'500':'VT',
'170':'IL',
'130':'GA',
'180':'IN',
'190':'IA',
'250':'MA',
'040':'AZ',
'160':'ID',
'090':'CT',
'230':'ME',
'240':'MD',
'400':'OK',
'390':'OH',
'490':'UT',
'290':'MO',
'270':'MN',
'260':'MI',
'440':'RI',
'200':'KS',
'300':'MT',
'280':'MS',
'450':'SC',
'210':'KY',
'410':'OR',
'460':'SD',
'720':'PR'
}
| 13.333333 | 52 | 0.422368 | 115 | 760 | 2.782609 | 0.973913 | 0.03125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.247664 | 0.155263 | 760 | 56 | 53 | 13.571429 | 0.250779 | 0.067105 | 0 | 0 | 0 | 0 | 0.374294 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4427b4eb4260da30921c2e45029b52a169e0c1f3 | 319 | py | Python | oi_ghostwriter/management/commands/delete_printouts.py | olimpiada/oi-bus | c61805e6f51e933ceb9efdf556f6d5f229610707 | [
"MIT"
] | 2 | 2020-06-05T07:49:55.000Z | 2020-06-05T07:55:00.000Z | oi_ghostwriter/management/commands/delete_printouts.py | olimpiada/oi-bus | c61805e6f51e933ceb9efdf556f6d5f229610707 | [
"MIT"
] | null | null | null | oi_ghostwriter/management/commands/delete_printouts.py | olimpiada/oi-bus | c61805e6f51e933ceb9efdf556f6d5f229610707 | [
"MIT"
] | null | null | null | from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
from oi_ghostwriter.models import Backup
import json
class Command(BaseCommand):
help = 'Deletes all computers from database'
def handle(self, *args, **options):
Backup.objects.all().delete()
| 29 | 65 | 0.76489 | 40 | 319 | 6.075 | 0.75 | 0.082305 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147335 | 319 | 10 | 66 | 31.9 | 0.893382 | 0 | 0 | 0 | 0 | 0 | 0.109718 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.5 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
443127ababa992a1158ecc86780f973e2ea79292 | 6,897 | py | Python | tests/generation/vres/potentials/glaes/test_manager.py | montefesp/EPIPPy | 7de873cf70d06986e83a434b6ab4b8997694a269 | [
"MIT"
] | null | null | null | tests/generation/vres/potentials/glaes/test_manager.py | montefesp/EPIPPy | 7de873cf70d06986e83a434b6ab4b8997694a269 | [
"MIT"
] | null | null | null | tests/generation/vres/potentials/glaes/test_manager.py | montefesp/EPIPPy | 7de873cf70d06986e83a434b6ab4b8997694a269 | [
"MIT"
] | null | null | null | import pytest
from epippy.generation.vres.potentials.glaes import *
# All these tests were run with a pixelRes set to 1000
def check_correctness(expected, actual):
    """Assert that *actual* lies within 5% relative error of *expected*."""
    relative_error = np.abs(actual - expected) / expected
    assert relative_error < 0.05
def test_get_glaes_prior_defaults_empty_config_list():
    """An empty list of config keys must be rejected."""
    pytest.raises(AssertionError, get_glaes_prior_defaults, [])
def test_get_glaes_prior_defaults_wrong_exclusion_file():
    """An unknown top-level exclusion-file name must be rejected."""
    pytest.raises(AssertionError, get_glaes_prior_defaults, ["wrong"])
def test_get_glaes_prior_defaults_wrong_subconfig():
    """A valid file with an unknown sub-config key must be rejected."""
    pytest.raises(AssertionError, get_glaes_prior_defaults, ["holtinger", "wrong"])
def test_get_glaes_prior_defaults_absent_prior():
    """Requesting a prior that does not exist in the config must fail."""
    with pytest.raises(AssertionError):
        get_glaes_prior_defaults(["holtinger", "wind_onshore", "min"], ["wrong"])
def test_get_glaes_prior_defaults_all_priors():
    """Without a prior filter, the full wind-onshore 'min' config (18 priors) comes back."""
    config = get_glaes_prior_defaults(["holtinger", "wind_onshore", "min"])
    assert len(config) == 18
def test_get_glaes_prior_defaults():
    """Requesting a subset of priors returns exactly those priors."""
    wanted = ["airport_proximity", "river_proximity"]
    config = get_glaes_prior_defaults(["holtinger", "wind_onshore", "min"], wanted)
    assert len(config) == len(wanted)
    assert all(prior in config for prior in wanted)
def test_compute_land_availability_missing_globals():
    """Computing availability before the globals are initialised must fail."""
    region = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    with pytest.raises(NameError):
        compute_land_availability(region)
def test_compute_land_availability_empty_filters():
    """With no filters, the full Belgian on- and offshore areas are available."""
    init_land_availability_globals({})
    onshore = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    check_correctness(30683.0, compute_land_availability(onshore))
    init_land_availability_globals({})
    offshore = get_shapes(["BE"], "offshore").loc["BE", "geometry"]
    check_correctness(3454.0, compute_land_availability(offshore))
def test_compute_land_availability_esm():
    """The ESM land-cover filter shrinks Belgian onshore availability."""
    region = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    init_land_availability_globals({'esm': True})
    check_correctness(11542.83, compute_land_availability(region))
def test_compute_land_availability_glaes_priors():
    """GLAES prior-based exclusions apply both onshore and offshore."""
    onshore = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    init_land_availability_globals({'glaes_priors': {'settlement_proximity': (None, 1000)}})
    check_correctness(6122.68, compute_land_availability(onshore))
    offshore = get_shapes(["BE"], "offshore").loc["BE", "geometry"]
    init_land_availability_globals({'glaes_priors': {'shore_proximity': [(None, 20e3), (370e3, None)]}})
    check_correctness(2125.0, compute_land_availability(offshore))
def test_compute_land_availability_natura():
    """Excluding Natura 2000 areas reduces on- and offshore availability."""
    onshore = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    init_land_availability_globals({'natura': 1})
    check_correctness(26821.79, compute_land_availability(onshore))
    offshore = get_shapes(["BE"], "offshore").loc["BE", "geometry"]
    init_land_availability_globals({'natura': 1})
    check_correctness(2197.84, compute_land_availability(offshore))
def test_compute_land_availability_gebco():
    """GEBCO altitude (onshore) and depth (offshore) filters apply."""
    onshore = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    init_land_availability_globals({'altitude_threshold': 300})
    check_correctness(24715.12, compute_land_availability(onshore))
    offshore = get_shapes(["BE"], "offshore").loc["BE", "geometry"]
    init_land_availability_globals({'depth_thresholds': {'low': -50, 'high': -10}})
    check_correctness(2828.41, compute_land_availability(offshore))
def test_compute_land_availability_emodnet():
    """EMODnet cable, pipeline and shipping-density filters each apply offshore."""
    offshore = get_shapes(["BE"], "offshore").loc["BE", "geometry"]
    cases = [
        ({'cables': 500}, 3115.0),
        ({'pipelines': 500}, 3287.0),
        ({'shipping': (100, None)}, 1661.0),
    ]
    for filters, expected in cases:
        init_land_availability_globals(filters)
        check_correctness(expected, compute_land_availability(offshore))
def test_get_land_availability_for_shapes_empty_list_of_shapes():
    """An empty list of shapes must be rejected."""
    pytest.raises(AssertionError, get_land_availability_for_shapes, [], {})
def test_get_land_availability_for_shapes_mp_vs_non_mp():
    """The multiprocessing and single-process code paths must agree exactly."""
    shapes = get_shapes(["BE", "NL"], "onshore")["geometry"]
    filters = {'glaes_priors': {'settlement_proximity': (None, 1000)}}
    parallel = get_land_availability_for_shapes(shapes, filters)
    serial = get_land_availability_for_shapes(shapes, filters, 1)
    assert len(parallel) == 2
    assert all(p == s for p, s in zip(parallel, serial))
def test_get_capacity_potential_for_shapes():
    """Capacity potential = available area times power density, on- and offshore."""
    onshore = get_shapes(["BE", "NL"], "onshore")["geometry"]
    onshore_filters = {'glaes_priors': {'settlement_proximity': (None, 1000)}}
    onshore_cap = get_capacity_potential_for_shapes(onshore, onshore_filters, 10)
    assert len(onshore_cap) == 2
    check_correctness(61.2268, onshore_cap[0])
    check_correctness(198.8756, onshore_cap[1])
    offshore = get_shapes(["BE", "NL"], "offshore")["geometry"]
    offshore_cap = get_capacity_potential_for_shapes(offshore, {'natura': 1}, 15)
    assert len(offshore_cap) == 2
    check_correctness(32.9676, offshore_cap[0])
    check_correctness(715.9119, offshore_cap[1])
def test_get_capacity_potential_per_country():
    """Per-country potentials are returned as a pandas Series keyed by country."""
    onshore_filters = {'glaes_priors': {'settlement_proximity': (None, 1000)}}
    onshore_cap = get_capacity_potential_per_country(["BE", "NL"], True, onshore_filters, 10)
    assert isinstance(onshore_cap, pd.Series)
    assert len(onshore_cap) == 2
    check_correctness(61.2268, onshore_cap["BE"])
    check_correctness(198.8756, onshore_cap["NL"])
    offshore_cap = get_capacity_potential_per_country(["BE", "NL"], False, {'natura': 1}, 15)
    assert isinstance(offshore_cap, pd.Series)
    assert len(offshore_cap) == 2
    check_correctness(32.9676, offshore_cap["BE"])
    check_correctness(715.9119, offshore_cap["NL"])
| 39.1875 | 99 | 0.741047 | 833 | 6,897 | 5.762305 | 0.17407 | 0.123333 | 0.095833 | 0.0525 | 0.825208 | 0.751042 | 0.691458 | 0.603125 | 0.559167 | 0.461458 | 0 | 0.032388 | 0.136001 | 6,897 | 175 | 100 | 39.411429 | 0.773116 | 0.00754 | 0 | 0.461538 | 0 | 0 | 0.096741 | 0 | 0 | 0 | 0 | 0 | 0.130769 | 1 | 0.138462 | false | 0 | 0.015385 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4445147b7da188f6330a5a30ccddf0a8c0007b74 | 11,186 | py | Python | tests/old/test_tifread.py | richardt94/landshark | e4f347857a750d050d2cd568c6bcbd8f4a6c1f7f | [
"Apache-2.0"
] | 10 | 2019-03-05T23:53:58.000Z | 2021-12-17T08:27:05.000Z | tests/old/test_tifread.py | richardt94/landshark | e4f347857a750d050d2cd568c6bcbd8f4a6c1f7f | [
"Apache-2.0"
] | 7 | 2019-03-05T05:39:02.000Z | 2020-02-03T01:10:40.000Z | tests/old/test_tifread.py | richardt94/landshark | e4f347857a750d050d2cd568c6bcbd8f4a6c1f7f | [
"Apache-2.0"
] | 8 | 2019-03-23T22:55:25.000Z | 2021-01-12T05:14:31.000Z | # """Tests for the tif reading importer module."""
# from collections import namedtuple
# import numpy as np
# import rasterio.transform
# import pytest
# from landshark.importers import tifread
# def test_match():
# """
# Checks that _match can pull out a property from a bunch of image-like
# objects when that property is the same for each (default behaviour).
# """
# Im = namedtuple('Im', ['prop'])
# name = 'myprop'
# true_answer = 1
# images = [Im(prop=true_answer) for k in range(10)]
# prop = tifread._match(lambda x: x.prop, images, name)
# assert prop == true_answer
# def test_match_nomatch(mocker):
# """
# Checks that _match correctly identifies a non-matching property and
# calls the right error functions.
# """
# Im = namedtuple('Im', ['prop'])
# name = 'myprop'
# images = [Im(prop=k) for k in range(10)]
# mocked_mismatch = mocker.patch('landshark.importers.tifread._fatal_mismatch')
# tifread._match(lambda x: x.prop, images, name)
# mocked_mismatch.assert_called_once_with(list(range(10)), images, name)
# def test_fatal_mismatch(mocker):
# """Checks fatal mismatch calls log.fatal with some sensible text."""
# mock_error = mocker.patch('landshark.importers.tifread.log.error')
# property_list = list(range(3))
# Im = namedtuple('Im', ['name'])
# images = [Im(name="n{}".format(i)) for i in range(3)]
# name = "myname"
# with pytest.raises(Exception):
# tifread._fatal_mismatch(property_list, images, name)
# true_answer = 'No match for myname:\nn0: 0\nn1: 1\nn2: 2'
# mock_error.assert_called_once_with(true_answer)
# def test_names():
# """Checks names are generated sanely for bands."""
# Im = namedtuple('Im', ['name', 'count'])
# im1 = Im(name="A", count=1)
# im2 = Im(name="B", count=2)
# bands = [tifread.Band(image=im1, index=1),
# tifread.Band(image=im2, index=1),
# tifread.Band(image=im2, index=2)]
# name = tifread._names(bands)
# true_answer = ["A", "B_1", "B_2"]
# assert name == true_answer
# def test_missing():
# """Checks missing correctly converts types of nodatavals."""
# Im = namedtuple('Im', ['nodatavals', 'count'])
# im1 = Im(count=1, nodatavals=[1.0])
# im2 = Im(count=2, nodatavals=[2.0, 3.0])
# bands = [tifread.Band(image=im1, index=1),
# tifread.Band(image=im2, index=1),
# tifread.Band(image=im2, index=2)]
# res = tifread._missing(bands, np.int32)
# true_answer = [1, 2, 3]
# assert res == true_answer
# im3 = Im(count=2, nodatavals=[1.0, None])
# bands[0] = tifread.Band(image=im3, index=2)
# res2 = tifread._missing(bands, np.int32)
# true_answer2 = [None, 2, 3]
# assert res2 == true_answer2
# def test_bands():
# """Checks that bands are correctly listed from images."""
# Im = namedtuple('Im', ['dtypes'])
# im1 = Im(dtypes=[np.float32, np.float32, np.float32])
# im2 = Im(dtypes=[np.int32, np.int32])
# true_band = [
# tifread.Band(image=im1, index=1),
# tifread.Band(image=im1, index=2),
# tifread.Band(image=im1, index=3),
# tifread.Band(image=im2, index=1),
# tifread.Band(image=im2, index=2)
# ]
# res = tifread._bands([im1, im2])
# assert res == true_band
# def test_blockrows():
# """Checks blocksize does something sane."""
# Im = namedtuple('Im', ['block_shapes'])
# im1 = Im(block_shapes=[(1, 10), (2, 100)])
# im2 = Im(block_shapes=[(3, 30)])
# bands = [tifread.Band(image=im1, index=1),
# tifread.Band(image=im1, index=2),
# tifread.Band(image=im2, index=1)]
# blocksize = tifread._block_rows(bands)
# assert blocksize == 3
# def test_windows():
# """Checks window list covers whole image."""
# w_list = tifread._windows(1024, 768, 10)
# assert np.all([k[1] == (0, 1024) for k in w_list])
# assert np.all([k[0][1] - k[0][0] == 10 for k in w_list[:-1]])
# assert w_list[-1][0][0] < 768
# assert w_list[-1][0][1] == 768
# w_list = tifread._windows(1024, 450, 5)
# assert np.all([k[1] == (0, 1024) for k in w_list])
# assert np.all([k[0][1] - k[0][0] == 5 for k in w_list])
# assert w_list[-1][0][0] == 445
# assert w_list[-1][0][1] == 450
# def test_read(mocker):
# """Checks that read calls the right image functions in the right order."""
# a1 = [np.random.rand(10, 25) * 100,
# np.random.rand(10, 25) * 50,
# np.random.rand(10, 25) * 10]
# a2 = [np.random.rand(10, 25) * 100,
# np.random.rand(10, 25) * 50,
# np.random.rand(10, 25) * 10]
# answers = [np.concatenate((i1[..., np.newaxis],
# i2[..., np.newaxis]), axis=-1).astype(np.int32)
# for i1, i2 in zip(a1, a2)]
# im = mocker.Mock()
# im.read = mocker.Mock(side_effect=a1)
# im2 = mocker.Mock()
# im2.read = mocker.Mock(side_effect=a2)
# bands = [tifread.Band(image=im, index=1), tifread.Band(image=im2, index=2)]
# windows = [((0, 10), (0, 25)), ((10, 20), (0, 25)), ((20, 30), (0, 25))]
# it = tifread._read(bands, windows, dtype=np.int32)
# for res, ans in zip(it, answers):
# assert np.all(res == ans)
# assert im.read.call_count == 3
# assert im2.read.call_count == 3
# for im_calls, im2_calls, w in zip(im.read.call_args_list,
# im2.read.call_args_list,
# windows):
# assert im_calls[0][0] == 1
# assert im2_calls[0][0] == 2
# assert im_calls[1] == {'window': w}
# assert im2_calls[1] == {'window': w}
# @pytest.mark.parametrize("block_rows", [None, 3])
# def test_imagestack(mocker, block_rows):
# """Constructs and image stack ensuring it calls all the right fns."""
# call = mocker.mock_module.call
# m_open = mocker.patch('landshark.importers.tifread.rasterio.open')
# m_open.return_value = [mocker.Mock(), mocker.Mock()]
# width = 10
# height = 20
# affine = rasterio.transform.IDENTITY
# m_match = mocker.patch('landshark.importers.tifread._match')
# m_match.side_effect = [width, height, affine]
# m_bands = mocker.patch('landshark.importers.tifread._bands')
# # m_bands.return_value = tifread.BandCollection(ordinal=[mocker.Mock()],
# # categorical=[mocker.Mock()])
# m_bands.return_value = [mocker.Mock()]
# m_names = mocker.patch('landshark.importers.tifread._names')
# m_names.return_value = mocker.Mock()
# m_block_rows = mocker.patch('landshark.importers.tifread._block_rows')
# m_block_rows.return_value = 2
# m_missing = mocker.patch('landshark.importers.tifread._missing')
# m_missing.return_value = mocker.Mock()
# m_windows = mocker.patch('landshark.importers.tifread._windows')
# m_windows.return_value = mocker.Mock()
# m_pixels = mocker.patch('landshark.importers.tifread.pixel_coordinates')
# m_pixels.return_value = (np.zeros((10, 2)), np.zeros((10, 2)))
# ord_paths = ['my/ord/path', 'my/other/ord/path']
# cat_paths = ['my/cat/path', 'my/other/cat/path']
# stack = tifread.ImageStack(ord_paths, cat_paths, block_rows)
# m_open_calls = [call(ord_paths[0], 'r'), call(ord_paths[1], 'r'),
# call(cat_paths[0], 'r'), call(cat_paths[1], 'r')]
# m_open.assert_has_calls(m_open_calls, any_order=False)
# assert stack.width == width
# assert stack.height == height
# assert stack.affine == affine
# assert stack.ordinal_bands == m_bands.return_value
# assert stack.categorical_bands == m_bands.return_value
# assert stack.ordinal_names == m_names.return_value
# assert stack.categorical_names == m_names.return_value
# assert stack.ordinal_dtype == np.float32
# assert stack.categorical_dtype == np.int32
# assert stack.windows == m_windows.return_value
# assert stack.block_rows == (block_rows if block_rows
# else m_block_rows.return_value)
# m_missing_calls = [
# call(m_bands.return_value, dtype=stack.ordinal_dtype),
# call(m_bands.return_value, dtype=stack.categorical_dtype)
# ]
# m_missing.assert_has_calls(m_missing_calls, any_order=True)
# m_read = mocker.patch('landshark.importers.tifread._read')
# stack.categorical_blocks()
# m_read.assert_called_with(stack.categorical_bands,
# stack.windows,
# stack.categorical_dtype)
# stack.ordinal_blocks()
# m_read.assert_called_with(stack.ordinal_bands,
# stack.windows,
# stack.ordinal_dtype)
# class FakeImage:
# def __init__(self, name, width, height, affine, dtypes, block_rows):
# self.name = name
# self.width = width
# self.affine = affine
# self.height = height
# self.dtypes = dtypes
# self.count = len(self.dtypes)
# self.nodatavals = [-1.0 for i in range(self.count)]
# self.block_shapes = [(block_rows, width) for w in range(self.count)]
# def test_imagestack_real(mocker):
# affine = rasterio.transform.IDENTITY
# im1 = FakeImage(name='im1', width=10, height=5, affine=affine,
# dtypes=[np.dtype('uint8'), np.dtype('int32')],
# block_rows=2)
# im2 = FakeImage(name='im2', width=10, height=5, affine=affine,
# dtypes=[np.dtype('float32'), np.dtype('float64')],
# block_rows=3)
# m_open = mocker.patch('landshark.importers.tifread.rasterio.open')
# m_open.side_effect = iter([im1, im2])
# cat_paths = ['path1']
# ord_paths = ['path2']
# stack = tifread.ImageStack(cat_paths, ord_paths)
# cat_bands = [tifread.Band(image=im1, index=1),
# tifread.Band(image=im1, index=2)]
# ord_bands = [tifread.Band(image=im2, index=1),
# tifread.Band(image=im2, index=2)]
# assert stack.affine == affine
# assert stack.width == 10
# assert stack.height == 5
# assert stack.block_rows == 3
# assert stack.categorical_bands == cat_bands
# assert stack.ordinal_bands == ord_bands
# assert stack.categorical_dtype == np.int32
# assert stack.ordinal_dtype == np.float32
# assert stack.categorical_missing == [-1, -1]
# assert stack.ordinal_missing == [-1., -1.]
# assert stack.categorical_names == ['im1_1', 'im1_2']
# assert stack.ordinal_names == ['im2_1', 'im2_2']
# assert stack.windows == [((0, 3), (0, 10)), ((3, 5), (0, 10))]
# assert np.all(stack.coordinates_x == np.arange(10 + 1, dtype=float))
# assert np.all(stack.coordinates_y == np.arange(5 + 1, dtype=float))
# def test_block_shape():
# """Checks the (simple) multiplication for total size."""
# width = 4
# height = 5
# nbands = 3
# w = ((1, 1 + height), (3, 3 + width))
# r = tifread._block_shape(w, nbands)
# assert r == (height, width, nbands)
| 39.111888 | 83 | 0.601645 | 1,494 | 11,186 | 4.350736 | 0.145917 | 0.040615 | 0.051692 | 0.053538 | 0.378154 | 0.251077 | 0.212154 | 0.174154 | 0.150154 | 0.121538 | 0 | 0.042171 | 0.238959 | 11,186 | 285 | 84 | 39.249123 | 0.721367 | 0.952977 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
44701f3ed1ab060c31e0e937302e5f452238be28 | 286 | py | Python | src/vendor/dhcpgeni3/plugin.py | radomirklacza/C-BAS | 5005cf43f57302dc0f58b9d1b9cf7e4e3ab70e32 | [
"BSD-3-Clause"
] | null | null | null | src/vendor/dhcpgeni3/plugin.py | radomirklacza/C-BAS | 5005cf43f57302dc0f58b9d1b9cf7e4e3ab70e32 | [
"BSD-3-Clause"
] | null | null | null | src/vendor/dhcpgeni3/plugin.py | radomirklacza/C-BAS | 5005cf43f57302dc0f58b9d1b9cf7e4e3ab70e32 | [
"BSD-3-Clause"
] | 2 | 2017-08-07T15:24:05.000Z | 2018-10-11T10:53:23.000Z | import eisoil.core.pluginmanager as pm
from dhcpgenithreedelegate import DHCPGENI3Delegate
def setup():
    """Plugin entry point: wire the DHCP GENI v3 delegate into eiSoil.

    Builds the delegate, fetches the shared 'geniv3handler' service and
    installs the delegate on it.  Config keys could be registered here
    via ``pm.getService("config")`` if the plugin ever needs any.
    """
    reply_delegate = DHCPGENI3Delegate()
    pm.getService('geniv3handler').setDelegate(reply_delegate)
44703fdce6aac0f2688aaf5f9ec6e514f85d4eba | 394 | py | Python | seo/proxies.py | ceb10n/seo | 49737fdfc545aeafa918900a5defec11c1d6797c | [
"MIT"
] | 1 | 2021-06-19T09:26:03.000Z | 2021-06-19T09:26:03.000Z | seo/proxies.py | ceb10n/seo | 49737fdfc545aeafa918900a5defec11c1d6797c | [
"MIT"
] | null | null | null | seo/proxies.py | ceb10n/seo | 49737fdfc545aeafa918900a5defec11c1d6797c | [
"MIT"
] | 1 | 2020-04-13T17:37:29.000Z | 2020-04-13T17:37:29.000Z | import random
import itertools
class Proxy:
    """Selects proxies from a fixed list, either randomly or round-robin."""

    def __init__(self, proxies):
        # The raw proxy list; kept public for callers that inspect it.
        self.proxies = proxies
        self.proxy_count = len(self.proxies)
        # Infinite round-robin iterator over the proxies.
        self._rotating_proxy = itertools.cycle(proxies)

    def get_random(self):
        """Return a uniformly random proxy from the list."""
        # Bug fix: random.randint(0, self.proxy_count) is inclusive at both
        # ends, so it could produce an index one past the end of the list
        # and raise IndexError.  random.choice has no such off-by-one.
        return random.choice(self.proxies)

    def get_next(self):
        """Return the next proxy in round-robin order."""
        return next(self._rotating_proxy)
| 20.736842 | 55 | 0.667513 | 49 | 394 | 5.122449 | 0.387755 | 0.175299 | 0.119522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003356 | 0.243655 | 394 | 18 | 56 | 21.888889 | 0.838926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.166667 | 0.083333 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
4475590c1b70563bd14935c95f52893c912778f8 | 1,550 | py | Python | tests/repo/test_models.py | trewjames/receipt-tracker | 1f1768741f8599252f9bb153f111b245bdc995d4 | [
"MIT"
] | null | null | null | tests/repo/test_models.py | trewjames/receipt-tracker | 1f1768741f8599252f9bb153f111b245bdc995d4 | [
"MIT"
] | null | null | null | tests/repo/test_models.py | trewjames/receipt-tracker | 1f1768741f8599252f9bb153f111b245bdc995d4 | [
"MIT"
] | null | null | null | from receipt_tracker.repo.models import Seller, Buyer, Receipt
def test_table_initial_inserts(db_session):
    """Seed rows are present and repr'd exactly as expected."""
    # NOTE(review): relies on the db_session fixture pre-seeding the test
    # database with these exact buyers/sellers/receipts -- confirm fixture.
    query_buyer = db_session.query(Buyer).all()
    query_seller = db_session.query(Seller).all()
    query_receipt = db_session.query(Receipt).all()
    # Comparing via str() checks each model's __repr__ as well as the data.
    assert str(query_buyer) == '[Buyer(1, James Trew), Buyer(2, Eugene Min), Buyer(3, Anna Trew)]'
    assert str(query_seller) == '[Seller(1, Steam), Seller(2, No Frills), Seller(3, Amazon), Seller(4, Always Clean Coin Laundry), Seller(5, Eagle Dynamics)]'
    assert str(query_receipt) == '[Receipt(1, James Trew, Steam, 2020-08-16, 9.67, Steam game), Receipt(2, James Trew, No Frills, 2020-08-17, 17.86, Groceries), Receipt(3, Eugene Min, Amazon, 2020-08-18, 57.36, Random amazon purchases), Receipt(4, Eugene Min, Always Clean Coin Laundry, 2020-08-19, 2.5, None)]'
def test_table_relationship(db_session):
    """Buyer.purchases and Seller.sales relationships resolve to receipts."""
    james = db_session.query(Buyer).filter(Buyer.id == 1).one()
    steam = db_session.query(Seller).filter(Seller.id == 1).one()
    assert james.name == 'James Trew'
    # The relationship collections repr as lists of Receipt objects.
    assert str(james.purchases) == '[Receipt(1, James Trew, Steam, 2020-08-16, 9.67, Steam game), Receipt(2, James Trew, No Frills, 2020-08-17, 17.86, Groceries)]'
    assert steam.name == 'Steam'
    assert str(steam.sales) == '[Receipt(1, James Trew, Steam, 2020-08-16, 9.67, Steam game)]'
def test_table_column_names():
    """The Buyer table exposes exactly the id and name columns."""
    assert Buyer.__table__.columns.keys() == ['id', 'name']
def test_table_name():
    """The Buyer model maps to the 'buyers' database table."""
    assert Buyer.__tablename__ == 'buyers'
| 44.285714 | 311 | 0.695484 | 239 | 1,550 | 4.359833 | 0.284519 | 0.060461 | 0.080614 | 0.054703 | 0.209213 | 0.209213 | 0.209213 | 0.209213 | 0.209213 | 0.209213 | 0 | 0.072838 | 0.149677 | 1,550 | 34 | 312 | 45.588235 | 0.717754 | 0 | 0 | 0 | 0 | 0.238095 | 0.438065 | 0 | 0 | 0 | 0 | 0 | 0.428571 | 1 | 0.190476 | false | 0 | 0.047619 | 0 | 0.238095 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
44758aff9616b14583078b1405c0e1e11d71f51f | 1,892 | py | Python | mach_cad/model_obj/dimensions/dim_base.py | Severson-Group/MachEval | dbb7999188133f8744636da53cab475ae538ce80 | [
"BSD-3-Clause"
] | 6 | 2021-11-02T20:12:32.000Z | 2021-11-13T10:50:35.000Z | mach_cad/model_obj/dimensions/dim_base.py | Severson-Group/MachEval | dbb7999188133f8744636da53cab475ae538ce80 | [
"BSD-3-Clause"
] | 18 | 2021-11-29T20:14:55.000Z | 2022-03-02T07:17:37.000Z | mach_cad/model_obj/dimensions/dim_base.py | Severson-Group/MachEval | dbb7999188133f8744636da53cab475ae538ce80 | [
"BSD-3-Clause"
] | 1 | 2022-01-29T00:52:38.000Z | 2022-01-29T00:52:38.000Z | from abc import abstractmethod, ABC
class DimBase(float, ABC):
    """Abstract base class for dimensioned float values.

    A subclass supplies ``_conversion_factor`` -- the multiplier mapping
    the subclass unit to the dimensionless base unit.  Arithmetic first
    converts both operands to the base unit, combines them there, and
    converts the result back into the unit of ``self``.
    """

    @abstractmethod
    def _conversion_factor(self):
        # Subclasses override this name (typically with a plain numeric
        # class attribute) to define the unit -> base-unit multiplier.
        pass

    def __new__(cls, value):
        # float is immutable, so the numeric value is fixed in __new__.
        return float.__new__(cls, value)

    def __add__(self, other):
        add = self._to_dimensionless() + other._to_dimensionless()
        return type(self)._from_dimensionless(type(self), add)

    def __sub__(self, other):
        sub = self._to_dimensionless() - other._to_dimensionless()
        return type(self)._from_dimensionless(type(self), sub)

    def __mul__(self, other):
        # Multiplying two dimensioned values would change the dimension,
        # which this hierarchy does not model.
        if isinstance(self, DimBase) and isinstance(other, DimBase):
            raise Exception('Multiplication Not valid')
        if isinstance(self, DimBase):
            mul = other * (self._to_dimensionless())
            return type(self)._from_dimensionless(type(self), mul)

    def __rmul__(self, other):
        if isinstance(self, DimBase) and isinstance(other, DimBase):
            raise Exception('Multiplication Not valid')
        if isinstance(self, DimBase):
            mul = other * (self._to_dimensionless())
            # Bug fix: this result was previously computed but never
            # returned, so ``scalar * dim`` silently evaluated to None.
            return type(self)._from_dimensionless(type(self), mul)

    def __truediv__(self, other):
        if isinstance(other, DimBase):
            # dim / dim cancels the unit: return a plain float ratio.
            div = self._to_dimensionless() / other._to_dimensionless()
            return div
        else:
            # dim / scalar keeps the unit of self.
            div = (self._to_dimensionless()) / (other)
            return type(self)._from_dimensionless(type(self), div)

    def __rtruediv__(self, other):
        # scalar / dim would invert the dimension, which is unsupported.
        raise Exception('Division not valid')

    def _to_dimensionless(self):
        """Return this value expressed in the dimensionless base unit."""
        return float(self) * self._conversion_factor

    def _from_dimensionless(cls, value):
        # Intentionally *not* a @classmethod: existing callers invoke it
        # as ``type(self)._from_dimensionless(type(self), value)``.
        x = value / cls._conversion_factor
        return cls(x)

    def __neg__(self):
        return self * -1

    def __pos__(self):
        # NOTE(review): unary plus returns the magnitude (negates
        # negative values) rather than the value itself -- unusual, but
        # preserved for backward compatibility.
        if self < 0:
            return self * -1
        else:
            return self
| 29.5625 | 70 | 0.623679 | 210 | 1,892 | 5.247619 | 0.2 | 0.136116 | 0.103448 | 0.11343 | 0.580762 | 0.553539 | 0.553539 | 0.477314 | 0.436479 | 0.401089 | 0 | 0.00218 | 0.272727 | 1,892 | 63 | 71 | 30.031746 | 0.798692 | 0 | 0 | 0.26087 | 0 | 0 | 0.034902 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.26087 | false | 0.021739 | 0.021739 | 0.065217 | 0.543478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
447baaff28e1f04802fc5c18c46accd09170234c | 579 | py | Python | molo/commenting/wagtail_hooks.py | praekelt/molo.commenting | f48010f575558aabd9deb5f2281df2a61f512a10 | [
"BSD-2-Clause"
] | null | null | null | molo/commenting/wagtail_hooks.py | praekelt/molo.commenting | f48010f575558aabd9deb5f2281df2a61f512a10 | [
"BSD-2-Clause"
] | 68 | 2015-08-07T08:52:55.000Z | 2018-11-15T09:55:49.000Z | molo/commenting/wagtail_hooks.py | praekeltfoundation/molo.commenting | f48010f575558aabd9deb5f2281df2a61f512a10 | [
"BSD-2-Clause"
] | null | null | null | from django.conf.urls import re_path
from molo.commenting.admin import CommentingModelAdminGroup
from molo.commenting.admin_views import MoloCommentsAdminReplyView
from wagtail.core import hooks
from wagtail.contrib.modeladmin.options import modeladmin_register
@hooks.register('register_admin_urls')
def register_molo_comments_admin_reply_url():
    """Expose the comment admin reply view under the Wagtail admin URLs."""
    reply_url = re_path(
        r'comment/(?P<parent>\d+)/reply/$',
        MoloCommentsAdminReplyView.as_view(),
        name='molo-comments-admin-reply')
    return [reply_url]
# Register the commenting model-admin group with the Wagtail admin.
modeladmin_register(CommentingModelAdminGroup)
| 30.473684 | 66 | 0.763385 | 65 | 579 | 6.6 | 0.507692 | 0.027972 | 0.083916 | 0.107226 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.150259 | 579 | 18 | 67 | 32.166667 | 0.871951 | 0 | 0 | 0 | 0 | 0 | 0.129534 | 0.096718 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | true | 0 | 0.357143 | 0.071429 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
44843e2d4064ac5f438dd898e802636ccdcfef97 | 3,913 | py | Python | 4P80_seminar.py | JoelGritter/LSTM-predict-note | 4e50fa7e87010c7731a0bc7a286e29a58ef99f78 | [
"MIT"
] | null | null | null | 4P80_seminar.py | JoelGritter/LSTM-predict-note | 4e50fa7e87010c7731a0bc7a286e29a58ef99f78 | [
"MIT"
] | null | null | null | 4P80_seminar.py | JoelGritter/LSTM-predict-note | 4e50fa7e87010c7731a0bc7a286e29a58ef99f78 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""4P80_Seminar
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1_LxG1cSVyaKEmU93ZLvF8SVfwt7X--Tj
Joel Gritter & Kindeep Singh Kargil
COSC 4P80 - Seminar Demo
March 29, 2021
"""
# Music output
!sudo apt-get install fluidsynth
!pip install midi2audio
!pip install mingus
from mingus.containers import Note, NoteContainer, Track
from mingus.midi.midi_file_out import write_NoteContainer, write_Track
from midi2audio import FluidSynth
fsy = FluidSynth()
# imports for data manipulation
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
# imports for machine learning
import keras
from keras.models import Sequential
from keras.layers import Dense, LSTM
# read in the notes, make an array with 0's, except for the current note
def read_and_format(input_filepath):
    """Read a CSV of 1-based note numbers and one-hot encode each note.

    Every value in the file becomes an 88-element vector (one slot per
    piano key) containing a single 1.0 at position ``note - 1``.
    """
    encoded = []
    with open(input_filepath) as input_file:
        for row in input_file:
            for token in row.split(","):
                one_hot = [0.0] * 88
                one_hot[int(token) - 1] = 1.0
                encoded.append(one_hot)
    return encoded
input_data = read_and_format("k330-allegro-moderato.csv")
# get the previous 20 notes, predict the next note
def generate_datasets(input_array, n_prev = 20):
    """Build (window, next-item) training pairs from a sequence.

    Each window of ``n_prev`` consecutive items is paired with the item
    that immediately follows it.  Returns two numpy arrays of length
    ``len(input_array) - n_prev``.
    """
    windows = []
    targets = []
    for start in range(len(input_array) - n_prev):
        windows.append(input_array[start:start + n_prev])
        targets.append(input_array[start + n_prev])
    return np.array(windows), np.array(targets)
x, y = generate_datasets(input_data)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.20, shuffle=True)
print(x_train.shape, y_train.shape)
print(x_test.shape, y_test.shape)
print(y_train[0])
# build the model itself
# A single 30-unit LSTM over the 20-step one-hot note window, followed by
# an 88-way softmax (one output class per piano key).
model = Sequential()
model.add(LSTM(30))
model.add(Dense(88, activation="softmax"))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# train the model
# 5% of the training split is held out for validation during training.
model.fit(x_train, y_train, batch_size=10, epochs=100, validation_split=0.05)
# test the model
model.evaluate(x_test, y_test)
# See incorrectly predicted
predictions = model.predict(x_test)
incorrect_indices = []
for (index, (prediction, target)) in enumerate(zip(predictions, y_test)):
pred = np.argmax(prediction)
tar = np.argmax(target)
if pred != tar:
incorrect_indices.append(index)
print(", ".join(map(str, incorrect_indices)))
# Predict song
test_in = x_test[0]
test_out = y_test[0]
# initial - provide inital 20 notes
# n - how many predicted notes to add (i.e. expand by this number)
def make_big_song(initial, n):
    """Extend the seed sequence by ``n`` model-predicted notes.

    initial -- seed sequence of one-hot note vectors (at least 20)
    n -- number of predicted notes to append
    """
    song = list(initial)
    for _ in range(n):
        # Predict the next note from the last 20 notes of the sequence.
        window = np.array([song[-20:]])
        song.append(model.predict(window)[0])
    return np.array(song)
test = make_big_song(test_in, 60)
print(test.shape)
# Expects n x 88
def vector_to_midi(arr, filename="nice.midi"):
    """Write an (n, 88) one-hot note array to a MIDI file via mingus."""
    track = Track()
    for one_hot in arr:
        midi_note = Note()
        # argmax picks the active key; the -3 offset presumably aligns the
        # dataset's key numbering with mingus note ints -- TODO confirm.
        midi_note.from_int(int(np.argmax(one_hot)) - 3)
        track.add_notes(midi_note)
    write_Track(filename, track)
    print("Done!")
vector_to_midi(test)
def predict_to_file(first_20_notes, expected, filename="nice"):
    """Write prediction results for one 20-note window to MIDI files.

    Produces three files named after *filename*: the model's predicted
    next note, the actual next note, and the 20-note input window.
    """
    predicted = model.predict(np.array([first_20_notes]))
    actual = np.array([expected])
    vector_to_midi(predicted, filename + "_predicted_note" + ".midi")
    vector_to_midi(actual, filename + "_actual_note" + ".midi")
    vector_to_midi(first_20_notes, filename + "_first_20_notes" + ".midi")
    # NOTE: midi -> mp3 conversion via FluidSynth was attempted but did
    # not work reliably, e.g.:
    # fsy.midi_to_audio(filename + "_predicted_note.midi", filename + "_predicted_note.mp3")
predict_to_file(test_in, test_out)
inci = incorrect_indices[0]
predict_to_file(x_test[inci], y_test[inci], 'first_incorrect') | 28.355072 | 87 | 0.73013 | 618 | 3,913 | 4.399676 | 0.322006 | 0.023538 | 0.022067 | 0.01655 | 0.078705 | 0.061787 | 0.044134 | 0.025745 | 0.025745 | 0.025745 | 0 | 0.022892 | 0.151546 | 3,913 | 138 | 88 | 28.355072 | 0.796084 | 0.165091 | 0 | 0 | 1 | 0 | 0.053631 | 0.016322 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.1125 | null | null | 0.075 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
448de02dde6c93bb39ed01aff3ae25f8f06a3e82 | 3,128 | py | Python | svm-wsd/generate_dictionary.py | NLeSC/EviDENce | ff51f27e392076e51ad56236d039d38cb5fcadce | [
"Apache-2.0"
] | 3 | 2018-03-23T15:17:30.000Z | 2021-09-20T20:00:59.000Z | svm-wsd/generate_dictionary.py | ADAH-EviDENce/NewsReader | ff51f27e392076e51ad56236d039d38cb5fcadce | [
"Apache-2.0"
] | 26 | 2018-03-14T08:48:48.000Z | 2018-05-15T15:45:35.000Z | svm-wsd/generate_dictionary.py | NLeSC/EviDENce | ff51f27e392076e51ad56236d039d38cb5fcadce | [
"Apache-2.0"
] | 4 | 2019-01-14T10:05:46.000Z | 2021-09-20T20:01:05.000Z | #!/usr/bin/env python
'''
Generates a dictionary for lemmas from Cornetto, with the format:
lemma POS luid1 luid2...
'''
import sys
from lxml import etree
def normalise_pos(long_pos):
    """Map a long part-of-speech label to its one-letter WordNet-style code.

    Unknown labels (and 'other') are returned unchanged.
    """
    mapping = {
        'adjective': 'a',
        'adverb': 'r',
        'noun': 'n',
        'other': 'other',
        'verb': 'v',
    }
    return mapping.get(long_pos, long_pos)
def generate_dictionary(path_to_cornetto, map_cornettoluid_odwnluid, map_odwnLU_to_odwnSY):
    """Print a lemma dictionary on stdout, one line per (lemma, pos) pair.

    Each output line is: lemma pos, followed by triples of
    cornettoLU odwnLU odwnSYNSET for every sense of that pair.
    NOTE(review): uses Python 2 print statements -- Python-2 only code.
    """
    #Load mapping from cornetto_lu to odwn_lu
    # Each line is expected to hold two whitespace-separated ids:
    # <cornetto_lu_id> <odwn_lu_id>
    f = open(map_cornettoluid_odwnluid,'r')
    cornetto_lu_to_odwn_lu = {}
    for line in f:
        fields = line.strip().split()
        cornetto_lu_to_odwn_lu[fields[0]] = fields[1]
    f.close()
    #########
    #Load map odwn LU to odwn SY
    odwn_lu_to_odwn_synset = {}
    f = open(map_odwnLU_to_odwnSY)
    for line in f:
        fields = line.strip().split()
        odwn_lu_to_odwn_synset[fields[0]] = fields[1]
    f.close()
    # Collect all sense ids (LU ids) per (lemma, short-pos) pair from the
    # Cornetto LMF file.
    luids_for_lemma_pos = {}
    tree = etree.parse(path_to_cornetto,etree.XMLParser(remove_blank_text=1))
    for lex_entry in tree.findall('Lexicon/LexicalEntry'):
        long_pos = lex_entry.get('partOfSpeech')
        short_pos = normalise_pos(long_pos)
        lemma_obj = lex_entry.find('Lemma')
        if lemma_obj is not None:
            lemma = lemma_obj.get('writtenForm')
            sense = lex_entry.find('Sense').get('senseId')
            if (lemma,short_pos) not in luids_for_lemma_pos:
                luids_for_lemma_pos[(lemma,short_pos)] = [sense]
            else:
                luids_for_lemma_pos[(lemma,short_pos)].append(sense)
    # Progress count goes to stderr; the dictionary itself goes to stdout.
    print>>sys.stderr,len(luids_for_lemma_pos)
    print '###lemma pos cornettoLU1 odwnLU1 odwnSYNSET1 [cornettoLU2 odwnLU2 odwnSYNSET2] [... ... ...]'
    for (lemma,short_pos), luids in luids_for_lemma_pos.items():
        print lemma.encode('utf-8')+' '+short_pos.encode('utf-8'),
        for cornettoLU in set(luids):
            #1 Map to odwn LU
            # NOTE(review): ids starting with 'r'/'c' are kept as-is
            # (presumably already ODWN ids); 'd' ids go through the
            # cornetto->odwn map; all other prefixes are dropped.
            odwnLU = None
            if cornettoLU[0] in ['r','c']:
                odwnLU = cornettoLU
            elif cornettoLU[0] == 'd':
                odwnLU = cornetto_lu_to_odwn_lu.get(cornettoLU)
            else:
                #Ignore the rest
                odwnLU = None
            # 2 map go odwn Synset
            odwnSY = None
            if odwnLU is not None:
                odwnSY = odwn_lu_to_odwn_synset.get(odwnLU)
            print cornettoLU+' '+str(odwnLU)+' '+str(odwnSY),
        print
if __name__ == '__main__':
    # Hard-coded input locations: adjust these to the local copies of the
    # Cornetto 2.1 LMF file and the two id-mapping files.
    path_to_cornetto_kyoto = '/home/izquierdo/wordnets/cornetto2.1_lmf/cornetto2.1.lmf.xml'
    map_cornettoluid_odwnluid = '/home/izquierdo/wordnets/cornetto2.1_lmf/mapping_cornettoLU_2_odwnLU'
    map_odwnLU_to_odwnSY = '/home/izquierdo/wordnets/cornetto2.1_lmf/mapping_odwnLU_2_odwnSY'
    path_to_cornetto = path_to_cornetto_kyoto
    generate_dictionary(path_to_cornetto, map_cornettoluid_odwnluid, map_odwnLU_to_odwnSY)
9227496ea43089c72c557f09743a7d47c902d053 | 1,573 | py | Python | pyhafas/profile/base/__init__.py | TheMinefighter/pyhafas | 0f13a690c7585126b4ec11d95a8489da94d0c538 | [
"MIT"
] | 12 | 2020-02-03T14:22:28.000Z | 2020-07-30T18:04:56.000Z | pyhafas/profile/base/__init__.py | TheMinefighter/pyhafas | 0f13a690c7585126b4ec11d95a8489da94d0c538 | [
"MIT"
] | 4 | 2020-07-30T10:40:21.000Z | 2020-08-28T17:10:31.000Z | pyhafas/profile/base/__init__.py | TheMinefighter/pyhafas | 0f13a690c7585126b4ec11d95a8489da94d0c538 | [
"MIT"
] | 9 | 2021-01-16T23:03:18.000Z | 2022-02-27T15:39:22.000Z | from typing import Dict, List
from pyhafas.profile.base.helper.date_time import BaseDateTimeHelper
from pyhafas.profile.base.helper.format_products_filter import \
BaseFormatProductsFilterHelper
from pyhafas.profile.base.helper.parse_leg import BaseParseLegHelper
from pyhafas.profile.base.helper.parse_lid import BaseParseLidHelper
from pyhafas.profile.base.helper.request import BaseRequestHelper
from pyhafas.profile.base.requests.journey import BaseJourneyRequest
from pyhafas.profile.base.requests.journeys import BaseJourneysRequest
from pyhafas.profile.base.requests.location import BaseLocationRequest
from pyhafas.profile.base.requests.station_board import BaseStationBoardRequest
from pyhafas.profile.base.requests.trip import BaseTripRequest
from pyhafas.profile.interfaces import ProfileInterface
class BaseProfile(
    BaseRequestHelper,
    BaseFormatProductsFilterHelper,
    BaseParseLidHelper,
    BaseDateTimeHelper,
    BaseParseLegHelper,
    BaseLocationRequest,
    BaseJourneyRequest,
    BaseJourneysRequest,
    BaseStationBoardRequest,
    BaseTripRequest,
    ProfileInterface):
    """
    Profile for a "normal" HaFAS. Only for other profiles usage as basis.
    """
    # HaFAS endpoint URL; empty here, concrete profiles override it.
    baseUrl: str = ""
    defaultUserAgent: str = 'pyhafas'
    # Presumably request-signing toggles (mic/mac and checksum) used by the
    # request helper -- confirm against BaseRequestHelper.
    addMicMac: bool = False
    addChecksum: bool = False
    salt: str = ""
    # NOTE(review): mutable class attributes -- shared across instances
    # unless a subclass/instance replaces them.
    requestBody: dict = {}
    availableProducts: Dict[str, List[int]] = {}
    defaultProducts: List[str] = []
    def __init__(self, ua: str = defaultUserAgent) -> None:
        # User agent sent with each request; overridable per instance.
        self.userAgent = ua
| 34.195652 | 79 | 0.758423 | 153 | 1,573 | 7.732026 | 0.431373 | 0.102282 | 0.167371 | 0.185968 | 0.253593 | 0.05579 | 0 | 0 | 0 | 0 | 0 | 0 | 0.172282 | 1,573 | 45 | 80 | 34.955556 | 0.908602 | 0.043865 | 0 | 0 | 0 | 0 | 0.004704 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.028571 | false | 0 | 0.342857 | 0 | 0.628571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
9236dc631fb10e266c6f9aa7998f3c61a6b179d4 | 787 | py | Python | videoflow/engines/task_functions.py | Muflhi01/videoflow | c49d3fe6c814574bcda1a4e907ce52ea86e1617c | [
"MIT"
] | 1,022 | 2019-05-24T21:27:49.000Z | 2022-03-30T04:08:35.000Z | videoflow/engines/task_functions.py | Muflhi01/videoflow | c49d3fe6c814574bcda1a4e907ce52ea86e1617c | [
"MIT"
] | 57 | 2019-05-25T06:48:44.000Z | 2021-06-23T17:17:51.000Z | videoflow/engines/task_functions.py | Muflhi01/videoflow | c49d3fe6c814574bcda1a4e907ce52ea86e1617c | [
"MIT"
] | 88 | 2019-05-23T14:24:14.000Z | 2022-03-28T05:06:33.000Z | from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import os
from multiprocessing import Process, Queue, Event, Lock
from ..core.task import Task
def task_executor_fn(task : Task):
    """Run *task* on CPU only: hide every CUDA device, then execute it."""
    os.environ.update({
        "CUDA_DEVICE_ORDER": "PCI_BUS_ID",
        "CUDA_VISIBLE_DEVICES": "-1",
    })
    task.run()
def task_executor_gpu_fn(task : Task, gpu_id : int):
    """Run *task* pinned to the single GPU *gpu_id* (PCI bus ordering)."""
    os.environ.update({
        "CUDA_DEVICE_ORDER": "PCI_BUS_ID",
        "CUDA_VISIBLE_DEVICES": str(gpu_id),
    })
    task.run()
def create_process_task(task):
    """Return an unstarted Process that will run *task* on CPU."""
    return Process(target=task_executor_fn, args=(task,))
def create_process_task_gpu(task, gpu_id):
    """Return an unstarted Process that will run *task* on GPU *gpu_id*."""
    return Process(target=task_executor_gpu_fn, args=(task, gpu_id))
| 26.233333 | 72 | 0.738247 | 116 | 787 | 4.62069 | 0.327586 | 0.089552 | 0.097015 | 0.070896 | 0.328358 | 0.220149 | 0.220149 | 0.220149 | 0.220149 | 0.220149 | 0 | 0.001513 | 0.160102 | 787 | 29 | 73 | 27.137931 | 0.80938 | 0 | 0 | 0.285714 | 0 | 0 | 0.122137 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.190476 | false | 0 | 0.333333 | 0 | 0.619048 | 0.047619 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
9275f58ad097ef17e0dc4bf9dfff3e98ceed9613 | 525 | py | Python | users/admin.py | ephyle/Legit-Info | 7f3845563a64299aa64e4fdba75949276ed9a711 | [
"BSD-2-Clause",
"CC-BY-4.0",
"Apache-2.0"
] | 44 | 2020-10-19T13:06:10.000Z | 2022-01-23T10:56:31.000Z | users/admin.py | ephyle/Legit-Info | 7f3845563a64299aa64e4fdba75949276ed9a711 | [
"BSD-2-Clause",
"CC-BY-4.0",
"Apache-2.0"
] | 111 | 2020-10-20T22:12:58.000Z | 2022-03-28T00:25:13.000Z | users/admin.py | ephyle/Legit-Info | 7f3845563a64299aa64e4fdba75949276ed9a711 | [
"BSD-2-Clause",
"CC-BY-4.0",
"Apache-2.0"
] | 31 | 2021-02-08T22:32:37.000Z | 2022-03-11T10:57:29.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
users/admin.py -- Decide what appears on Admin screen
Written by Tony Pearson, IBM, 2020
Licensed under Apache 2.0, see LICENSE for details
"""
# System imports
from django.contrib import admin
from django.contrib.auth.models import Group
# Register your models here.
from .models import Profile
admin.site.unregister(Group)
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
""" Specify columns to display """
list_display = ("user", "criteria")
| 21 | 53 | 0.729524 | 72 | 525 | 5.305556 | 0.763889 | 0.052356 | 0.089005 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017978 | 0.152381 | 525 | 24 | 54 | 21.875 | 0.840449 | 0.48381 | 0 | 0 | 0 | 0 | 0.047244 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
927759ee473be3c569c013ea9fbbbf0ffc8fd0b7 | 3,751 | py | Python | pyhow/samples/syntax/str_format.py | yoeo/pyhow | e882cc3a7b9765d6d4472de08128ac7b3c98c7c1 | [
"MIT"
] | 2 | 2016-02-29T13:57:58.000Z | 2016-03-21T16:40:46.000Z | pyhow/samples/syntax/str_format.py | yoeo/pyhow | e882cc3a7b9765d6d4472de08128ac7b3c98c7c1 | [
"MIT"
] | 1 | 2018-06-21T08:58:54.000Z | 2018-06-21T08:58:54.000Z | pyhow/samples/syntax/str_format.py | yoeo/pyhow | e882cc3a7b9765d6d4472de08128ac7b3c98c7c1 | [
"MIT"
] | null | null | null | """String formating language samples."""
import collections
import locale
# category: exemples
def basic_formating():
    """Simple replacement..."""
    # Auto-numbered field: "{}" consumes the first positional argument.
    return "{}".format('infinite')
def deep_formating():
    """Mix of many formating possibilities."""
    # Attribute/index lookups inside a field: Ellipsis -> its class ->
    # first base class (object) -> __name__, shown with !r  ->  "'object'"
    return "{value.__class__.__bases__[0].__name__!r}".format(
        **{'value': ...})
def composed_formating():
    """Composed formating."""
    # Nested format spec: the precision is itself a field  ->  "E"
    return "{!s:.{}}".format(..., 1)
# category: value adaptation
def value_conversion():
    """Convert to: !r ↔ repr, !s ↔ str, !a ↔ ascii."""
    # -> "'infinîte' or infinîte or 'infin\xeete'"
    return "{0!r} or {0!s} or {0!a}".format('infinîte')
def value_formating():
    """Format a value with the underlying format function."""
    # format() delegates to the object's __format__, which receives the
    # format spec ('lol' here) and may return anything  ->  "(^_^)"
    happy_value = type('HappyFormater', (object,), {
        '__format__': lambda _, template: template.replace('lol', '(^_^)')})()
    return "{:lol}".format(happy_value)
# category: field
def field_index():
    """Get field at a given position."""
    # -> "666 and not infinite"
    return "{1} and not {0}".format('infinite', 666)
def field_name():
    """Get field by name."""
    # -> "sixty-nine and not 9"
    return "{value} and not {0}".format(9, value='sixty-nine')
def field_element_index():
    """Get field element by index."""
    # -> "o" (second character of 'lol')
    return "{value[1]}".format(value='lol')
def field_attribute_index():
    """Get field attribute."""
    make_value = collections.namedtuple('Value', ['absolute_value'])
    # -> "absolute_zero"
    return "{0.absolute_value}".format(make_value('absolute_zero'))
# category: spec align
def align_left():
    """Left aligned."""
    # -> "-666      !"
    return "{:<10}!".format(-666)
def align_right():
    """Right aligned."""
    # -> "      -666!"
    return "{:>10}!".format(-666)
def center():
    """Centered."""
    # -> "   -666   !"
    return "{:^10}!".format(-666)
def numerical_center():
    """Numerically centered."""
    # '=' places the padding between the sign and the digits -> "-      666!"
    return "{:=10}!".format(-666)
def fill():
    """Fill empty space."""
    # -> "~~~-666~~~! or -000000666"
    return "{0:~^10}! or {0:010}".format(-666)
def truncate():
    """Truncate the value."""
    # Precision on a string limits its length -> "big_num"
    return "{:.7}".format('big_number')
def sign():
    """Show sign for positive numbers: + ↔ yes, - ↔ no, ' ' ↔ show space."""
    # -> "x may be *+1**, **1**, * 1**!"
    return "x may be {0:*^+5}, {0:*^-5}, {0:*^ 5}!".format(1)
# category: spec numerical presentation
def alternate_form():
    """Alternate form with extra 0x, 0b, 0o for int and .000 for float."""
    # -> "0b11111111 or 255.000"
    return "{0:#b} or {0:#g}".format(2**8-1)
def numerical_separator():
    """Use comma as big number separator."""
    # -> "1,000,000!"
    return "{:,}!".format(10**6)
def precision():
    """Float precision."""
    # -> "333.333"
    return "{:.3f}".format(1000/3)
# category: spec type conversion
def binary():
"""Binary representation."""
return "{:b}".format(2**8-1)
def character():
    """'c': render the integer as the corresponding Unicode character."""
    code_point = 65
    return "{:c}".format(code_point)
def decimal():
    """'d': decimal presentation — the default for integers."""
    value = 2 ** 8 - 1
    return "{:d}".format(value)
def octal():
    """'o': octal presentation of an integer."""
    value = 2 ** 8 - 1
    return "{:o}".format(value)
def hexadecimal():
    """'x'/'X': hexadecimal presentation, lower and upper case."""
    value = 2 ** 8 - 1
    return "{0:x} or {0:X}".format(value)
def localized_number():
    """Localized number representation."""
    # NOTE(review): requires the fr_FR.UTF-8 locale to be installed on the
    # host; locale.setlocale raises locale.Error otherwise — confirm.
    locale.setlocale(locale.LC_NUMERIC, ('fr_FR', 'UTF-8'))
    # 'n' is like 'd' but inserts the locale's thousands separators.
    message = "{:n}".format(10**6)
    # (None, None) normalizes to the 'C' locale, restoring the default.
    locale.setlocale(locale.LC_NUMERIC, (None, None))
    return message
def float_exponent():
    """'e'/'E': scientific notation, lower and upper case exponent marker."""
    third = 1 / 3
    return "{0:.2e} or {0:.2E}".format(third)
def float_fixed_point():
    """'f'/'F': fixed point; 'F' upper-cases the inf/nan spellings."""
    third = 1 / 3
    huge = 1e1000  # overflows to inf
    return "({0:.2f}, {1:.2f}) or ({0:.2F}, {1:.2F})".format(third, huge)
def float_general_format():
    """'g'/'G': general format (the float default); switches to exponent
    notation for very small/large magnitudes."""
    third = 1 / 3
    tiny = 10 ** -9 / 3
    return "({0:.2g}, {1:.2g}) or ({0:.2g}, {1:.2G})".format(third, tiny)
def string():
    """'s': explicit string presentation type (the default)."""
    text = 'something'
    return "{:s}".format(text)
| 18.387255 | 78 | 0.596907 | 471 | 3,751 | 4.658174 | 0.326964 | 0.025524 | 0.027347 | 0.02051 | 0.104831 | 0.050137 | 0 | 0 | 0 | 0 | 0 | 0.041721 | 0.188483 | 3,751 | 203 | 79 | 18.477833 | 0.67707 | 0.28579 | 0 | 0 | 0 | 0.029851 | 0.210067 | 0.016251 | 0 | 0 | 0 | 0 | 0 | 1 | 0.432836 | false | 0 | 0.029851 | 0 | 0.895522 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
92835ed7ba90a4604478bcb622df269984ecdd34 | 7,812 | py | Python | tests/test_model_base.py | erikw/python-taiga | 006fce1c793a345cc4464ca7a1bfdd1beedb7744 | [
"MIT"
] | null | null | null | tests/test_model_base.py | erikw/python-taiga | 006fce1c793a345cc4464ca7a1bfdd1beedb7744 | [
"MIT"
] | null | null | null | tests/test_model_base.py | erikw/python-taiga | 006fce1c793a345cc4464ca7a1bfdd1beedb7744 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from taiga.requestmaker import RequestMaker
from taiga.models.base import InstanceResource, ListResource, SearchableList
import unittest
from mock import patch
import datetime
from .tools import MockResponse
class Fake(InstanceResource):
    # Minimal InstanceResource subclass used as the fixture for the tests below.
    endpoint = 'fakes'
    allowed_params = ['param1', 'param2']
    repr_attribute = 'param1'
    def my_method(self):
        # NOTE(review): `projects` is never imported in this module, so calling
        # my_method() would raise NameError; no test below exercises it.
        response = self.requester.get('/users/{id}/starred', id=self.id)
        return projects.Projects.parse(response.json(), self.requester)
class Fakes(ListResource):
    # List-side counterpart of Fake; ListResource builds items via `instance`.
    instance = Fake
class TestModelBase(unittest.TestCase):
    """Tests for InstanceResource / ListResource / SearchableList behaviour."""

    def test_encoding(self):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        param2 = {
            'list': [u'Caf\xe9 project', 'Andrea'],
            'dict': {
                'el1': 'Andrea',
                'el2': u'Caf\xe9 project'
            }
        }
        fake = Fake(rm, id=1, param1=u'Caf\xe9 project', param2=param2)
        self.assertEqual(fake.param1, 'Café project')
        self.assertEqual(fake.param2['list'][0], 'Café project')
        self.assertEqual(fake.param2['dict']['el2'], 'Café project')

    @patch('taiga.requestmaker.RequestMaker.put')
    def test_call_model_base_update(self, mock_requestmaker_put):
        # NOTE(review): this test was previously defined twice with an
        # identical body; the later duplicate silently shadowed this one in
        # the class dict, so it has been removed.
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two')
        fake.update()
        mock_requestmaker_put.assert_called_once_with('/{endpoint}/{id}', endpoint='fakes',
                                                      id=1, payload=fake.to_dict())

    @patch('taiga.requestmaker.RequestMaker.put')
    def test_call_model_base_update_with_params(self, mock_requestmaker_put):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two')
        fake.update(comment='comment')
        # Extra keyword arguments to update() are merged into the payload.
        dict_res = fake.to_dict()
        dict_res['comment'] = 'comment'
        mock_requestmaker_put.assert_called_once_with(
            '/{endpoint}/{id}', endpoint='fakes',
            id=1, payload=dict_res
        )

    @patch('taiga.requestmaker.RequestMaker.put')
    def test_call_model_base_update_with_version(self, mock_requestmaker_put):
        mock_requestmaker_put.return_value = MockResponse(200, "{\"version\": 2}")
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two')
        fake.update()
        mock_requestmaker_put.assert_called_once_with(
            '/{endpoint}/{id}', endpoint='fakes',
            id=1, payload=fake.to_dict()
        )
        # The version returned by the server must be written back onto the model.
        self.assertEqual(fake.version, 2)

    @patch('taiga.requestmaker.RequestMaker.delete')
    def test_call_model_base_delete(self, mock_requestmaker_delete):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two')
        fake.delete()
        mock_requestmaker_delete.assert_called_once_with('/{endpoint}/{id}', endpoint='fakes', id=1)

    @patch('taiga.requestmaker.RequestMaker.get')
    def test_call_model_base_get_element(self, mock_requestmaker_get):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fakes = Fakes(rm)
        fakes.get(1)
        mock_requestmaker_get.assert_called_once_with('/{endpoint}/{id}', endpoint='fakes', id=1)

    @patch('taiga.requestmaker.RequestMaker.delete')
    def test_call_model_base_delete_element(self, mock_requestmaker_delete):
        # NOTE(review): same scenario as test_call_model_base_delete above.
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two')
        fake.delete()
        mock_requestmaker_delete.assert_called_once_with('/{endpoint}/{id}', endpoint='fakes', id=1)

    @patch('taiga.requestmaker.RequestMaker.delete')
    def test_call_model_base_delete_element_from_list(self, mock_requestmaker_delete):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fakes = Fakes(rm)
        fakes.delete(1)
        mock_requestmaker_delete.assert_called_once_with('/{endpoint}/{id}', endpoint='fakes', id=1)

    @patch('taiga.requestmaker.RequestMaker.get')
    def test_call_model_base_list_elements(self, mock_requestmaker_get):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fakes = Fakes(rm)
        fakes.list()
        mock_requestmaker_get.assert_called_with('fakes', query={})
        fakes.list(project_id=1)
        mock_requestmaker_get.assert_called_with('fakes', query={'project_id': 1})

    def test_to_dict_method(self):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two', param3='three')
        # param3 is not in Fake.allowed_params, so it must not be serialized.
        expected_dict = {'param1': 'one', 'param2': 'two'}
        self.assertEqual(len(fake.to_dict()), 2)
        self.assertEqual(fake.to_dict(), expected_dict)

    def test_searchable_list_filter(self):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake1 = Fake(rm, id=1, param1='one', param2='a')
        fake2 = Fake(rm, id=1, param1='one', param2='b')
        fake3 = Fake(rm, id=1, param1='two', param2='c')
        searchable_list = SearchableList()
        searchable_list.append(fake1)
        searchable_list.append(fake2)
        searchable_list.append(fake3)
        self.assertEqual(len(searchable_list.filter(param1='one')), 2)
        self.assertEqual(len(searchable_list.filter(param1='notexists')), 0)
        self.assertEqual(len(searchable_list.filter(param1='one', param2='a')), 1)
        self.assertEqual(len(searchable_list.filter()), 3)

    def test_searchable_list_get(self):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake1 = Fake(rm, id=1, param1='one', param2='a')
        fake2 = Fake(rm, id=1, param1='one', param2='b')
        fake3 = Fake(rm, id=1, param1='two', param2='c')
        searchable_list = SearchableList()
        searchable_list.append(fake1)
        searchable_list.append(fake2)
        searchable_list.append(fake3)
        # The stray positional arguments previously passed here (0 and 1) were
        # consumed as assertion *messages*, not expectations; they were dropped.
        self.assertTrue(searchable_list.get(param1='one'))
        self.assertFalse(searchable_list.get(param1='notexists'))
        self.assertTrue(searchable_list.get(param1='one', param2='a'))
        self.assertTrue(searchable_list.get())

    @patch('taiga.requestmaker.RequestMaker.put')
    def test_datetime_parsing(self, mock_requestmaker_put):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(
            rm, id=1,
            created_date='2015-02-10T17:55:05+0000',
            modified_date='2015-02-10T17:55:05+0000'
        )
        self.assertTrue(isinstance(fake.created_date, datetime.datetime))
        self.assertTrue(isinstance(fake.modified_date, datetime.datetime))
        # A malformed timestamp must be stored verbatim instead of parsed.
        fake = Fake(
            rm, id=1,
            created_date='2015-02-10T17:55:0',
            modified_date='2015-02-10T17:55:05+0000'
        )
        self.assertFalse(isinstance(fake.created_date, datetime.datetime))
        self.assertTrue(isinstance(fake.modified_date, datetime.datetime))

    def test_repr(self):
        rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
        fake = Fake(rm, id=1, param1='one', param2='two', param3='three')
        rep = fake._rp()
        self.assertEqual(rep, 'one')
        self.assertEqual(fake._rp(), str(fake))
        # With a missing repr_attribute the "Class(id)" fallback form is used.
        fake.repr_attribute = 'notexisting'
        rep = fake._rp()
        self.assertEqual(rep, '{0}({1})'.format(fake.__class__.__name__, fake.id))
| 42.227027 | 100 | 0.644521 | 941 | 7,812 | 5.157279 | 0.128587 | 0.016691 | 0.028024 | 0.031527 | 0.750876 | 0.745312 | 0.699155 | 0.66495 | 0.627859 | 0.613435 | 0 | 0.031386 | 0.204685 | 7,812 | 184 | 101 | 42.456522 | 0.749718 | 0.002688 | 0 | 0.503226 | 0 | 0 | 0.171781 | 0.055334 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.103226 | false | 0 | 0.03871 | 0 | 0.193548 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
929ed90fa5e0e6aea6966379410615ca207da4dc | 3,305 | py | Python | pscheduler-server/pscheduler-server/api-server/pschedulerapiserver/response.py | igarny/pscheduler | 0ab6e68bb3adb808e1116bab0eb7438bf4c31e2c | [
"Apache-2.0"
] | null | null | null | pscheduler-server/pscheduler-server/api-server/pschedulerapiserver/response.py | igarny/pscheduler | 0ab6e68bb3adb808e1116bab0eb7438bf4c31e2c | [
"Apache-2.0"
] | null | null | null | pscheduler-server/pscheduler-server/api-server/pschedulerapiserver/response.py | igarny/pscheduler | 0ab6e68bb3adb808e1116bab0eb7438bf4c31e2c | [
"Apache-2.0"
] | null | null | null | #
# HTTP Response Functions
#
import pscheduler
from werkzeug.datastructures import Headers
from flask import Response
from flask import request
from .args import arg_boolean
from .log import log
# TODO: Duplicative, but easier than the cross-module imports. :-@
def response_json_dump(dump, sanitize=True):
    """Serialize `dump` to JSON text for a response body.

    When `sanitize` is true, keys starting with "_" are stripped first.
    Pretty-printing follows the request's 'pretty' argument.
    """
    payload = pscheduler.json_decomment(dump, prefix="_", null=True) if sanitize else dump
    return pscheduler.json_dump(payload, pretty=arg_boolean('pretty'))
# Responses
def json_response(data):
    """Return a 200 response with `data` serialized as JSON.

    This was an exact duplicate of ok_json(data) with default sanitizing;
    it now delegates so the behavior lives in one place.
    """
    return ok_json(data)
def ok(message="OK", mimetype=None):
    """Return a 200 response carrying `message` as its body."""
    log.debug("Response 200: %s", message)
    body = message + '\n'
    return Response(body, status=200, mimetype=mimetype)
def ok_json(data=None, sanitize=True):
    """Return a 200 response with `data` rendered as JSON."""
    body = response_json_dump(data, sanitize=sanitize)
    log.debug("Response 200+JSON: %s", body)
    return Response(body + '\n', mimetype='application/json')
def bad_request(message="Bad request"):
    """Return a 400 plain-text response."""
    log.debug("Response 400: %s", message)
    body = message + '\n'
    return Response(body, status=400, mimetype="text/plain")
def forbidden(message="Forbidden."):
    """Return a 403 plain-text response and log who was refused."""
    log.debug("Response 403: %s", message)
    log.info("Forbade %s %s %s: %s", request.remote_addr, request.method, request.base_url, message)
    body = message + "\n"
    return Response(body, status=403, mimetype="text/plain")
def not_found(message="Resource Not found.", mimetype="text/plain"):
    """Return a 404 response.

    Bug fix: the `mimetype` parameter was previously accepted but ignored;
    the response type was always hard-coded to "text/plain".
    """
    log.debug("Response 404: %s", message)
    return Response(message + "\n", status=404, mimetype=mimetype)
def not_allowed():
    """Return a 405 response naming the disallowed request method."""
    log.debug("Response 405: %s not allowed.", request.method)
    log.info("Disallowed %s %s %s", request.remote_addr, request.method, request.base_url)
    body = "%s not allowed on this resource\n" % (request.method)
    return Response(body, status=405, mimetype="text/plain")
def conflict(message="Request would create a conflict."):
    """Return a 409 plain-text response."""
    log.debug("Response 409: Conflict")
    body = message + '\n'
    return Response(body, status=409, mimetype="text/plain")
def no_can_do(message=None):
    """Return a 422 response, appending the optional detail message."""
    log.debug("Response 422: %s", message)
    body = "Unable to complete request"
    body += (": " + message) if message is not None else "."
    return Response(body + '\n', status=422, mimetype="text/plain")
def error(message="Unknown internal error"):
    """Return a 500 plain-text response and log the failure details."""
    log.debug("Response 500: %s", message)
    log.error("Internal error %s %s %s: %s", request.remote_addr, request.method, request.base_url, message)
    body = message + '\n'
    return Response(body, status=500, mimetype="text/plain")
def not_implemented(message="Not implemented."):
    """Return a 501 plain-text response and log a warning."""
    log.debug("Response 501: %s", message)
    log.warning("Not implemented %s %s %s: %s", request.remote_addr, request.method, request.base_url, message)
    body = message + "\n"
    return Response(body, status=501, mimetype="text/plain")
def see_other(url):
    """Return a 303 redirect whose Location header and body are `url`."""
    log.debug("Response 303: Redirect to %s", url)
    redirect_headers = Headers([("Location", url)])
    return Response(url + "\n", status=303, headers=redirect_headers)
| 37.134831 | 111 | 0.649017 | 418 | 3,305 | 5.059809 | 0.229665 | 0.04539 | 0.09078 | 0.07565 | 0.328605 | 0.260047 | 0.260047 | 0.208983 | 0.208983 | 0.208983 | 0 | 0.025239 | 0.208775 | 3,305 | 88 | 112 | 37.556818 | 0.783556 | 0.029652 | 0 | 0.092308 | 0 | 0 | 0.20806 | 0 | 0 | 0 | 0 | 0.011364 | 0 | 1 | 0.2 | false | 0 | 0.092308 | 0 | 0.507692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
92a5cd83e8005d4b235832d6f72f6a9471931e29 | 1,475 | py | Python | drawchallenge.py | Lacko545/TIMER | ab63f427fb9c59b8fbd6e13702e0fe0c9541f7e8 | [
"WTFPL"
] | null | null | null | drawchallenge.py | Lacko545/TIMER | ab63f427fb9c59b8fbd6e13702e0fe0c9541f7e8 | [
"WTFPL"
] | null | null | null | drawchallenge.py | Lacko545/TIMER | ab63f427fb9c59b8fbd6e13702e0fe0c9541f7e8 | [
"WTFPL"
] | null | null | null | import time
import sys
import os
import platform
import subprocess
if platform.system()=="Linux" :
from os import popen
elif platform.system()=="Windows" :
import winsound
def timemyshit(minutes, seconds):
    """Count down `minutes` and `seconds`, redrawing a single console line.

    Returns 0 when the countdown finishes; returns None if interrupted with
    Ctrl-C (unchanged from the original behavior). Fixes: removed the unused
    `const` local, replaced the Python-2-only "except X, e" clause (the bound
    exception was unused anyway), and switched to floor division so whole
    minutes are printed under Python 3 as well.
    """
    time_start = time.time()
    totaltime = seconds + minutes * 60
    passedtime = 0
    while True:
        try:
            remaining = totaltime - passedtime
            secs = remaining % 60
            mins = remaining // 60
            sys.stdout.write("\r{minutes} Minutes {seconds} Seconds".format(minutes=mins, seconds=secs))
            sys.stdout.flush()
            time.sleep(1)
            # Recompute from the wall clock so drift in sleep() doesn't accumulate.
            passedtime = int(time.time() - time_start)
            if totaltime - passedtime < 0:
                return 0
        except KeyboardInterrupt:
            break
def beep(beeppath):
    """Play the alarm sound at `beeppath` (mplayer on Linux, winsound on Windows)."""
    system = platform.system()
    if system == "Linux":
        # Output is discarded; "|| true" swallows a missing-mplayer failure.
        sound_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), beeppath)
        popen("mplayer " + sound_file + " > /dev/null 2>&1 || true")
    elif system == "Windows":
        # NOTE(review): winsound.PlaySound normally takes a flags argument —
        # confirm this call on Windows.
        winsound.PlaySound(beeppath)
if len(sys.argv) != 2:
    print("Too many/too few arguments. Only provide path to alarm sound as argument!")
    sys.exit(1)

alarm = sys.argv[1]
print("START DRAWING AFTER BEEP")
beep(alarm)
timemyshit(4, 0)
beep(alarm)
# The first separator has seven dashes in the original output; keep it verbatim.
print("-------NEXT DRAWING")
# Progressively shorter drawing sessions.
for session_minutes, session_seconds in ((2, 0), (1, 0), (0, 30), (0, 15)):
    timemyshit(session_minutes, session_seconds)
    beep(alarm)
    print("------NEXT DRAWING")
timemyshit(0, 5)
beep(alarm)
print("------DONE")
| 19.932432 | 95 | 0.674576 | 208 | 1,475 | 4.754808 | 0.370192 | 0.056623 | 0.077856 | 0.084934 | 0.215369 | 0.19818 | 0.19818 | 0.19818 | 0.19818 | 0 | 0 | 0.027822 | 0.147119 | 1,475 | 73 | 96 | 20.205479 | 0.758347 | 0 | 0 | 0.267857 | 0 | 0 | 0.197966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.089286 | 0.125 | null | null | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
92ab3e93dcfa20bbc722f22a63e247eb8ce32632 | 3,117 | py | Python | 18-Inheritance/18.py | ericchen12377/CS61A_LearningDoc | 31f23962b0e2834795bf61eeb0f4884cc5da1809 | [
"MIT"
] | 2 | 2020-04-24T18:36:53.000Z | 2020-04-25T00:15:55.000Z | 18-Inheritance/18.py | ericchen12377/CS61A_LearningDoc | 31f23962b0e2834795bf61eeb0f4884cc5da1809 | [
"MIT"
] | null | null | null | 18-Inheritance/18.py | ericchen12377/CS61A_LearningDoc | 31f23962b0e2834795bf61eeb0f4884cc5da1809 | [
"MIT"
] | null | null | null | class Account:
"""An account has a balance and a holder.
>>> a = Account('John')
>>> a.holder
'John'
>>> a.deposit(100)
100
>>> a.withdraw(90)
10
>>> a.withdraw(90)
'Insufficient funds'
>>> a.balance
10
>>> a.interest
0.02
"""
interest = 0.02 # A class attribute
def __init__(self, account_holder):
self.holder = account_holder
self.balance = 0
def deposit(self, amount):
"""Add amount to balance."""
self.balance = self.balance + amount
return self.balance
def withdraw(self, amount):
"""Subtract amount from balance if funds are available."""
if amount > self.balance:
return 'Insufficient funds'
self.balance = self.balance - amount
return self.balance
class CheckingAccount(Account):
    """A bank account that charges a fee for withdrawals.

    >>> ch = CheckingAccount('Jack')
    >>> ch.balance = 20
    >>> ch.withdraw(5)
    14
    >>> ch.interest
    0.01
    """
    withdraw_fee = 1
    interest = 0.01

    def withdraw(self, amount):
        """Withdraw amount plus the fee via the base implementation."""
        # Equivalently: super().withdraw(amount + self.withdraw_fee).
        # The original kept that alternative as unreachable code after the
        # return statement; it is now only a comment.
        return Account.withdraw(self, amount + self.withdraw_fee)
class Bank:
    """A bank that holds accounts and pays interest on each of them.

    >>> bank = Bank()
    >>> john = bank.open_account('John', 10)
    >>> jack = bank.open_account('Jack', 5, CheckingAccount)
    >>> jack.interest
    0.01
    >>> john.interest = 0.06
    >>> bank.pay_interest()
    >>> john.balance
    10.6
    >>> jack.balance
    5.05
    """
    def __init__(self):
        self.accounts = []

    def open_account(self, holder, amount, account_type=Account):
        """Create an account_type for holder, deposit amount, and return it."""
        new_account = account_type(holder)
        new_account.deposit(amount)
        self.accounts.append(new_account)
        return new_account

    def pay_interest(self):
        """Deposit each account's accrued interest back into that account."""
        for acct in self.accounts:
            acct.deposit(acct.balance * acct.interest)
# Inheritance Example
class A:
    # Root of the toy inheritance example; z is a class attribute.
    z = -1
    def f(self, x):
        # Builds a B one step closer to the y == 0 base case.
        return B(x-1)
class B(A):
    """Middle class of the example; instantiation may recurse through f."""
    n = 4
    def __init__(self, y):
        # A truthy y recurses via f (C overrides f to the identity);
        # y == 0 bottoms out by storing a C instance in z.
        self.z = self.f(y) if y else C(y + 1)
class C(B):
    def f(self, x):
        # Identity override: stops the recursion started in B.__init__.
        return x
def WWPD():
    """What would Python Display?

    Doctest tour of attribute lookup on the A/B/C hierarchy defined above.

    >>> a = A()
    >>> b = B(1)
    >>> b.n = 5
    >>> C(2).n
    4
    >>> C(2).z
    2
    >>> a.z == C.z
    True
    >>> a.z == b.z
    False
    >>> b.z.z.z
    1
    """
# Multiple Inheritance
class SavingsAccount(Account):
    """A bank account that levies a flat fee on every deposit."""
    deposit_fee = 2

    def deposit(self, amount):
        """Deposit amount minus the fee via the base implementation."""
        net = amount - self.deposit_fee
        return Account.deposit(self, net)
class AsSeenOnTVAccount(CheckingAccount, SavingsAccount):
    """A bank account that charges for everything.

    Multiple inheritance: picks up withdraw_fee from CheckingAccount and
    deposit_fee from SavingsAccount.
    """
    def __init__(self, account_holder):
        self.holder = account_holder
        self.balance = 1           # A free dollar!
# Class names along AsSeenOnTVAccount's method resolution order.
supers = [c.__name__ for c in AsSeenOnTVAccount.mro()]
| 21.204082 | 66 | 0.571704 | 389 | 3,117 | 4.485861 | 0.226221 | 0.056734 | 0.025215 | 0.027507 | 0.187966 | 0.170774 | 0.155874 | 0.118052 | 0.066476 | 0.066476 | 0 | 0.02779 | 0.295797 | 3,117 | 146 | 67 | 21.349315 | 0.767198 | 0.352583 | 0 | 0.222222 | 0 | 0 | 0.010164 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.240741 | false | 0 | 0 | 0.055556 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
92ad6e79f18154a6e10ad7160b2112d49f9c9fab | 422 | py | Python | recipes/2019_05_24_use_cluster_metadata.py | Qotto/tonga | a6ae223ebf0fb7b317118b762102f1909435d1cf | [
"MIT"
] | 1 | 2019-12-17T10:06:03.000Z | 2019-12-17T10:06:03.000Z | recipes/2019_05_24_use_cluster_metadata.py | Qotto/tonga | a6ae223ebf0fb7b317118b762102f1909435d1cf | [
"MIT"
] | 1 | 2019-07-04T15:22:58.000Z | 2019-07-05T07:23:31.000Z | recipes/2019_05_24_use_cluster_metadata.py | Qotto/tonga | a6ae223ebf0fb7b317118b762102f1909435d1cf | [
"MIT"
] | 2 | 2019-06-05T15:40:49.000Z | 2019-12-10T09:24:23.000Z | from kafka.client import KafkaClient
from kafka.cluster import ClusterMetadata
# NOTE(review): this script contacts a live broker at localhost:9092 on import.
client = KafkaClient(bootstrap_servers='localhost:9092', client_id='test_store_builder')
# Poll the client for the pending cluster-metadata update request.
response_metadata = client.poll(future=client.cluster.request_update())
cluster_metadata = ClusterMetadata(bootstrap_servers='localhost:9092')
# Feed the freshly fetched metadata into the standalone ClusterMetadata object.
cluster_metadata.update_metadata(response_metadata[0])
# Look up the partitions of the topic used by the assignor tests.
cluster_metadata.partitions_for_topic('test-assignor')
| 46.888889 | 88 | 0.85545 | 51 | 422 | 6.803922 | 0.509804 | 0.129683 | 0.144092 | 0.167147 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022444 | 0.049763 | 422 | 8 | 89 | 52.75 | 0.842893 | 0 | 0 | 0 | 0 | 0 | 0.13981 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
92aead2ccb81a82566caf325419582de15dd2307 | 118 | py | Python | configs/cascade_rcnn/ranksort_cascade_rcnn_r50_fpn_1x_coco_stdw.py | yinchimaoliang/ranksortloss | d2103598448633a108dbda4143ab8c9c3fd515f8 | [
"Apache-2.0"
] | 210 | 2021-07-27T01:30:05.000Z | 2022-03-29T07:52:35.000Z | configs/cascade_rcnn/ranksort_cascade_rcnn_r50_fpn_1x_coco_stdw.py | yinchimaoliang/ranksortloss | d2103598448633a108dbda4143ab8c9c3fd515f8 | [
"Apache-2.0"
] | 12 | 2021-07-31T06:36:36.000Z | 2022-01-04T01:51:29.000Z | configs/cascade_rcnn/ranksort_cascade_rcnn_r50_fpn_1x_coco_stdw.py | yinchimaoliang/ranksortloss | d2103598448633a108dbda4143ab8c9c3fd515f8 | [
"Apache-2.0"
] | 23 | 2021-07-28T01:30:33.000Z | 2022-03-03T11:18:56.000Z | _base_ = 'ranksort_cascade_rcnn_r50_fpn_1x_coco.py'
model = dict(roi_head=dict(stage_loss_weights=[1, 0.50, 0.25]))
| 23.6 | 63 | 0.771186 | 22 | 118 | 3.636364 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.092593 | 0.084746 | 118 | 4 | 64 | 29.5 | 0.648148 | 0 | 0 | 0 | 0 | 0 | 0.338983 | 0.338983 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
92b74679ba57125d3c1b729ba738e5bfeaf108da | 5,245 | py | Python | gse_infra_configuration/kube_init_manager/function.py | cynpna/gs-engine | 6137d3c53621cfa044a90822c18bfceea16caa0a | [
"Apache-2.0"
] | 13 | 2020-10-14T07:45:08.000Z | 2021-10-01T08:19:56.000Z | gse_infra_configuration/kube_init_manager/function.py | cynpna/gs-engine | 6137d3c53621cfa044a90822c18bfceea16caa0a | [
"Apache-2.0"
] | null | null | null | gse_infra_configuration/kube_init_manager/function.py | cynpna/gs-engine | 6137d3c53621cfa044a90822c18bfceea16caa0a | [
"Apache-2.0"
] | 17 | 2020-11-09T05:16:42.000Z | 2021-12-28T08:04:33.000Z | import paramiko, json, os
from kubernetes import client, config
from apps.common import static_value
from apps.common.utils import init_kubernetes
from apps.network_manager.api import NicApi
def get_master_server_info():
    """Load the master-node connection settings stored next to this module."""
    config_path = os.path.dirname(os.path.abspath(__file__)) + '/master_server_info.json'
    with open(config_path, 'r', encoding='utf-8') as config_file:
        return json.load(config_file)
def _run_and_drain(cli, command):
    """Run `command` over the SSH client and consume stdout until EOF."""
    stdin, stdout, stderr = cli.exec_command(command)
    while True:
        if not stdout.readline():
            break


def init_kube(network_type, tplg_plcy):
    """(Re)initialize the Kubernetes cluster over SSH on the master node.

    Runs the init/token/API-key scripts on the master, then the worker init
    script on every worker node, updating the reset status as it progresses.
    The four identical output-draining loops were factored into
    _run_and_drain, and the SSH connection is now closed even on failure.
    """
    # 'none' means "pass no topology-policy option" to the init script.
    tplg_plcy_opt = '' if tplg_plcy == 'none' else tplg_plcy
    set_reset_status('RUN')
    server_info = get_master_server_info()
    cli = paramiko.SSHClient()
    cli.set_missing_host_key_policy(paramiko.AutoAddPolicy)
    cli.connect(server_info['SERVER'], port=22,
                username=server_info['USER'], password=server_info['PWD'])
    try:
        set_reset_status('PROC')
        _run_and_drain(cli, "bash " + server_info['K8S_INIT_FILE'] + " "
                       + network_type + " " + tplg_plcy_opt)
        _run_and_drain(cli, "bash " + server_info['K8S_TOKEN_FILE'])
        _run_and_drain(cli, "bash " + server_info['K8S_API_KEY_FILE'])
        for node_ip in server_info['WORKER_NODE_IP']:
            # NOTE(review): the password is embedded in the remote command
            # line; consider key-based auth instead.
            _run_and_drain(cli, "sshpass -p \"" + server_info['PWD'] + "\" ssh gedge@"
                           + node_ip + " 'bash -s' < " + server_info['K8S_WORKER_INIT_FILE'])
    finally:
        cli.close()
    set_reset_status('END')
def renew_acc_key():
    """Fetch a fresh API key from the master node and store it in the config.

    The SSH connection is now closed in a finally block so it is released
    even if the remote command fails.
    """
    server_info = get_master_server_info()
    cli = paramiko.SSHClient()
    cli.set_missing_host_key_policy(paramiko.AutoAddPolicy)
    cli.connect(server_info['SERVER'], port=22,
                username=server_info['USER'], password=server_info['PWD'])
    try:
        stdin, stdout, stderr = cli.exec_command("bash " + server_info['K8S_API_KEY_FILE'])
        api_key = stdout.readline()[:-1]  # strip the trailing newline
    finally:
        cli.close()
    with open(static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json', 'r', encoding='utf-8') as f:
        kubeconfig = json.load(f)
    with open(static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json', 'w', encoding='utf-8') as f:
        kubeconfig['acc_key'] = api_key
        json.dump(kubeconfig, f, indent='\t')
    init_kubernetes()
def set_kube_network(network):
    """Persist the cluster network plugin name into the Kubernetes config file."""
    config_file = static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json'
    with open(config_file, 'r', encoding='utf-8') as handle:
        kubeconfig = json.load(handle)
    kubeconfig['network'] = network
    with open(config_file, 'w', encoding='utf-8') as handle:
        json.dump(kubeconfig, handle, indent='\t')
def get_kube_network():
    """Return the cluster network plugin name stored in the config file."""
    config_file = static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json'
    with open(config_file, 'r', encoding='utf-8') as handle:
        return json.load(handle)['network']
def set_topology_policy(tplg_plcy):
    """Persist the topology-manager policy name into the config file.

    Short names are normalized to their canonical config spellings via a
    mapping (replacing the original if/elif chain); unknown values are
    stored unchanged, matching the previous behavior.
    """
    canonical = {
        'none': 'None',
        'single': 'Single-Numa-Node',
        'best': 'Best-Effort',
        'restricted': 'Restricted',
    }
    config_file = static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json'
    with open(config_file, 'r', encoding='utf-8') as f:
        kubeconfig = json.load(f)
    kubeconfig['topology_policy'] = canonical.get(tplg_plcy, tplg_plcy)
    with open(config_file, 'w', encoding='utf-8') as f:
        json.dump(kubeconfig, f, indent='\t')
def get_topology_policy():
    """Return the topology-manager policy name stored in the config file."""
    config_file = static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json'
    with open(config_file, 'r', encoding='utf-8') as handle:
        return json.load(handle)['topology_policy']
def set_reset_status(status):
    """Persist the cluster reset progress marker (e.g. RUN/PROC/END)."""
    config_file = static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json'
    with open(config_file, 'r', encoding='utf-8') as handle:
        kubeconfig = json.load(handle)
    kubeconfig['reset_status'] = status
    with open(config_file, 'w', encoding='utf-8') as handle:
        json.dump(kubeconfig, handle, indent='\t')
def get_reset_status():
    """Return the cluster reset progress marker stored in the config file."""
    config_file = static_value.KUBE_CONFIG_PATH + '/kubernetes_config.json'
    with open(config_file, 'r', encoding='utf-8') as handle:
        return json.load(handle)['reset_status']
def create_default_multus():
    """Create the default Multus NIC definition in the working namespace.

    The API response — previously assigned to an unused local — is now
    returned so callers can inspect it (backward compatible: existing
    callers ignored the None return).
    """
    base_path = os.path.dirname(os.path.abspath(__file__))
    with open(base_path + '/init_json/nic-config.json', 'r', encoding='utf-8') as f:
        nic_json = json.load(f)
    nic_api = NicApi()
    return nic_api.create_namespaced_nic(namespace=static_value.NAMESPACE,
                                         body=nic_json['multus'])
def create_default_sriov():
    """Create the default SR-IOV NIC definition in the working namespace.

    The API response — previously assigned to an unused local — is now
    returned so callers can inspect it (backward compatible: existing
    callers ignored the None return).
    """
    base_path = os.path.dirname(os.path.abspath(__file__))
    with open(base_path + '/init_json/nic-config.json', 'r', encoding='utf-8') as f:
        nic_json = json.load(f)
    nic_api = NicApi()
    return nic_api.create_namespaced_nic(namespace=static_value.NAMESPACE,
                                         body=nic_json['sriov'])
| 36.423611 | 170 | 0.668255 | 719 | 5,245 | 4.600834 | 0.159944 | 0.063482 | 0.050786 | 0.05925 | 0.720979 | 0.720979 | 0.690447 | 0.666868 | 0.637848 | 0.637848 | 0 | 0.005694 | 0.196378 | 5,245 | 143 | 171 | 36.678322 | 0.779122 | 0 | 0 | 0.577982 | 0 | 0 | 0.150048 | 0.062726 | 0 | 0 | 0 | 0 | 0 | 1 | 0.100917 | false | 0.027523 | 0.045872 | 0 | 0.183486 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
92b9193e4bbb63e5538518949a6bbe74a91b48de | 493 | py | Python | module/exceptions.py | Xetera/IreneBot | e768bb3a0d2517ecb00d50da89d66ac0dd1498d0 | [
"MIT"
] | 1 | 2021-10-02T16:05:11.000Z | 2021-10-02T16:05:11.000Z | module/exceptions.py | Xetera/IreneBot | e768bb3a0d2517ecb00d50da89d66ac0dd1498d0 | [
"MIT"
] | null | null | null | module/exceptions.py | Xetera/IreneBot | e768bb3a0d2517ecb00d50da89d66ac0dd1498d0 | [
"MIT"
] | null | null | null | class TooLarge(Exception):
"""The input was too long."""
def __init__(self):
super(TooLarge, self).__init__("That number was too large.")
class ImproperFormat(Exception):
    """Raised when the supplied input is not in a recognized format."""
    def __init__(self):
        super().__init__("An Invalid Format was given.")
class NoTimeZone(Exception):
    """Raised when a user has no timezone configured."""
    def __init__(self):
        super().__init__("The user did not have a timezone.")
| 27.388889 | 77 | 0.667343 | 60 | 493 | 5.083333 | 0.483333 | 0.068852 | 0.108197 | 0.157377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.198783 | 493 | 17 | 78 | 29 | 0.772152 | 0.146045 | 0 | 0.333333 | 0 | 0 | 0.214815 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
2b7bcf15263636b3a1ae53117583a824dc22764b | 6,502 | py | Python | unit_tests/LLC1/test_llc1_description.py | LandRegistry/maintain-frontend | d92446a9972ebbcd9a43a7a7444a528aa2f30bf7 | [
"MIT"
] | 1 | 2019-10-03T13:58:29.000Z | 2019-10-03T13:58:29.000Z | unit_tests/LLC1/test_llc1_description.py | LandRegistry/maintain-frontend | d92446a9972ebbcd9a43a7a7444a528aa2f30bf7 | [
"MIT"
] | null | null | null | unit_tests/LLC1/test_llc1_description.py | LandRegistry/maintain-frontend | d92446a9972ebbcd9a43a7a7444a528aa2f30bf7 | [
"MIT"
] | 1 | 2021-04-11T05:24:57.000Z | 2021-04-11T05:24:57.000Z | from flask_testing import TestCase
from unit_tests.utilities import Utilities
from unittest.mock import MagicMock, patch
from maintain_frontend import main
from maintain_frontend.dependencies.session_api.session import Session
from maintain_frontend.constants.permissions import Permissions
from maintain_frontend.models import LLC1Search
from maintain_frontend.main import app
from flask import url_for, g
import json
class TestLLC1Description(TestCase):
    def create_app(self):
        # Flask-Testing hook: return the app under test with testing enabled
        # and the session cookie machinery mocked by the shared test utilities.
        main.app.testing = True
        Utilities.mock_session_cookie_flask_test(self)
        return main.app
    def setUp(self):
        # NOTE(review): 'Testing' is not Flask's TESTING config key — confirm
        # whether app.testing (set in create_app) already covers the intent.
        main.app.config['Testing'] = True
        # Disable CSRF so form posts in tests don't need a token.
        main.app.config['WTF_CSRF_ENABLED'] = False
    def test_get_redirects_when_no_state(self):
        # With no in-progress LLC1 search state, GET must redirect (302)
        # back to the start of the create-llc1 flow.
        self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
        self.mock_session.return_value.llc1_state = None
        self.mock_session.return_value.user.permissions = [Permissions.request_llc1]
        response = self.client.get(url_for("create_llc1.llc1_get_description"))
        self.assert_status(response, 302)
        self.assertRedirects(response, url_for("create_llc1.create_llc1"))
    def test_get_renders_when_state_ok(self):
        # With a valid in-progress search state, GET renders the
        # description-entry template with a 200.
        self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
        self.mock_session.return_value.llc1_state = LLC1Search()
        self.mock_session.return_value.user.permissions = [Permissions.request_llc1]
        response = self.client.get(url_for("create_llc1.llc1_get_description"))
        self.assert_status(response, 200)
        self.assert_template_used("search_description.html")
    def test_post_redirects_when_no_state(self):
        # POSTing a description without an in-progress search state must also
        # redirect back to the start of the flow.
        self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
        self.mock_session.return_value.llc1_state = None
        self.mock_session.return_value.user.permissions = [Permissions.request_llc1]
        response = self.client.post(url_for("create_llc1.llc1_set_description"),
                                    data={'charge-geographic-description': 'foo', 'hasAddress': None})
        self.assert_status(response, 302)
        self.assertRedirects(response, url_for("create_llc1.create_llc1"))
    def test_post_renders_error_when_no_description(self):
        # No address selected and an empty description is a validation error:
        # the form is re-rendered with a 400.
        self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
        self.mock_session.return_value.llc1_state = LLC1Search()
        self.mock_session.return_value.user.permissions = [Permissions.request_llc1]
        response = self.client.post(url_for("create_llc1.llc1_set_description"),
                                    data={'has-address': 'No', 'charge-geographic-description': ''})
        self.assertStatus(response, 400)
        self.assert_template_used('search_description.html')
@patch('maintain_frontend.app.requests.Session')
@patch('maintain_frontend.add_land_charge.address_confirmation.AddressConverter')
def test_post_redirects_when_single_address_chosen(self, mock_address_converter, session):
    """POSTing a selected address stores it as the description and redirects to results.

    Fix: stacked ``@patch`` decorators are applied bottom-up, so the first
    mock argument belongs to the *bottom* decorator (AddressConverter) and
    the second to the top one (requests.Session). The original parameter
    order (``session, mock_address_converter``) had them swapped, so each
    mock was configured as if it were the other.
    """
    with app.test_request_context():
        self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
        self.mock_session.return_value.llc1_state = LLC1Search()
        self.mock_session.return_value.user.permissions = [Permissions.request_llc1]
        g.session = MagicMock()

        # Back-end answers 201 Created to the search-creation POST.
        backend_response = MagicMock()
        backend_response.status_code = 201
        session.return_value.post.return_value = backend_response

        selected_address = {
            'address': 'display address',
            'line_1': 'Flat 1',
            'line_2': 'Place',
            'line_3': 'Holder',
            'line_4': 'Flat 1',
            'line_5': 'Flat 1',
            'line_6': 'Flat 1',
            'postcode': 'postcode',
            'uprn': 123456789
        }
        mock_address_converter.to_charge_address.return_value = selected_address

        response = self.client.post(url_for("create_llc1.llc1_set_description"), data={
            'has-address': 'ProvideAddress',
            'selected-address': json.dumps(selected_address),
        })

        # Description is the comma-joined address lines followed by the postcode.
        self.assertEqual(self.mock_session.return_value.llc1_state.description,
                         'Flat 1, Place, Holder, Flat 1, Flat 1, Flat 1 postcode')
        self.assert_status(response, 302)
        self.assertRedirects(response, url_for("create_llc1.llc1_get_result"))
@patch('maintain_frontend.app.requests.Session')
def test_post_redirects_when_no_single_address_chosen_with_description(self, session):
    """POSTing a free-text description (no address) stores it and redirects to results."""
    with app.test_request_context():
        self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
        mocked_session = self.mock_session.return_value
        mocked_session.llc1_state = LLC1Search()
        mocked_session.user.permissions = [Permissions.request_llc1]
        g.session = MagicMock()

        # Back-end answers 201 Created to the search-creation POST.
        backend_response = MagicMock()
        backend_response.status_code = 201
        session.return_value.post.return_value = backend_response

        form_data = {
            'has-address': 'No',
            'charge-geographic-description': 'This is a valid description',
        }
        result = self.client.post(url_for("create_llc1.llc1_set_description"), data=form_data)

        self.assertEqual(mocked_session.llc1_state.description, 'This is a valid description')
        self.assert_status(result, 302)
        self.assertRedirects(result, url_for("create_llc1.llc1_get_result"))
def test_get_without_permission(self):
    """A GET from a user lacking the LLC1 permission redirects to the not-authorised page."""
    self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
    self.mock_session.return_value.user.permissions = []

    result = self.client.get(url_for("create_llc1.llc1_get_description"))

    self.assertStatus(result, 302)
    self.assertRedirects(result, '/not-authorised')
def test_post_without_permission(self):
    """A POST from a user lacking the LLC1 permission redirects to the not-authorised page."""
    self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
    self.mock_session.return_value.user.permissions = []

    form_data = {'charge-geographic-description': '', 'hasAddress': 'No'}
    result = self.client.post(url_for("create_llc1.llc1_set_description"), data=form_data)

    self.assertStatus(result, 302)
    self.assertRedirects(result, '/not-authorised')
| 50.015385 | 117 | 0.679329 | 744 | 6,502 | 5.634409 | 0.16129 | 0.055105 | 0.07729 | 0.080153 | 0.729962 | 0.724237 | 0.696803 | 0.667939 | 0.667939 | 0.636927 | 0 | 0.019121 | 0.219779 | 6,502 | 129 | 118 | 50.403101 | 0.807215 | 0 | 0 | 0.513761 | 0 | 0 | 0.184405 | 0.102276 | 0 | 0 | 0 | 0 | 0.165138 | 1 | 0.091743 | false | 0 | 0.091743 | 0 | 0.201835 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.