hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0d535fd4b7b47a77e6a1510072aa453f39a3e0ae | 399 | py | Python | xfdlfw/nn/__init__.py | vkola-lab/multi-GPU | d50cff0d587b640fb3af94329102d3eacdd70aa5 | [
"MIT"
] | null | null | null | xfdlfw/nn/__init__.py | vkola-lab/multi-GPU | d50cff0d587b640fb3af94329102d3eacdd70aa5 | [
"MIT"
] | null | null | null | xfdlfw/nn/__init__.py | vkola-lab/multi-GPU | d50cff0d587b640fb3af94329102d3eacdd70aa5 | [
"MIT"
] | null | null | null | """
Created on Thu Aug 26 15:42:38 2021
@author: cxue2
"""
from .gan_discriminator_loss import GANDiscriminatorLoss
from .gan_generator_loss import GANGeneratorLoss
from .gan_modified_generator_loss import GANModifiedGeneratorLoss
from .wgan_discriminator_loss import WGANDiscriminatorLoss
from .wgan_generator_loss import WGANGeneratorLoss
from .wgan_grad_penalty_loss import WGANGradPenaltyLoss | 30.692308 | 65 | 0.867168 | 49 | 399 | 6.77551 | 0.55102 | 0.180723 | 0.171687 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035912 | 0.092732 | 399 | 13 | 66 | 30.692308 | 0.881215 | 0.12782 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
0d57a880eab83a081769f8a06ba2613fecd1c89d | 2,793 | py | Python | formula-parser-tests.py | wp777/stv-compute | 313b574c43ef87b629e70c25c38dbb7b24d1f130 | [
"MIT"
] | null | null | null | formula-parser-tests.py | wp777/stv-compute | 313b574c43ef87b629e70c25c38dbb7b24d1f130 | [
"MIT"
] | null | null | null | formula-parser-tests.py | wp777/stv-compute | 313b574c43ef87b629e70c25c38dbb7b24d1f130 | [
"MIT"
] | null | null | null | from stv.parsers import FormulaParser
def testEval(formula, varValues, expected):
print(" using", varValues, "and expecting", expected)
evaluated = formula.expression.evaluate(varValues)
print(" evaluated to", evaluated)
assert evaluated == expected
fp = FormulaParser()
fs = "<<X,YYY>>F(A&B&C&D&EFG)"
print("Test #1: parsing '" + fs + "':")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X, YYY>>F(A & (B & (C & (D & EFG))))"
print()
fs = "<<X,YYY>>G(A|B|C|D|EFG)"
print("Test #2: parsing '" + fs + "':")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X, YYY>>G(A | (B | (C | (D | EFG))))"
print()
fs = "<<X,YYY>>F(A&B|C|D|E&F&G|H)"
print("Test #3: parsing '" + fs + "':")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X, YYY>>F((A & B) | (C | (D | ((E & (F & G)) | H))))"
print()
fs = "<<X,YYY>>F(A & (B|C) | D | E & F & G | H)"
print("Test #4: parsing '" + fs + "':")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X, YYY>>F((A & (B | C)) | (D | ((E & (F & G)) | H)))"
print()
fs = "<<X,YYY>>F(A&!B|C!=2&(AAA=BBB&CX)&A=3)"
print("Test #5: parsing '" + fs + "':")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X, YYY>>F((A & !B) | ((C != 2) & (((AAA = BBB) & CX) & (A = 3))))"
print()
fs = "<<X>>F(A&B)"
print("Test #6: parsing & evaluating '" + fs + "'")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X>>F(A & B)"
testEval(f, {"A": 1, "B": 0}, False)
testEval(f, {"A": 1, "B": 1}, True)
testEval(f, {"A": 0, "B": 0}, False)
print()
fs = "<<X>>F(A|B&C)"
print("Test #7: parsing & evaluating '" + fs + "'")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X>>F(A | (B & C))"
print("using: A=1, B=1, C=1")
testEval(f, {"A": 1, "B": 1, "C": 1}, True)
testEval(f, {"A": 0, "B": 1, "C": 1}, True)
testEval(f, {"A": 0, "B": 0, "C": 1}, False)
testEval(f, {"A": 1, "B": 0, "C": 1}, True)
print()
fs = "<<X>>F(A=3)"
print("Test #8: parsing & evaluating '" + fs + "'")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X>>F(A = 3)"
testEval(f, {"A": 1}, False)
testEval(f, {"A": 3}, True)
testEval(f, {"A": 33}, False)
print()
fs = "<<X>>F(A!=test)"
print("Test #9: parsing & evaluating '" + fs + "'")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X>>F(A != test)"
testEval(f, {"A": "blah"}, True)
testEval(f, {"A": "test"}, False)
print()
fs = "<<X>>F(A&!B | C != test & D)"
print("Test #10: parsing & evaluating '" + fs + "'")
f = fp.parseAtlFormula(fs)
print(f)
assert str(f) == "<<X>>F((A & !B) | ((C != test) & D))"
testEval(f, {"A": 1, "B": 0, "C": "test", "D": 0}, True)
testEval(f, {"A": 1, "B": 1, "C": "test", "D": 0}, False)
testEval(f, {"A": 1, "B": 1, "C": "test", "D": 1}, False)
testEval(f, {"A": 1, "B": 1, "C": "xxxx", "D": 1}, True)
print()
print("OK")
print()
| 27.382353 | 86 | 0.509488 | 480 | 2,793 | 2.964583 | 0.11875 | 0.047786 | 0.112439 | 0.033732 | 0.729445 | 0.711174 | 0.676739 | 0.631764 | 0.562895 | 0.499649 | 0 | 0.023509 | 0.177587 | 2,793 | 101 | 87 | 27.653465 | 0.595995 | 0 | 0 | 0.360465 | 0 | 0.069767 | 0.346223 | 0.039742 | 0 | 0 | 0 | 0 | 0.127907 | 1 | 0.011628 | false | 0 | 0.011628 | 0 | 0.023256 | 0.406977 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
b49cf3d9b5af46637124a9db7ce87edb90719eb6 | 755 | py | Python | www/handlers.py | YutingPang/webapp | 2cd8320eb8553778f2d5234f58ce8f05ea8d8852 | [
"Apache-2.0"
] | null | null | null | www/handlers.py | YutingPang/webapp | 2cd8320eb8553778f2d5234f58ce8f05ea8d8852 | [
"Apache-2.0"
] | null | null | null | www/handlers.py | YutingPang/webapp | 2cd8320eb8553778f2d5234f58ce8f05ea8d8852 | [
"Apache-2.0"
] | null | null | null | <<<<<<< HEAD
#!/usr/bin/env python3
=======
#!/usr/bin/env pyhton3
>>>>>>> 90218e27d60363c308d3845106509d5782bf7926
# -*- coding: utf-8 -*-
'''
url handlers
'''
import re
import time
import json
import logging
import hashlib
import base64
<<<<<<< HEAD
import asycio
from coroweb import get, post
from models import User, Comment, Blogs, next_id
@get('/')
async def index(request):
users = await User.findAll()
return {
'__template__': 'test.html',
'user': users
=======
import asyncio
from coroweb import get, post
from models import User, Comment, Blog, next_id
@get('/')
async def index(request):
return {
'__template__': 'test.html',
'users': users
>>>>>>> 90218e27d60363c308d3845106509d5782bf7926
}
| 18.414634 | 48 | 0.655629 | 85 | 755 | 5.705882 | 0.505882 | 0.024742 | 0.037113 | 0.082474 | 0.329897 | 0.329897 | 0.329897 | 0.210309 | 0.210309 | 0.210309 | 0 | 0.115447 | 0.18543 | 755 | 40 | 49 | 18.875 | 0.673171 | 0 | 0 | 0.533333 | 0 | 0 | 0.079341 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.4 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
b4a62421835340b5438b91eb06c732bb5d79b00f | 6,913 | py | Python | mavsdk/log_files_pb2.py | thomas-watters-skydio/MAVSDK-Python | e0f9db072e802a06a792a4ed6c64ce75f900167f | [
"BSD-3-Clause"
] | null | null | null | mavsdk/log_files_pb2.py | thomas-watters-skydio/MAVSDK-Python | e0f9db072e802a06a792a4ed6c64ce75f900167f | [
"BSD-3-Clause"
] | null | null | null | mavsdk/log_files_pb2.py | thomas-watters-skydio/MAVSDK-Python | e0f9db072e802a06a792a4ed6c64ce75f900167f | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: log_files/log_files.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import mavsdk_options_pb2 as mavsdk__options__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x19log_files/log_files.proto\x12\x14mavsdk.rpc.log_files\x1a\x14mavsdk_options.proto"\x13\n\x11GetEntriesRequest"\x82\x01\n\x12GetEntriesResponse\x12>\n\x10log_files_result\x18\x01 \x01(\x0b\x32$.mavsdk.rpc.log_files.LogFilesResult\x12,\n\x07\x65ntries\x18\x02 \x03(\x0b\x32\x1b.mavsdk.rpc.log_files.Entry"[\n\x1fSubscribeDownloadLogFileRequest\x12*\n\x05\x65ntry\x18\x01 \x01(\x0b\x32\x1b.mavsdk.rpc.log_files.Entry\x12\x0c\n\x04path\x18\x02 \x01(\t"\x8f\x01\n\x17\x44ownloadLogFileResponse\x12>\n\x10log_files_result\x18\x01 \x01(\x0b\x32$.mavsdk.rpc.log_files.LogFilesResult\x12\x34\n\x08progress\x18\x02 \x01(\x0b\x32".mavsdk.rpc.log_files.ProgressData")\n\x0cProgressData\x12\x19\n\x08progress\x18\x01 \x01(\x02\x42\x07\x82\xb5\x18\x03NaN"5\n\x05\x45ntry\x12\n\n\x02id\x18\x01 \x01(\r\x12\x0c\n\x04\x64\x61te\x18\x02 \x01(\t\x12\x12\n\nsize_bytes\x18\x03 \x01(\r"\xa1\x02\n\x0eLogFilesResult\x12;\n\x06result\x18\x01 \x01(\x0e\x32+.mavsdk.rpc.log_files.LogFilesResult.Result\x12\x12\n\nresult_str\x18\x02 \x01(\t"\xbd\x01\n\x06Result\x12\x12\n\x0eRESULT_UNKNOWN\x10\x00\x12\x12\n\x0eRESULT_SUCCESS\x10\x01\x12\x0f\n\x0bRESULT_NEXT\x10\x02\x12\x16\n\x12RESULT_NO_LOGFILES\x10\x03\x12\x12\n\x0eRESULT_TIMEOUT\x10\x04\x12\x1b\n\x17RESULT_INVALID_ARGUMENT\x10\x05\x12\x1b\n\x17RESULT_FILE_OPEN_FAILED\x10\x06\x12\x14\n\x10RESULT_NO_SYSTEM\x10\x07\x32\x83\x02\n\x0fLogFilesService\x12\x61\n\nGetEntries\x12\'.mavsdk.rpc.log_files.GetEntriesRequest\x1a(.mavsdk.rpc.log_files.GetEntriesResponse"\x00\x12\x8c\x01\n\x18SubscribeDownloadLogFile\x12\x35.mavsdk.rpc.log_files.SubscribeDownloadLogFileRequest\x1a-.mavsdk.rpc.log_files.DownloadLogFileResponse"\x08\x80\xb5\x18\x00\x88\xb5\x18\x01\x30\x01\x42$\n\x13io.mavsdk.log_filesB\rLogFilesProtob\x06proto3'
)
_GETENTRIESREQUEST = DESCRIPTOR.message_types_by_name["GetEntriesRequest"]
_GETENTRIESRESPONSE = DESCRIPTOR.message_types_by_name["GetEntriesResponse"]
_SUBSCRIBEDOWNLOADLOGFILEREQUEST = DESCRIPTOR.message_types_by_name[
"SubscribeDownloadLogFileRequest"
]
_DOWNLOADLOGFILERESPONSE = DESCRIPTOR.message_types_by_name["DownloadLogFileResponse"]
_PROGRESSDATA = DESCRIPTOR.message_types_by_name["ProgressData"]
_ENTRY = DESCRIPTOR.message_types_by_name["Entry"]
_LOGFILESRESULT = DESCRIPTOR.message_types_by_name["LogFilesResult"]
_LOGFILESRESULT_RESULT = _LOGFILESRESULT.enum_types_by_name["Result"]
GetEntriesRequest = _reflection.GeneratedProtocolMessageType(
"GetEntriesRequest",
(_message.Message,),
{
"DESCRIPTOR": _GETENTRIESREQUEST,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.GetEntriesRequest)
},
)
_sym_db.RegisterMessage(GetEntriesRequest)
GetEntriesResponse = _reflection.GeneratedProtocolMessageType(
"GetEntriesResponse",
(_message.Message,),
{
"DESCRIPTOR": _GETENTRIESRESPONSE,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.GetEntriesResponse)
},
)
_sym_db.RegisterMessage(GetEntriesResponse)
SubscribeDownloadLogFileRequest = _reflection.GeneratedProtocolMessageType(
"SubscribeDownloadLogFileRequest",
(_message.Message,),
{
"DESCRIPTOR": _SUBSCRIBEDOWNLOADLOGFILEREQUEST,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.SubscribeDownloadLogFileRequest)
},
)
_sym_db.RegisterMessage(SubscribeDownloadLogFileRequest)
DownloadLogFileResponse = _reflection.GeneratedProtocolMessageType(
"DownloadLogFileResponse",
(_message.Message,),
{
"DESCRIPTOR": _DOWNLOADLOGFILERESPONSE,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.DownloadLogFileResponse)
},
)
_sym_db.RegisterMessage(DownloadLogFileResponse)
ProgressData = _reflection.GeneratedProtocolMessageType(
"ProgressData",
(_message.Message,),
{
"DESCRIPTOR": _PROGRESSDATA,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.ProgressData)
},
)
_sym_db.RegisterMessage(ProgressData)
Entry = _reflection.GeneratedProtocolMessageType(
"Entry",
(_message.Message,),
{
"DESCRIPTOR": _ENTRY,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.Entry)
},
)
_sym_db.RegisterMessage(Entry)
LogFilesResult = _reflection.GeneratedProtocolMessageType(
"LogFilesResult",
(_message.Message,),
{
"DESCRIPTOR": _LOGFILESRESULT,
"__module__": "log_files.log_files_pb2"
# @@protoc_insertion_point(class_scope:mavsdk.rpc.log_files.LogFilesResult)
},
)
_sym_db.RegisterMessage(LogFilesResult)
_LOGFILESSERVICE = DESCRIPTOR.services_by_name["LogFilesService"]
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b"\n\023io.mavsdk.log_filesB\rLogFilesProto"
_PROGRESSDATA.fields_by_name["progress"]._options = None
_PROGRESSDATA.fields_by_name["progress"]._serialized_options = b"\202\265\030\003NaN"
_LOGFILESSERVICE.methods_by_name["SubscribeDownloadLogFile"]._options = None
_LOGFILESSERVICE.methods_by_name[
"SubscribeDownloadLogFile"
]._serialized_options = b"\200\265\030\000\210\265\030\001"
_GETENTRIESREQUEST._serialized_start = 73
_GETENTRIESREQUEST._serialized_end = 92
_GETENTRIESRESPONSE._serialized_start = 95
_GETENTRIESRESPONSE._serialized_end = 225
_SUBSCRIBEDOWNLOADLOGFILEREQUEST._serialized_start = 227
_SUBSCRIBEDOWNLOADLOGFILEREQUEST._serialized_end = 318
_DOWNLOADLOGFILERESPONSE._serialized_start = 321
_DOWNLOADLOGFILERESPONSE._serialized_end = 464
_PROGRESSDATA._serialized_start = 466
_PROGRESSDATA._serialized_end = 507
_ENTRY._serialized_start = 509
_ENTRY._serialized_end = 562
_LOGFILESRESULT._serialized_start = 565
_LOGFILESRESULT._serialized_end = 854
_LOGFILESRESULT_RESULT._serialized_start = 665
_LOGFILESRESULT_RESULT._serialized_end = 854
_LOGFILESSERVICE._serialized_start = 857
_LOGFILESSERVICE._serialized_end = 1116
# @@protoc_insertion_point(module_scope)
| 49.028369 | 1,762 | 0.786055 | 793 | 6,913 | 6.482976 | 0.237074 | 0.054464 | 0.038514 | 0.056215 | 0.292161 | 0.148998 | 0.142385 | 0.137327 | 0.125267 | 0.125267 | 0 | 0.070416 | 0.102271 | 6,913 | 140 | 1,763 | 49.378571 | 0.757976 | 0.109648 | 0 | 0.126126 | 1 | 0.018018 | 0.372271 | 0.316878 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.054054 | 0 | 0.054054 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b4b4f0ee2187a25a3e53a039c1ab38685e7e790d | 230 | py | Python | Visualization/Helper_Scripts/restore_chimerax_session.py | WesLudwig/3DFI | a4edb05bfea911a992be73d27542c359570082bf | [
"MIT"
] | 8 | 2021-05-24T14:47:18.000Z | 2022-03-17T01:36:20.000Z | Visualization/Helper_Scripts/restore_chimerax_session.py | WesLudwig/3DFI | a4edb05bfea911a992be73d27542c359570082bf | [
"MIT"
] | 1 | 2021-04-02T04:01:08.000Z | 2021-04-02T16:19:14.000Z | Visualization/Helper_Scripts/restore_chimerax_session.py | WesLudwig/3DFI | a4edb05bfea911a992be73d27542c359570082bf | [
"MIT"
] | 3 | 2021-12-06T18:57:43.000Z | 2022-03-13T01:45:16.000Z | from chimerax.atomic import Structure
from chimerax.core.commands import run
run(session,"view")
for m in session.models:
if isinstance(m, Structure):
m._set_chain_descriptions(session)
m._report_chain_descriptions(session) | 25.555556 | 39 | 0.808696 | 33 | 230 | 5.454545 | 0.606061 | 0.133333 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.104348 | 230 | 9 | 39 | 25.555556 | 0.873786 | 0 | 0 | 0 | 0 | 0 | 0.017316 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b4ba8432ad4eb1743bfd789da5d9171069fa487f | 277 | py | Python | adverity/settings_docker.py | Gizmomac/adverity | 55afa9a246a5a1b62e85cde002a108ecb2ae3eb4 | [
"MIT"
] | null | null | null | adverity/settings_docker.py | Gizmomac/adverity | 55afa9a246a5a1b62e85cde002a108ecb2ae3eb4 | [
"MIT"
] | 6 | 2020-06-05T22:50:23.000Z | 2022-02-10T13:43:50.000Z | adverity/settings_docker.py | Gizmomac/adverity | 55afa9a246a5a1b62e85cde002a108ecb2ae3eb4 | [
"MIT"
] | null | null | null | from .settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'adverity',
'PASSWORD': 'adverity',
'USER': 'adverity',
'HOST': 'postgres',
}
}
DEBUG = False
ALLOWED_HOSTS = ['*']
| 18.466667 | 59 | 0.541516 | 23 | 277 | 6.434783 | 0.913043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005 | 0.277978 | 277 | 14 | 60 | 19.785714 | 0.735 | 0 | 0 | 0 | 0 | 0 | 0.375451 | 0.137184 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.083333 | 0.083333 | 0 | 0.083333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
b4c2dc7ab19ddd2717e0b6d2612e3222a2c80808 | 381 | py | Python | projects/migrations/0009_auto_20190527_2005.py | KamenSentai/Portfolio-Django | 93b73d14b469a948ac010cf9767e747c38d32f55 | [
"MIT"
] | null | null | null | projects/migrations/0009_auto_20190527_2005.py | KamenSentai/Portfolio-Django | 93b73d14b469a948ac010cf9767e747c38d32f55 | [
"MIT"
] | 14 | 2020-02-12T00:23:46.000Z | 2022-03-11T23:48:23.000Z | projects/migrations/0009_auto_20190527_2005.py | KamenSentai/Portfolio-Django | 93b73d14b469a948ac010cf9767e747c38d32f55 | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2019-05-27 20:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0008_remove_projectmodel_cover'),
]
operations = [
migrations.DeleteModel(
name='ImageModel',
),
migrations.DeleteModel(
name='ProjectModel',
),
]
| 19.05 | 55 | 0.593176 | 35 | 381 | 6.371429 | 0.714286 | 0.188341 | 0.224215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067669 | 0.301837 | 381 | 19 | 56 | 20.052632 | 0.770677 | 0.112861 | 0 | 0.307692 | 1 | 0 | 0.178571 | 0.089286 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b4db4414011b7fc916834fd986ebb318c4145226 | 2,508 | py | Python | test/test_load.py | dls-controls/pytac | ea3847d010f5cc5c007676b2ab58495b7ce7b179 | [
"Apache-2.0"
] | 4 | 2017-06-01T16:09:29.000Z | 2018-07-05T08:13:22.000Z | test/test_load.py | T-Nicholls/pytac | ea3847d010f5cc5c007676b2ab58495b7ce7b179 | [
"Apache-2.0"
] | 66 | 2017-05-24T13:17:50.000Z | 2018-08-02T12:55:38.000Z | test/test_load.py | T-Nicholls/pytac | ea3847d010f5cc5c007676b2ab58495b7ce7b179 | [
"Apache-2.0"
] | 2 | 2018-10-15T13:09:12.000Z | 2022-03-26T10:43:12.000Z | from unittest.mock import patch
import pytest
import pytac
from pytac.load_csv import load
@pytest.fixture
def mock_cs_raises_ImportError():
"""We create a mock control system to replace CothreadControlSystem, so
that we can check that when it raises an ImportError load_csv.load
catches it and raises a ControlSystemException instead.
N.B. Our new CothreadControlSystem is nested inside a fixture so it can be
patched into pytac.cothread_cs to replace the existing
CothreadControlSystem class. The new CothreadControlSystem created here is
a function not a class (like the original) to prevent it from raising the
ImportError when the code is compiled.
"""
def CothreadControlSystem():
raise ImportError
return CothreadControlSystem
def test_default_control_system_import():
"""In this test we:
- assert that the lattice is indeed loaded if no execeptions are raised
- assert that the default control system is indeed cothread and that it
is loaded onto the lattice correctly
"""
assert bool(load("VMX"))
assert isinstance(load("VMX")._cs, pytac.cothread_cs.CothreadControlSystem)
def test_import_fail_raises_ControlSystemException(mock_cs_raises_ImportError):
"""In this test we:
- check that load corectly fails if cothread cannot be imported
- check that when the import of the CothreadControlSystem fails the
ImportError raised is replaced with a ControlSystemException
"""
with patch("pytac.cothread_cs.CothreadControlSystem", mock_cs_raises_ImportError):
with pytest.raises(pytac.exceptions.ControlSystemException):
load("VMX")
def test_elements_loaded(lattice):
assert len(lattice) == 4
assert len(lattice.get_elements("drift")) == 2
assert lattice.get_length() == 2.6
def test_element_details_loaded(lattice):
quad = lattice.get_elements("quad")[0]
assert quad.cell == 1
assert quad.s == 1.0
assert quad.index == 2
def test_devices_loaded(lattice):
quads = lattice.get_elements("quad")
assert len(quads) == 1
assert quads[0].get_pv_name(field="b1", handle=pytac.RB) == "Q1:RB"
assert quads[0].get_pv_name(field="b1", handle=pytac.SP) == "Q1:SP"
def test_families_loaded(lattice):
assert lattice.get_all_families() == set(
["drift", "sext", "quad", "ds", "qf", "qs", "sd"]
)
assert lattice.get_elements("quad")[0].families == set(["quad", "qf", "qs"])
| 34.356164 | 86 | 0.709729 | 337 | 2,508 | 5.154303 | 0.356083 | 0.02418 | 0.041451 | 0.039724 | 0.071387 | 0.044905 | 0.044905 | 0.044905 | 0.044905 | 0.044905 | 0 | 0.008475 | 0.200159 | 2,508 | 72 | 87 | 34.833333 | 0.857428 | 0.372807 | 0 | 0 | 0 | 0 | 0.07367 | 0.026603 | 0 | 0 | 0 | 0 | 0.371429 | 1 | 0.228571 | false | 0 | 0.257143 | 0 | 0.514286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b4de05fec92a7cd52dc3fb940fe67fd859a9b06a | 147 | py | Python | fixtures/python_output/post_with_data_raw.py | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 4,955 | 2015-01-02T09:04:20.000Z | 2021-10-06T03:54:43.000Z | fixtures/python_output/post_with_data_raw.py | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 242 | 2015-03-27T05:59:11.000Z | 2021-10-03T08:36:05.000Z | fixtures/python_output/post_with_data_raw.py | martinsirbe/curlconverter | c5324e85d2ca24ef4743fb2bb36139d23367e293 | [
"MIT"
] | 504 | 2015-01-02T16:04:36.000Z | 2021-10-01T03:43:55.000Z | import requests
data = {
'msg1': 'wow',
'msg2': 'such',
'msg3': '@rawmsg'
}
response = requests.post('http://example.com/post', data=data)
| 14.7 | 62 | 0.605442 | 18 | 147 | 4.944444 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02439 | 0.163265 | 147 | 9 | 63 | 16.333333 | 0.699187 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b4e38a0a1db3e4e83e1a293dce9243cf6efa43cf | 50 | py | Python | physics/gravity/__init__.py | mflibby/physics | 5c0258ddc301c7d7c7b7b37bed861893dbfa20dc | [
"MIT"
] | null | null | null | physics/gravity/__init__.py | mflibby/physics | 5c0258ddc301c7d7c7b7b37bed861893dbfa20dc | [
"MIT"
] | null | null | null | physics/gravity/__init__.py | mflibby/physics | 5c0258ddc301c7d7c7b7b37bed861893dbfa20dc | [
"MIT"
] | null | null | null | __all__ = [ 'newtonian'
, 'einsteinian'] | 25 | 26 | 0.54 | 3 | 50 | 7.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.3 | 50 | 2 | 26 | 25 | 0.657143 | 0 | 0 | 0 | 0 | 0 | 0.392157 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b4e6f43da5d8a8b27fe53748efc482de3875b2f1 | 397 | py | Python | test/common/test_group_by.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | [
"MIT"
] | 7 | 2016-08-05T00:41:11.000Z | 2019-08-22T11:12:10.000Z | test/common/test_group_by.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | [
"MIT"
] | 145 | 2016-08-04T01:07:11.000Z | 2019-09-09T22:07:13.000Z | test/common/test_group_by.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | [
"MIT"
] | 3 | 2017-02-06T21:52:46.000Z | 2019-02-18T10:35:01.000Z | from inspect import isgenerator
from stograde.common.group_by import group_by
def test_group_by():
assert isgenerator(group_by([1, 2, 3], lambda s: s % 2 == 0))
assert dict(group_by(['1', '2', '3'], lambda s: s.isdigit())) == {True: ['1', '2', '3']}
assert dict(group_by(
[{'a': 1}, {'a': 2}, {'a': 1}],
lambda i: i['a'])) == {1: [{'a': 1}, {'a': 1}], 2: [{'a': 2}]}
| 36.090909 | 92 | 0.518892 | 65 | 397 | 3.061538 | 0.338462 | 0.211055 | 0.045226 | 0.090452 | 0.180905 | 0.180905 | 0.180905 | 0.180905 | 0 | 0 | 0 | 0.06051 | 0.209068 | 397 | 10 | 93 | 39.7 | 0.573248 | 0 | 0 | 0 | 0 | 0 | 0.032746 | 0 | 0 | 0 | 0 | 0 | 0.375 | 1 | 0.125 | true | 0 | 0.25 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
3706ffb47d0c140f0515d71ce50452135be841b8 | 693 | py | Python | jubakit/test/loader/test_twitter.py | vishalbelsare/jubakit | f6252ba627ce4e2e42eb9aafaaf05c882bc1c678 | [
"MIT"
] | 12 | 2016-04-11T04:49:08.000Z | 2019-02-08T01:43:46.000Z | jubakit/test/loader/test_twitter.py | vishalbelsare/jubakit | f6252ba627ce4e2e42eb9aafaaf05c882bc1c678 | [
"MIT"
] | 138 | 2016-04-11T05:57:48.000Z | 2020-09-26T03:09:31.000Z | jubakit/test/loader/test_twitter.py | vishalbelsare/jubakit | f6252ba627ce4e2e42eb9aafaaf05c882bc1c678 | [
"MIT"
] | 10 | 2016-04-11T03:18:45.000Z | 2018-04-14T10:11:15.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase
from jubakit.loader.twitter import TwitterStreamLoader, TwitterOAuthHandler
class TwitterStreamLoaderTest(TestCase):
oauth = TwitterOAuthHandler(consumer_key='x', consumer_secret='x', access_token='x', access_secret='x')
def test_simple(self):
loader = TwitterStreamLoader(self.oauth)
def test_errors(self):
# auth info (both os.environ and auth argument) is not set
self.assertRaises(RuntimeError, TwitterStreamLoader)
# invalid stream name
self.assertRaises(RuntimeError, TwitterStreamLoader, self.oauth, 'invalid_mode')
| 33 | 105 | 0.777778 | 79 | 693 | 6.64557 | 0.620253 | 0.026667 | 0.106667 | 0.179048 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001658 | 0.12987 | 693 | 20 | 106 | 34.65 | 0.868988 | 0.141414 | 0 | 0 | 0 | 0 | 0.027073 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.2 | false | 0 | 0.3 | 0 | 0.7 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
3707ef04b5da1bfca1e027c01d4cac4c875b36fd | 1,417 | py | Python | proxy_pb2_grpc.py | Informatic/idrac-kvmclient | 73cab098fa0155d7c9c9db4ca190d8ef816005f3 | [
"MIT"
] | 13 | 2019-02-10T19:32:03.000Z | 2021-11-18T22:15:44.000Z | proxy_pb2_grpc.py | Informatic/idrac-kvmclient | 73cab098fa0155d7c9c9db4ca190d8ef816005f3 | [
"MIT"
] | null | null | null | proxy_pb2_grpc.py | Informatic/idrac-kvmclient | 73cab098fa0155d7c9c9db4ca190d8ef816005f3 | [
"MIT"
] | 3 | 2021-03-17T00:45:03.000Z | 2021-12-01T01:49:04.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import proxy_pb2 as proxy__pb2
class CMCProxyStub(object):
    """Client-side stub for the proto.CMCProxy gRPC service."""

    def __init__(self, channel):
        """Constructor.

        Args:
          channel: A grpc.Channel.
        """
        # One callable per RPC; serializers come from the generated messages.
        self.GetKVMData = channel.unary_unary(
            '/proto.CMCProxy/GetKVMData',
            request_serializer=proxy__pb2.GetKVMDataRequest.SerializeToString,
            response_deserializer=proxy__pb2.GetKVMDataResponse.FromString,
        )
class CMCProxyServicer(object):
    """Server-side skeleton for proto.CMCProxy; subclasses override the RPCs."""

    def GetKVMData(self, request, context):
        """Default handler: report UNIMPLEMENTED to the client and raise."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_CMCProxyServicer_to_server(servicer, server):
    """Register *servicer*'s proto.CMCProxy method handlers on *server*."""
    handlers = {
        'GetKVMData': grpc.unary_unary_rpc_method_handler(
            servicer.GetKVMData,
            request_deserializer=proxy__pb2.GetKVMDataRequest.FromString,
            response_serializer=proxy__pb2.GetKVMDataResponse.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers(
        (grpc.method_handlers_generic_handler('proto.CMCProxy', handlers),))
| 30.148936 | 78 | 0.748765 | 151 | 1,417 | 6.754967 | 0.417219 | 0.047059 | 0.088235 | 0.108824 | 0.170588 | 0.170588 | 0.170588 | 0.170588 | 0.119608 | 0.119608 | 0 | 0.005133 | 0.175018 | 1,417 | 46 | 79 | 30.804348 | 0.867408 | 0.199718 | 0 | 0.107143 | 1 | 0 | 0.086331 | 0.023381 | 0 | 0 | 0 | 0 | 0 | 1 | 0.107143 | false | 0.107143 | 0.071429 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
37093a980c54f237531f79d7be56635f875ed92e | 2,184 | py | Python | tests/test_03_find.py | gpiantoni/bidso | 57d383254de06aa2631189668350f0aea72a57b8 | [
"MIT"
] | null | null | null | tests/test_03_find.py | gpiantoni/bidso | 57d383254de06aa2631189668350f0aea72a57b8 | [
"MIT"
] | null | null | null | tests/test_03_find.py | gpiantoni/bidso | 57d383254de06aa2631189668350f0aea72a57b8 | [
"MIT"
] | null | null | null | from pytest import raises, warns
from bidso.find import find_in_bids, find_root, _generate_pattern
from .paths import BIDS_PATH, task_ieeg
# Path of the iEEG task fixture used by all the tests below.
filename = task_ieeg.get_filename(BIDS_PATH)


def test_find_root():
    """find_root climbs to the dataset / subject / session directory."""
    assert find_root(filename).name == 'bids'
    assert find_root(filename, target='subject').name == 'sub-bert'
    assert find_root(filename, target='session').name == 'ses-day02'
def test_find_in_bids_01():
    """Upward search locates the run-1 bold file; unknown keys warn."""
    match = find_in_bids(filename, subject='bert', session='day01', run='1',
                         extension='.nii.gz', upwards=True)
    assert match.name == 'sub-bert_ses-day01_task-motor_run-1_bold.nii.gz'

    with warns(UserWarning):
        find_in_bids(filename, subject='bert', useless='xxx', task='motor',
                     modality='channels', upwards=True)


def test_find_in_bids_02():
    """Unknown subject raises FileNotFoundError when searching upwards."""
    with raises(FileNotFoundError):
        find_in_bids(filename, upwards=True, subject='xxx')


def test_find_in_bids_03():
    """subject='bert' alone is not enough to match anything upwards."""
    with raises(FileNotFoundError):
        find_in_bids(filename, upwards=True, subject='bert')
def test_find_in_bids_04():
    """Generator mode yields 21 matches for subject 'bert'."""
    n_matches = sum(1 for _ in find_in_bids(BIDS_PATH, generator=True, subject='bert'))
    assert n_matches == 21


def test_find_in_bids_05():
    """No match: eager mode raises, generator mode is simply exhausted."""
    with raises(FileNotFoundError):
        find_in_bids(BIDS_PATH, subject='xxx')
    with raises(StopIteration):
        next(find_in_bids(BIDS_PATH, subject='xxx', generator=True))


def test_find_in_bids_06():
    """upwards and generator are mutually exclusive options."""
    with raises(ValueError):
        find_in_bids(BIDS_PATH, upwards=True, generator=True)
def test_generate_pattern():
    """_generate_pattern turns BIDS entity dicts into glob patterns."""
    assert _generate_pattern(True, {'subject': 'test'}) == 'sub-test_*.*'
    assert _generate_pattern(False, {'subject': 'test'}) == 'sub-test.*'
    assert _generate_pattern(True, {'subject': 'test', 'session': 'sess'}) == 'sub-test_ses-sess_*.*'
    assert _generate_pattern(True, {'subject': 'test', 'modality': 'mod'}) == 'sub-test_*_mod.*'
    assert _generate_pattern(True, {'session': 'sess', 'extension': '.nii.gz'}) == '*_ses-sess_*.nii.gz'
    assert _generate_pattern(True, {'modality': 'mod', 'extension': '.nii.gz'}) == '*_mod.nii.gz'


def test_wildcard_subject():
    """An empty entity dict is rejected."""
    with raises(ValueError):
        _generate_pattern(False, {})
| 32.117647 | 102 | 0.697344 | 299 | 2,184 | 4.795987 | 0.220736 | 0.062762 | 0.104603 | 0.054393 | 0.486053 | 0.334031 | 0.245467 | 0.147838 | 0.087866 | 0.087866 | 0 | 0.01252 | 0.158883 | 2,184 | 67 | 103 | 32.597015 | 0.7681 | 0 | 0 | 0.121951 | 1 | 0 | 0.123626 | 0.031136 | 0 | 0 | 0 | 0 | 0.268293 | 1 | 0.219512 | false | 0 | 0.073171 | 0 | 0.292683 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
370fdd0e01d6e696fcb5140440d9c90712101d9b | 131 | py | Python | bugtests/test225s.py | doom38/jython_v2.2.1 | 0803a0c953c294e6d14f9fc7d08edf6a3e630a15 | [
"CNRI-Jython"
] | null | null | null | bugtests/test225s.py | doom38/jython_v2.2.1 | 0803a0c953c294e6d14f9fc7d08edf6a3e630a15 | [
"CNRI-Jython"
] | null | null | null | bugtests/test225s.py | doom38/jython_v2.2.1 | 0803a0c953c294e6d14f9fc7d08edf6a3e630a15 | [
"CNRI-Jython"
] | null | null | null |
import support
def foo(a=1, b=2+4):
return b
v = foo()
if v != 6:
raise support.TestError, "Wrong return value %d" % d
| 10.916667 | 55 | 0.59542 | 24 | 131 | 3.25 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041237 | 0.259542 | 131 | 11 | 56 | 11.909091 | 0.762887 | 0 | 0 | 0 | 0 | 0 | 0.162791 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.166667 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
37177270f9fb987d331a5473a530bd83b92b43ec | 726 | py | Python | 9/travel_log.py | krutilins/python-coding | 66e1255e94f383c3fac7178f8e3be81db66ebdfc | [
"MIT"
] | null | null | null | 9/travel_log.py | krutilins/python-coding | 66e1255e94f383c3fac7178f8e3be81db66ebdfc | [
"MIT"
] | null | null | null | 9/travel_log.py | krutilins/python-coding | 66e1255e94f383c3fac7178f8e3be81db66ebdfc | [
"MIT"
] | null | null | null | from typing import List
# Global list of visit records; each record is a dict with
# 'country', 'times' and 'cities' keys.
travel_log = []


def add_to_travel_log(country: str, times: int, cities: List[str]):
    """Append one visit record to the global travel_log."""
    travel_log.append({
        "country": country,
        "times": times,
        "cities": cities,
    })


_BELARUS_CITIES = ["Vitebsk", "Polotsk", "Novopolotsk", "Minsk", "Beshenkovichi"]
# The original issued five identical calls; a loop produces the same
# five records (each with its own city list, as the literals did).
for _ in range(5):
    add_to_travel_log("Belarus", 1, list(_BELARUS_CITIES))

for log in travel_log:
    print(log)
37182dfa9af0b78350a0af133c46785ce5023680 | 483 | py | Python | examples/OLX/review.py | godber/banky | a3c41b810b16cce737f0ca7e2f1a10a32c3a5d66 | [
"MIT"
] | null | null | null | examples/OLX/review.py | godber/banky | a3c41b810b16cce737f0ca7e2f1a10a32c3a5d66 | [
"MIT"
] | null | null | null | examples/OLX/review.py | godber/banky | a3c41b810b16cce737f0ca7e2f1a10a32c3a5d66 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Finds all question.yml files and generate a single HTML document for review
./review.py GFAquestions/W1B/ > out.html
"""
import os
from pathlib import Path
import click
from genhtml import gen
@click.command()
@click.argument('dir')
def main(dir):
    """Walk DIR recursively and print generated HTML for every question.yml."""
    for root_dir, _dirs, files in os.walk(dir):
        for file in files:
            if file == 'question.yml':
                # os.path.join is portable across platforms; the original
                # concatenated with a literal '/'.
                print(gen(os.path.join(root_dir, file)))


if __name__ == '__main__':
    main()
| 17.888889 | 78 | 0.641822 | 69 | 483 | 4.347826 | 0.608696 | 0.073333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002695 | 0.231884 | 483 | 26 | 79 | 18.576923 | 0.80593 | 0.285714 | 0 | 0 | 1 | 0 | 0.071006 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.307692 | 0 | 0.384615 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
371dce11b5d72394d8a7fb98e7e1741d588c2c09 | 443 | py | Python | Ex030LeituraIMC.py | GuSilva20/Primeiros-Codigos | 67deefb683b40c2c4fee1bb383730588635e9d85 | [
"MIT"
] | null | null | null | Ex030LeituraIMC.py | GuSilva20/Primeiros-Codigos | 67deefb683b40c2c4fee1bb383730588635e9d85 | [
"MIT"
] | null | null | null | Ex030LeituraIMC.py | GuSilva20/Primeiros-Codigos | 67deefb683b40c2c4fee1bb383730588635e9d85 | [
"MIT"
] | null | null | null | peso = float(input('Digite seu peso: [KG]'))
alt = float(input('Digite a altura: [m]'))
imc = peso / (alt**2)
print("IMC: {:.2f}".format(imc))
if imc < 18.5:
print("Abaixo do Peso!")
elif imc >= 18.5 and imc <= 25:
print("Você está no PESO IDEAL!")
elif 25.1 >= imc and imc <= 30:
print("Você está com SOBREPESO!")
elif imc >= 30.1 and imc <= 40:
print("Você está OBESO!")
else:
print('Você está com OBESIDADE MÓRBIDA!')
| 26.058824 | 45 | 0.604966 | 74 | 443 | 3.621622 | 0.486486 | 0.134328 | 0.19403 | 0.119403 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056657 | 0.20316 | 443 | 16 | 46 | 27.6875 | 0.70255 | 0 | 0 | 0 | 0 | 0 | 0.367946 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.428571 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
2e9d74efb7eefeafb1713acec3b52e1d7fb6250a | 13,709 | py | Python | tests/test_sensor_sqs.py | kylestewart/stackstorm-aws | 1eeb73645fce241d417f6300275ed36ac1097160 | [
"Apache-2.0"
] | 15 | 2016-12-04T01:31:50.000Z | 2020-10-24T10:49:51.000Z | tests/test_sensor_sqs.py | kylestewart/stackstorm-aws | 1eeb73645fce241d417f6300275ed36ac1097160 | [
"Apache-2.0"
] | 85 | 2016-12-10T12:51:16.000Z | 2022-01-17T23:28:48.000Z | tests/test_sensor_sqs.py | kylestewart/stackstorm-aws | 1eeb73645fce241d417f6300275ed36ac1097160 | [
"Apache-2.0"
] | 36 | 2016-12-11T07:29:53.000Z | 2021-09-08T19:08:47.000Z | import mock
import yaml
from boto3.session import Session
from botocore.exceptions import ClientError
from botocore.exceptions import NoCredentialsError
from botocore.exceptions import NoRegionError
from botocore.exceptions import EndpointConnectionError
from st2tests.base import BaseSensorTestCase
from sqs_sensor import AWSSQSSensor
class SQSSensorTestCase(BaseSensorTestCase):
    """Unit tests for AWSSQSSensor, driven entirely by boto3/STS mocks."""

    sensor_cls = AWSSQSSensor

    class MockResource(object):
        """SQS resource stub whose queues are preloaded with *msgs* bodies."""

        # NOTE: the originals used mutable default arguments (msgs=[]);
        # an empty tuple is equivalent here (msgs is only iterated/stored).
        def __init__(self, msgs=()):
            self.msgs = msgs

        def get_queue_by_name(self, **kwargs):
            return SQSSensorTestCase.MockQueue(self.msgs)

        def Queue(self, queue):
            return SQSSensorTestCase.MockQueue(self.msgs)

    class MockResourceNonExistentQueue(object):
        """SQS resource stub: lookups fail with NonExistentQueue, create works."""

        def __init__(self, msgs=()):
            self.msgs = msgs

        def get_queue_by_name(self, **kwargs):
            raise ClientError({'Error': {'Code': 'AWS.SimpleQueueService.NonExistentQueue'}},
                              'sqs_test')

        def Queue(self, queue):
            raise ClientError({'Error': {'Code': 'AWS.SimpleQueueService.NonExistentQueue'}},
                              'sqs_test')

        def create_queue(self, **kwargs):
            return SQSSensorTestCase.MockQueue(self.msgs)

    class MockResourceRaiseClientError(object):
        """SQS resource stub raising ClientError with a configurable code."""

        def __init__(self, error_code=''):
            self.error_code = error_code

        def get_queue_by_name(self, **kwargs):
            raise ClientError({'Error': {'Code': self.error_code}}, 'sqs_test')

        def Queue(self, queue):
            raise ClientError({'Error': {'Code': self.error_code}}, 'sqs_test')

    class MockResourceRaiseNoCredentialsError(object):
        """SQS resource stub raising NoCredentialsError on any access."""

        def get_queue_by_name(self, **kwargs):
            raise NoCredentialsError()

        def Queue(self, queue):
            raise NoCredentialsError()

    class MockResourceRaiseEndpointConnectionError(object):
        """SQS resource stub raising EndpointConnectionError on any access."""

        def get_queue_by_name(self, **kwargs):
            raise EndpointConnectionError(endpoint_url='')

        def Queue(self, queue):
            raise EndpointConnectionError(endpoint_url='')

    class MockStsClient(object):
        """STS client stub: fixed caller identity and assumable role creds."""

        def __init__(self):
            self.meta = mock.Mock(service_model={})

        def get_caller_identity(self):
            ci = mock.Mock()
            ci.get = lambda attribute: '111222333444' if attribute == 'Account' else None
            return ci

        def assume_role(self, RoleArn, RoleSessionName):
            return {
                'Credentials': {
                    'AccessKeyId': 'access_key_id_example',
                    'SecretAccessKey': 'secret_access_key_example',
                    'SessionToken': 'session_token_example'
                }
            }

    class MockStsClientRaiseClientError(MockStsClient):
        """STS client stub whose assume_role is always denied."""

        def assume_role(self, RoleArn, RoleSessionName):
            raise ClientError({'Error': {'Code': 'AccessDenied'}}, 'sqs_test')

    class MockQueue(object):
        """SQS queue stub yielding one MockMessage per preloaded body."""

        def __init__(self, msgs=()):
            self.dummy_messages = [SQSSensorTestCase.MockMessage(x) for x in msgs]

        def receive_messages(self, **kwargs):
            return self.dummy_messages

    class MockMessage(object):
        """Single SQS message stub with a no-op delete()."""

        def __init__(self, body=None):
            self.body = body

        def delete(self):
            return mock.MagicMock(return_value=None)

    def setUp(self):
        super(SQSSensorTestCase, self).setUp()
        self.full_config = self.load_yaml('full.yaml')
        self.blank_config = self.load_yaml('blank.yaml')
        self.multiaccount_config = self.load_yaml('multiaccount.yaml')
        self.mixed_config = self.load_yaml('mixed.yaml')

    def load_yaml(self, filename):
        """Load a fixture file and parse it as YAML."""
        return yaml.safe_load(self.get_fixture_content(filename))

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    def test_poll_with_blank_config(self):
        sensor = self.get_sensor_instance(config=self.blank_config)
        sensor.setup()
        sensor.poll()
        self.assertEqual(self.get_dispatched_triggers(), [])

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource', mock.Mock(return_value=MockResource()))
    def _poll_without_message(self, config):
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()
        self.assertEqual(self.get_dispatched_triggers(), [])

    def test_poll_without_message_full_config(self):
        self._poll_without_message(self.full_config)

    def test_poll_without_message_multiaccount_config(self):
        self._poll_without_message(self.multiaccount_config)

    def test_poll_without_message_mixed_config(self):
        self._poll_without_message(self.mixed_config)

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource', mock.Mock(return_value=MockResource(['{"foo":"bar"}'])))
    def _poll_with_message(self, config):
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')
        self.assertNotEqual(self.get_dispatched_triggers(), [])

    def test_poll_with_message_full_config(self):
        self._poll_with_message(self.full_config)

    def test_poll_with_message_multiaccount_config(self):
        self._poll_with_message(self.multiaccount_config)

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResourceNonExistentQueue(['{"foo":"bar"}'])))
    def _poll_with_non_existent_queue(self, config):
        # The sensor is expected to create the queue and still receive messages.
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()
        contexts = self.get_dispatched_triggers()
        self.assertNotEqual(contexts, [])
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')

    def test_poll_with_non_existent_queue_full_config(self):
        self._poll_with_non_existent_queue(self.full_config)

    def test_poll_with_non_existent_queue_multiaccount_config(self):
        self._poll_with_non_existent_queue(self.multiaccount_config)

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResource(['{"foo":"bar"}'])))
    def test_set_input_queues_config_dynamically(self):
        sensor = self.get_sensor_instance(config=self.blank_config)
        sensor._sensor_service.set_value('aws.roles',
                                         ['arn:aws:iam::123456789098:role/rolename1'],
                                         local=False)
        sensor.setup()

        # set credential mock to prevent sending request to AWS
        mock_credentials = mock.Mock()
        mock_credentials.access_key = sensor._get_config_entry('aws_access_key_id')
        mock_credentials.secret_key = sensor._get_config_entry('aws_secret_access_key')
        Session.get_credentials = mock_credentials

        # set test value to datastore
        sensor._sensor_service.set_value('aws.input_queues', 'hoge', local=False)
        sensor.poll()

        # update input_queues to check this is reflected
        sensor._sensor_service.set_value('aws.input_queues', 'fuga,puyo', local=False)
        sensor.poll()

        # update input_queues to check this is reflected
        sensor._sensor_service.set_value(
            'aws.input_queues',
            'https://sqs.us-west-2.amazonaws.com/123456789098/queue_name_3',
            local=False
        )
        sensor.poll()

        contexts = self.get_dispatched_triggers()
        self.assertNotEqual(contexts, [])
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')
        # get message from queue 'hoge', 'fuga' then 'puyo'
        self.assertEqual([x['payload']['queue'] for x in contexts],
                         ['hoge', 'fuga', 'puyo',
                          'https://sqs.us-west-2.amazonaws.com/123456789098/queue_name_3'])

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResource(['{"foo":"bar"}'])))
    def test_set_input_queues_config_with_list(self):
        # set 'input_queues' config with list type
        config = self.full_config
        config['sqs_sensor']['input_queues'] = [
            'foo',
            'bar',
            'https://sqs.us-west-2.amazonaws.com/123456789098/queue_name_3'
        ]
        config['sqs_sensor']['roles'] = ['arn:aws:iam::123456789098:role/rolename1']

        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()

        contexts = self.get_dispatched_triggers()
        self.assertNotEqual(contexts, [])
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')
        self.assertEqual([x['payload']['queue'] for x in contexts],
                         ['foo', 'bar',
                          'https://sqs.us-west-2.amazonaws.com/123456789098/queue_name_3'])

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(
                           return_value=MockResourceRaiseClientError('InvalidClientTokenId'))
                       )
    def _fails_with_invalid_token(self, config):
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()
        self.assertEqual(self.get_dispatched_triggers(), [])

    def test_fails_with_invalid_token_full_config(self):
        self._fails_with_invalid_token(self.full_config)

    def test_fails_with_invalid_token_multiaccount_config(self):
        self._fails_with_invalid_token(self.multiaccount_config)

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResourceRaiseNoCredentialsError()))
    def _fails_without_credentials(self, config):
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()
        self.assertEqual(self.get_dispatched_triggers(), [])

    def test_fails_without_credentials_full_config(self):
        self._fails_without_credentials(self.full_config)

    def test_fails_without_credentials_multiaccount_config(self):
        self._fails_without_credentials(self.multiaccount_config)

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResourceRaiseEndpointConnectionError()))
    def _fails_with_invalid_region(self, config):
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()
        self.assertEqual(self.get_dispatched_triggers(), [])

    def test_fails_with_invalid_region_full_config(self):
        self._fails_with_invalid_region(self.full_config)

    def test_fails_with_invalid_region_multiaccount_config(self):
        self._fails_with_invalid_region(self.multiaccount_config)

    @mock.patch.object(Session, 'client',
                       mock.Mock(return_value=MockStsClientRaiseClientError()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResource(['{"foo":"bar"}'])))
    def _fails_assuming_role(self, config):
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()

    def test_fails_assuming_role_full_config(self):
        # Single-account config still polls its own queue.
        self._fails_assuming_role(self.full_config)
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')
        self.assertNotEqual(self.get_dispatched_triggers(), [])

    def test_fails_assuming_role_multiaccount_config(self):
        # Multiaccount config cannot poll anything without the role.
        self._fails_assuming_role(self.multiaccount_config)
        self.assertEqual(self.get_dispatched_triggers(), [])

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(side_effect=NoRegionError(
                           service_name='sqs', region_name='us-east-1')))
    def test_fails_creating_sqs_resource(self):
        sensor = self.get_sensor_instance(config=self.mixed_config)
        sensor.setup()
        sensor.poll()
        self.assertEqual(self.get_dispatched_triggers(), [])

    @mock.patch.object(Session, 'client', mock.Mock(return_value=MockStsClient()))
    @mock.patch.object(Session, 'resource',
                       mock.Mock(return_value=MockResource(['{"foo":"bar"}'])))
    def _poll_with_missing_arn(self, config):
        config['sqs_sensor']['roles'] = []
        sensor = self.get_sensor_instance(config=config)
        sensor.setup()
        sensor.poll()

    def test_poll_with_missing_arn_full_config(self):
        self._poll_with_missing_arn(self.full_config)
        self.assertNotEqual(self.get_dispatched_triggers(), [])
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')

    def test_poll_with_missing_arn_multiaccount_config(self):
        self._poll_with_missing_arn(self.multiaccount_config)
        self.assertEqual(self.get_dispatched_triggers(), [])

    def test_poll_with_missing_arn_mixed_config(self):
        self._poll_with_missing_arn(self.mixed_config)
        self.assertNotEqual(self.get_dispatched_triggers(), [])
        self.assertTriggerDispatched(trigger='aws.sqs_new_message')
| 38.835694 | 100 | 0.667809 | 1,508 | 13,709 | 5.765252 | 0.112069 | 0.036807 | 0.039683 | 0.058201 | 0.733839 | 0.705544 | 0.647918 | 0.576375 | 0.528755 | 0.485967 | 0 | 0.009075 | 0.220293 | 13,709 | 352 | 101 | 38.946023 | 0.804285 | 0.019403 | 0 | 0.466926 | 0 | 0 | 0.096085 | 0.018309 | 0 | 0 | 0 | 0 | 0.093385 | 1 | 0.210117 | false | 0 | 0.035019 | 0.027237 | 0.319066 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2e9dda89e868a73bdaaca07150abc4e169cdf033 | 640 | py | Python | others/glyphs-han-master/unicode.py | DaoMingze/My-Rime-Config | 7e0ad80b0a9b41731e8a8d9f535887f396130602 | [
"Apache-2.0"
] | null | null | null | others/glyphs-han-master/unicode.py | DaoMingze/My-Rime-Config | 7e0ad80b0a9b41731e8a8d9f535887f396130602 | [
"Apache-2.0"
] | null | null | null | others/glyphs-han-master/unicode.py | DaoMingze/My-Rime-Config | 7e0ad80b0a9b41731e8a8d9f535887f396130602 | [
"Apache-2.0"
] | null | null | null | blocks = {
'CJK Unified Ideographs Extension A': (0x3400, 0x4DBF),
'CJK Unified Ideographs': (0x4E00, 0x9FFF),
'CJK Unified Ideographs Extension B': (0x20000, 0x2A6DF),
'CJK Unified Ideographs Extension C': (0x2A700, 0x2B73F),
'CJK Unified Ideographs Extension D': (0x2B740, 0x2B81F),
'CJK Unified Ideographs Extension E': (0x2B820, 0x2CEAF),
'CJK Unified Ideographs Extension F': (0x2CEB0, 0x2EBEF),
'CJK Unified Ideographs Extension G': (0x30000, 0x3134F),
}
for name, (start, end) in blocks.items():
with open(f'Tables/Character Sets/Unicode {name}.txt', 'w') as f:
f.writelines(chr(i) + '\n' for i in range(start, end + 1))
| 42.666667 | 66 | 0.709375 | 86 | 640 | 5.27907 | 0.569767 | 0.176211 | 0.352423 | 0.447137 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117002 | 0.145313 | 640 | 14 | 67 | 45.714286 | 0.71298 | 0 | 0 | 0 | 0 | 0 | 0.473438 | 0 | 0 | 0 | 0.16875 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ea80fff0c02c7b2da4f815e43d282c95b65e0ab | 491 | py | Python | test/test_relu.py | neblar/numpynn | b33c5f671c8e835b55ed775ababa358e14c987bc | [
"MIT"
] | null | null | null | test/test_relu.py | neblar/numpynn | b33c5f671c8e835b55ed775ababa358e14c987bc | [
"MIT"
] | null | null | null | test/test_relu.py | neblar/numpynn | b33c5f671c8e835b55ed775ababa358e14c987bc | [
"MIT"
] | null | null | null | import torch
import numpy as np
from ..src.ReLU import ReLU as NumpyReLU
class Tester:
    """Check the NumPy ReLU implementation against torch.relu."""

    relu_numpy = NumpyReLU()

    def y_torch(self, x):
        # Reference values computed with PyTorch, converted back to numpy.
        return torch.relu(torch.tensor(x)).numpy()

    def y_numpy(self, x):
        return self.relu_numpy(x)

    def __call__(self):
        sample = np.random.randn(128)
        assert np.array_equal(
            self.y_torch(sample),
            self.y_numpy(sample),
        )
def test():
    """Run the torch-vs-numpy ReLU comparison on 32 random batches."""
    check = Tester()
    for _ in range(32):
        check()
| 17.535714 | 50 | 0.578411 | 69 | 491 | 3.942029 | 0.42029 | 0.055147 | 0.080882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014706 | 0.307536 | 491 | 27 | 51 | 18.185185 | 0.785294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.210526 | false | 0 | 0.157895 | 0.105263 | 0.578947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
2ea81e397ef25d457257bf245b2833269430b618 | 2,341 | py | Python | simulators/stage_ros/scripts/stage_joints.py | sphanit/CoHAN_Navigation | 3751d1631e223dc7ad214d29d4310cc571761a7e | [
"BSD-3-Clause"
] | null | null | null | simulators/stage_ros/scripts/stage_joints.py | sphanit/CoHAN_Navigation | 3751d1631e223dc7ad214d29d4310cc571761a7e | [
"BSD-3-Clause"
] | null | null | null | simulators/stage_ros/scripts/stage_joints.py | sphanit/CoHAN_Navigation | 3751d1631e223dc7ad214d29d4310cc571761a7e | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# Brief: This function publishes /joint_states for tucked_arm_pose of PR2, necessary for displaying robot robot_description in Rviz
# Author: Phani Teja Singamaneni
import rospy
from sensor_msgs.msg import JointState
def main():
    """Publish a fixed PR2 tucked-arm pose on /stage_joint_states at 50 Hz."""
    rospy.init_node('stage_joints', anonymous=True)

    joint_states = JointState()
    joint_states.header.frame_id = "/joint_states"
    # Joint names and positions are a fixed snapshot of the tucked-arm pose.
    joint_states.name = ["laser_tilt_mount_joint","fl_caster_rotation_joint","fl_caster_l_wheel_joint","fl_caster_r_wheel_joint","fr_caster_rotation_joint","fr_caster_l_wheel_joint","fr_caster_r_wheel_joint","bl_caster_rotation_joint","bl_caster_l_wheel_joint","bl_caster_r_wheel_joint","br_caster_rotation_joint","br_caster_l_wheel_joint","br_caster_r_wheel_joint","r_gripper_motor_slider_joint","r_gripper_motor_screw_joint","r_gripper_l_finger_joint","r_gripper_r_finger_joint","r_gripper_l_finger_tip_joint","r_gripper_r_finger_tip_joint","r_gripper_joint","l_gripper_motor_slider_joint","l_gripper_motor_screw_joint","l_gripper_l_finger_joint","l_gripper_r_finger_joint","l_gripper_l_finger_tip_joint","l_gripper_r_finger_tip_joint","l_gripper_joint","torso_lift_joint","torso_lift_motor_screw_joint","head_pan_joint","head_tilt_joint","l_shoulder_pan_joint","l_shoulder_lift_joint","l_upper_arm_roll_joint","l_elbow_flex_joint","l_forearm_roll_joint","l_wrist_flex_joint","l_wrist_roll_joint","r_shoulder_pan_joint","r_shoulder_lift_joint","r_upper_arm_roll_joint","r_elbow_flex_joint","r_forearm_roll_joint","r_wrist_flex_joint","r_wrist_roll_joint"]
    joint_states.position = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0, -2.9802322387695312e-08, 0.06914562731981277, 1.2399593591690063, 1.7890771627426147, -1.6932392120361328, -1.7343393564224243, -0.09060896933078766, -0.07657696306705475, -0.0138227678835392, 1.097334384918213, -1.5566887855529785, -2.114457845687866, -1.4083462953567505, -1.8511372804641724, 0.2240774929523468]

    rate = rospy.Rate(50)
    publisher = rospy.Publisher("/stage_joint_states", JointState, queue_size=1)
    # Re-stamp and re-publish the same static pose until shutdown.
    while not rospy.is_shutdown():
        joint_states.header.stamp = rospy.Time.now()
        publisher.publish(joint_states)
        rate.sleep()


if __name__ == "__main__":
    main()
| 97.541667 | 1,157 | 0.792396 | 393 | 2,341 | 4.24173 | 0.277354 | 0.070786 | 0.104379 | 0.136773 | 0.154769 | 0.035993 | 0.035993 | 0.035993 | 0.035993 | 0.035993 | 0 | 0.148593 | 0.074327 | 2,341 | 23 | 1,158 | 101.782609 | 0.620674 | 0.075609 | 0 | 0 | 0 | 0 | 0.485423 | 0.339658 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.125 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ec08e8ad2e05f156c6e6c4d9b2462d5cc429843 | 277 | py | Python | Codewars/8kyu/thinkful-dictionary-drills-order-filler/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/8kyu/thinkful-dictionary-drills-order-filler/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codewars/8kyu/thinkful-dictionary-drills-order-filler/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python - 3.6.0
# Inventory fixture: item name -> units in stock.
stock = {
'football': 4,
'boardgame': 10,
'leggos': 1,
'doll': 5
}
# Codewars harness: `test` and `fillable` are injected by the kata runner.
# From these cases, fillable(stock, item, qty) appears to be True when at
# least `qty` units of `item` are in stock (and False for unknown items).
test.assert_equals(fillable(stock, 'football', 3), True)
test.assert_equals(fillable(stock, 'leggos', 2), False)
test.assert_equals(fillable(stock, 'action figure', 1), False)
| 23.083333 | 62 | 0.646209 | 38 | 277 | 4.631579 | 0.578947 | 0.170455 | 0.272727 | 0.409091 | 0.494318 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047619 | 0.166065 | 277 | 11 | 63 | 25.181818 | 0.714286 | 0.050542 | 0 | 0 | 0 | 0 | 0.206897 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2edb7ca406a900d305ea11be0864fda670950661 | 217 | py | Python | tests/test.metricsManager.py | bongiovimatthew/jigsaw-rl | e9589a78b62a7645fe9bc054f0411230eb249acf | [
"MIT"
] | 1 | 2018-09-11T23:50:38.000Z | 2018-09-11T23:50:38.000Z | tests/test.metricsManager.py | bongiovimatthew/jigsaw-rl | e9589a78b62a7645fe9bc054f0411230eb249acf | [
"MIT"
] | 6 | 2018-09-11T23:46:57.000Z | 2018-09-15T00:33:45.000Z | tests/test.metricsManager.py | bongiovimatthew/jigsaw-rl | e9589a78b62a7645fe9bc054f0411230eb249acf | [
"MIT"
] | null | null | null | from metricsManager import MetricsManager
def TestDrawGraphs():
    """Smoke test: build a MetricsManager and render its metrics graph."""
    MetricsManager().displayMetricsGraph()
def main():
    """Entry point: run the graph-drawing smoke test."""
    TestDrawGraphs()


if __name__ == "__main__":
    main()
| 13.5625 | 41 | 0.695853 | 18 | 217 | 7.944444 | 0.611111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.207373 | 217 | 15 | 42 | 14.466667 | 0.831395 | 0 | 0 | 0 | 0 | 0 | 0.036866 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.444444 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ee099f29059e41c328e7874e3b6406e0db2acd5 | 2,361 | py | Python | flip-text/main.py | nvxden/py-mix | ae850ba7366cbb2bea8373fa994ba69b31116f11 | [
"MIT"
] | null | null | null | flip-text/main.py | nvxden/py-mix | ae850ba7366cbb2bea8373fa994ba69b31116f11 | [
"MIT"
] | null | null | null | flip-text/main.py | nvxden/py-mix | ae850ba7366cbb2bea8373fa994ba69b31116f11 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import sys
# objects
mapping = {
'а' : 'ɐ',
'б' : 'ƍ',
'в' : 'ʚ',
'г' : 'ɹ',
'д' : 'ɓ',
'е' : 'ǝ̤',
'ё' : 'ǝ',
'ж' : 'ж',
'з' : 'ε',
'и' : 'и̯',
'й' : 'n',
'к' : 'ʞ',
'л' : 'v',
'м' : 'w',
'н' : 'н',
'о' : 'о',
'п' : 'u',
'р' : 'd',
'с' : 'ɔ',
'т' : 'ɯ',
'у' : 'ʎ',
'ф' : 'ȸ',
'х' : 'х',
'ц' : 'ǹ',
'ч' : 'Һ',
'ш' : 'm',
'щ' : 'm',
'ъ' : 'q',
'ы' : 'qı',
'ь' : 'q',
'э' : 'є',
'ю' : 'ıo',
'я' : 'ʁ',
'А' : 'ɐ',
'Б' : 'ƍ',
'В' : 'ʚ',
'Г' : 'ɹ',
'Д' : 'ɓ',
'Е' : 'ǝ̤',
'Ё' : 'ǝ',
'Ж' : 'ж',
'З' : 'ε',
'И' : 'и̯',
'Й' : 'n',
'К' : 'ʞ',
'Л' : 'v',
'М' : 'w',
'Н' : 'н',
'О' : 'о',
'П' : 'u',
'Р' : 'd',
'С' : 'ɔ',
'Т' : 'ɯ',
'У' : 'ʎ',
'Ф' : 'ȸ',
'Х' : 'х',
'Ц' : 'ǹ',
'Ч' : 'Һ',
'Ш' : 'm',
'Щ' : 'm',
'Ъ' : 'q',
'Ы' : 'q',
'Ь' : 'q',
'Э' : 'єı',
'Ю' : 'ıo',
'Я' : 'ʁ',
'a' : 'ɐ',
'b' : 'q',
'c' : 'ɔ',
'd' : 'p',
'e' : 'ǝ',
'f' : 'ɟ',
'g' : 'ƃ',
'h' : 'ɥ',
'i' : 'ı',
'j' : 'ɾ',
'k' : 'ʞ',
'l' : 'l',
'm' : 'ɯ',
'n' : 'u',
'o' : 'o',
'p' : 'd',
'q' : 'ᕹ',
'r' : 'ɹ',
's' : 's',
't' : 'ʇ',
'u' : 'n',
'v' : 'ʌ',
'w' : 'ʍ',
'x' : 'x',
'y' : 'ʎ',
'z' : 'z',
'A' : 'ɐ',
'B' : 'q',
'C' : 'ɔ',
'D' : 'p',
'E' : 'ǝ',
'F' : 'ɟ',
'G' : 'ƃ',
'H' : 'ɥ',
'I' : 'ı',
'J' : 'ɾ',
'K' : 'ʞ',
'L' : 'l',
'M' : 'ɯ',
'N' : 'u',
'O' : 'o',
'P' : 'd',
'Q' : 'ᕹ',
'R' : 'ɹ',
'S' : 's',
'T' : 'ʇ',
'U' : 'n',
'V' : 'ʌ',
'W' : 'ʍ',
'X' : 'x',
'Y' : 'ʎ',
'Z' : 'z',
'!' : '¡',
'*' : '*',
'(' : ')',
')' : '(',
'-' : '-',
'=' : '=',
'_' : '‾',
'+' : '+',
'[' : ']',
']' : '[',
'{' : '}',
'}' : '{',
';' : ';',
'?' : '¿',
',' : '‘',
'.' : '˙',
'<' : '>',
'>' : '<',
'/' : '\\',
'\\' : '/',
}
# functions
def flat_text(s: str):
    '''
    Collapse every run of whitespace (including \n and \t) into a single
    space and strip leading/trailing whitespace.
    '''
    # str.split() with no argument splits on arbitrary whitespace runs and
    # discards empty fields, which is exactly the normalization we need.
    return ' '.join(s.split())
# main
# Read all of stdin, normalize whitespace, translate each character to its
# upside-down counterpart (characters without a mapping pass through
# unchanged), then reverse the whole string so it reads left-to-right when
# physically flipped.
text = sys.stdin.read()
text = flat_text(text)
text = ''.join(map(lambda s: mapping.get(s, s), text))[::-1]
print(text)
# END
| 12.233161 | 60 | 0.244388 | 329 | 2,361 | 1.744681 | 0.413374 | 0.041812 | 0.076655 | 0.013937 | 0.43554 | 0.38676 | 0.38676 | 0.38676 | 0.38676 | 0.38676 | 0 | 0.001308 | 0.352393 | 2,361 | 192 | 61 | 12.296875 | 0.368869 | 0.057603 | 0 | 0.025316 | 0 | 0 | 0.129105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006329 | false | 0 | 0.006329 | 0 | 0.018987 | 0.006329 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ee2b0fe10e60783d49e3b44534e38d6afcf0e04 | 15,352 | py | Python | database_delivery_sdk/api/dbservice/create_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | 5 | 2019-07-31T04:11:05.000Z | 2021-01-07T03:23:20.000Z | database_delivery_sdk/api/dbservice/create_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | database_delivery_sdk/api/dbservice/create_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: create.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from database_delivery_sdk.model.database_delivery import dbinstance_pb2 as database__delivery__sdk_dot_model_dot_database__delivery_dot_dbinstance__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='create.proto',
package='dbservice',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0c\x63reate.proto\x12\tdbservice\x1a>database_delivery_sdk/model/database_delivery/dbinstance.proto\"\xb2\x01\n\x16\x43reateDBServiceRequest\x12J\n\x0f\x63reateDbservice\x18\x01 \x01(\x0b\x32\x31.dbservice.CreateDBServiceRequest.CreateDbservice\x1aL\n\x0f\x43reateDbservice\x12\r\n\x05owner\x18\x01 \x03(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06\x64\x62Type\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x65sc\x18\x04 \x01(\t\"\xc2\x02\n\x17\x43reateDBServiceResponse\x12\x31\n\ndbInstance\x18\x01 \x03(\x0b\x32\x1d.database_delivery.DBInstance\x12\x37\n\x05owner\x18\x02 \x03(\x0b\x32(.dbservice.CreateDBServiceResponse.Owner\x12\x12\n\ninstanceId\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0e\n\x06\x64\x62Type\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x65sc\x18\x06 \x01(\t\x12\x13\n\x0bupdatedTime\x18\x07 \x01(\x03\x1a\x66\n\x05Owner\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x12\n\nuser_email\x18\x02 \x01(\t\x12\x10\n\x08user_tel\x18\x03 \x01(\t\x12\x11\n\tuser_type\x18\x04 \x01(\t\x12\x10\n\x08nickname\x18\x05 \x01(\t\"\x84\x01\n\x1e\x43reateDBServiceResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12\x30\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\".dbservice.CreateDBServiceResponseb\x06proto3')
,
dependencies=[database__delivery__sdk_dot_model_dot_database__delivery_dot_dbinstance__pb2.DESCRIPTOR,])
_CREATEDBSERVICEREQUEST_CREATEDBSERVICE = _descriptor.Descriptor(
name='CreateDbservice',
full_name='dbservice.CreateDBServiceRequest.CreateDbservice',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='owner', full_name='dbservice.CreateDBServiceRequest.CreateDbservice.owner', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dbservice.CreateDBServiceRequest.CreateDbservice.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dbType', full_name='dbservice.CreateDBServiceRequest.CreateDbservice.dbType', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='desc', full_name='dbservice.CreateDBServiceRequest.CreateDbservice.desc', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=194,
serialized_end=270,
)
_CREATEDBSERVICEREQUEST = _descriptor.Descriptor(
name='CreateDBServiceRequest',
full_name='dbservice.CreateDBServiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='createDbservice', full_name='dbservice.CreateDBServiceRequest.createDbservice', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CREATEDBSERVICEREQUEST_CREATEDBSERVICE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=92,
serialized_end=270,
)
_CREATEDBSERVICERESPONSE_OWNER = _descriptor.Descriptor(
name='Owner',
full_name='dbservice.CreateDBServiceResponse.Owner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instanceId', full_name='dbservice.CreateDBServiceResponse.Owner.instanceId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_email', full_name='dbservice.CreateDBServiceResponse.Owner.user_email', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_tel', full_name='dbservice.CreateDBServiceResponse.Owner.user_tel', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_type', full_name='dbservice.CreateDBServiceResponse.Owner.user_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nickname', full_name='dbservice.CreateDBServiceResponse.Owner.nickname', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=493,
serialized_end=595,
)
_CREATEDBSERVICERESPONSE = _descriptor.Descriptor(
name='CreateDBServiceResponse',
full_name='dbservice.CreateDBServiceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dbInstance', full_name='dbservice.CreateDBServiceResponse.dbInstance', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='owner', full_name='dbservice.CreateDBServiceResponse.owner', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceId', full_name='dbservice.CreateDBServiceResponse.instanceId', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='dbservice.CreateDBServiceResponse.name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dbType', full_name='dbservice.CreateDBServiceResponse.dbType', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='desc', full_name='dbservice.CreateDBServiceResponse.desc', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updatedTime', full_name='dbservice.CreateDBServiceResponse.updatedTime', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CREATEDBSERVICERESPONSE_OWNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=273,
serialized_end=595,
)
_CREATEDBSERVICERESPONSEWRAPPER = _descriptor.Descriptor(
name='CreateDBServiceResponseWrapper',
full_name='dbservice.CreateDBServiceResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='dbservice.CreateDBServiceResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='dbservice.CreateDBServiceResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='dbservice.CreateDBServiceResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='dbservice.CreateDBServiceResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=598,
serialized_end=730,
)
_CREATEDBSERVICEREQUEST_CREATEDBSERVICE.containing_type = _CREATEDBSERVICEREQUEST
_CREATEDBSERVICEREQUEST.fields_by_name['createDbservice'].message_type = _CREATEDBSERVICEREQUEST_CREATEDBSERVICE
_CREATEDBSERVICERESPONSE_OWNER.containing_type = _CREATEDBSERVICERESPONSE
_CREATEDBSERVICERESPONSE.fields_by_name['dbInstance'].message_type = database__delivery__sdk_dot_model_dot_database__delivery_dot_dbinstance__pb2._DBINSTANCE
_CREATEDBSERVICERESPONSE.fields_by_name['owner'].message_type = _CREATEDBSERVICERESPONSE_OWNER
_CREATEDBSERVICERESPONSEWRAPPER.fields_by_name['data'].message_type = _CREATEDBSERVICERESPONSE
DESCRIPTOR.message_types_by_name['CreateDBServiceRequest'] = _CREATEDBSERVICEREQUEST
DESCRIPTOR.message_types_by_name['CreateDBServiceResponse'] = _CREATEDBSERVICERESPONSE
DESCRIPTOR.message_types_by_name['CreateDBServiceResponseWrapper'] = _CREATEDBSERVICERESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CreateDBServiceRequest = _reflection.GeneratedProtocolMessageType('CreateDBServiceRequest', (_message.Message,), {
'CreateDbservice' : _reflection.GeneratedProtocolMessageType('CreateDbservice', (_message.Message,), {
'DESCRIPTOR' : _CREATEDBSERVICEREQUEST_CREATEDBSERVICE,
'__module__' : 'create_pb2'
# @@protoc_insertion_point(class_scope:dbservice.CreateDBServiceRequest.CreateDbservice)
})
,
'DESCRIPTOR' : _CREATEDBSERVICEREQUEST,
'__module__' : 'create_pb2'
# @@protoc_insertion_point(class_scope:dbservice.CreateDBServiceRequest)
})
_sym_db.RegisterMessage(CreateDBServiceRequest)
_sym_db.RegisterMessage(CreateDBServiceRequest.CreateDbservice)
CreateDBServiceResponse = _reflection.GeneratedProtocolMessageType('CreateDBServiceResponse', (_message.Message,), {
'Owner' : _reflection.GeneratedProtocolMessageType('Owner', (_message.Message,), {
'DESCRIPTOR' : _CREATEDBSERVICERESPONSE_OWNER,
'__module__' : 'create_pb2'
# @@protoc_insertion_point(class_scope:dbservice.CreateDBServiceResponse.Owner)
})
,
'DESCRIPTOR' : _CREATEDBSERVICERESPONSE,
'__module__' : 'create_pb2'
# @@protoc_insertion_point(class_scope:dbservice.CreateDBServiceResponse)
})
_sym_db.RegisterMessage(CreateDBServiceResponse)
_sym_db.RegisterMessage(CreateDBServiceResponse.Owner)
CreateDBServiceResponseWrapper = _reflection.GeneratedProtocolMessageType('CreateDBServiceResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _CREATEDBSERVICERESPONSEWRAPPER,
'__module__' : 'create_pb2'
# @@protoc_insertion_point(class_scope:dbservice.CreateDBServiceResponseWrapper)
})
_sym_db.RegisterMessage(CreateDBServiceResponseWrapper)
# @@protoc_insertion_point(module_scope)
| 44.498551 | 1,315 | 0.75964 | 1,808 | 15,352 | 6.163717 | 0.102876 | 0.048816 | 0.050879 | 0.037688 | 0.661522 | 0.61154 | 0.579505 | 0.553661 | 0.544688 | 0.53051 | 0 | 0.036142 | 0.118682 | 15,352 | 344 | 1,316 | 44.627907 | 0.787509 | 0.036412 | 0 | 0.680645 | 1 | 0.003226 | 0.218103 | 0.180828 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019355 | 0 | 0.019355 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ee30ebb849ee3cb4fa56074929468e36a9ddd3f | 174 | py | Python | music/class_/audioa/d/flat/__init__.py | jedhsu/music | dea68c4a82296cd4910e786f533b2cbf861377c3 | [
"MIT"
] | null | null | null | music/class_/audioa/d/flat/__init__.py | jedhsu/music | dea68c4a82296cd4910e786f533b2cbf861377c3 | [
"MIT"
] | null | null | null | music/class_/audioa/d/flat/__init__.py | jedhsu/music | dea68c4a82296cd4910e786f533b2cbf861377c3 | [
"MIT"
] | null | null | null | """
*mus . key . d . flt*
"""
from ._key import DfKey
from .major import DfMajor
from .minor import DfMinor
# Names exported by `from ... import *` for the D-flat key package.
__all__ = ["DfKey", "DfMajor", "DfMinor"]
| 10.235294 | 26 | 0.568966 | 20 | 174 | 4.7 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.281609 | 174 | 16 | 27 | 10.875 | 0.752 | 0.12069 | 0 | 0 | 0 | 0 | 0.136691 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.375 | 0 | 0.375 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
2ee3ec79f4dde5dcaa341328384d0337c62fc38c | 1,234 | py | Python | Python/zzz_training_challenge/Python_Challenge/solutions/tests/ch03_recursion/ex08_power_test.py | Kreijeck/learning | eaffee08e61f2a34e01eb8f9f04519aac633f48c | [
"MIT"
] | null | null | null | Python/zzz_training_challenge/Python_Challenge/solutions/tests/ch03_recursion/ex08_power_test.py | Kreijeck/learning | eaffee08e61f2a34e01eb8f9f04519aac633f48c | [
"MIT"
] | null | null | null | Python/zzz_training_challenge/Python_Challenge/solutions/tests/ch03_recursion/ex08_power_test.py | Kreijeck/learning | eaffee08e61f2a34e01eb8f9f04519aac633f48c | [
"MIT"
] | null | null | null | # Beispielprogramm für das Buch "Python Challenge"
#
# Copyright 2020 by Michael Inden
import pytest
from ch03_recursion.solutions.ex08_power_of import is_power_of_2, power_of_iterative, power_of, power_of_optimized
@pytest.mark.parametrize("value, expected", [
    (2, True),
    (3, False),
    (4, True),
    (10, False),
    (16, True),
])
def test_is_power_of2(value, expected):
    # Check powers of two and non-powers on both sides of the boundaries.
    result = is_power_of_2(value)
    assert result == expected
def inputs_and_expected():
    """Shared (base, exponent, expected result) triples for the power tests."""
    cases = [(2, 2), (4, 2), (16, 2), (4, 4), (2, 8), (3, 3)]
    return [(base, exp, base ** exp) for base, exp in cases]
@pytest.mark.parametrize("number, exponent, expected", inputs_and_expected())
def test_power_of(number, exponent, expected):
    result = power_of(number, exponent)
    assert result == expected
@pytest.mark.parametrize("number, exponent, expected", inputs_and_expected())
def test_power_of_optimized(number, exponent, expected):
    result = power_of_optimized(number, exponent)
    assert result == expected
@pytest.mark.parametrize("number, exponent, expected", inputs_and_expected())
def test_power_of_iterative(number, exponent, expected):
    result = power_of_iterative(number, exponent)
    assert result == expected
| 32.473684 | 114 | 0.659643 | 155 | 1,234 | 5.006452 | 0.296774 | 0.108247 | 0.255155 | 0.104381 | 0.552835 | 0.552835 | 0.342784 | 0.342784 | 0.342784 | 0.342784 | 0 | 0.046778 | 0.220421 | 1,234 | 37 | 115 | 33.351351 | 0.759875 | 0.06483 | 0 | 0.272727 | 0 | 0 | 0.08087 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.227273 | false | 0 | 0.090909 | 0.045455 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ef1a0ab46fea7fd75480dfdf673789b57d3a17d | 196 | py | Python | tests.py | BetaS/datetime-quarter | 8013daa8a3c8c50f13da4706f1af1c31370d962e | [
"MIT"
] | 10 | 2020-09-11T22:27:13.000Z | 2021-12-14T12:46:04.000Z | tests.py | BetaS/datetime-quarter | 8013daa8a3c8c50f13da4706f1af1c31370d962e | [
"MIT"
] | null | null | null | tests.py | BetaS/datetime-quarter | 8013daa8a3c8c50f13da4706f1af1c31370d962e | [
"MIT"
] | 1 | 2021-03-27T14:50:41.000Z | 2021-03-27T14:50:41.000Z | from datequarter import DateQuarter
if __name__ == "__main__":
quarter1 = DateQuarter(2019, 1)
quarter2 = DateQuarter(2018, 4)
print(list(DateQuarter.between(quarter1, quarter2)))
| 19.6 | 56 | 0.719388 | 21 | 196 | 6.333333 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08642 | 0.173469 | 196 | 9 | 57 | 21.777778 | 0.734568 | 0 | 0 | 0 | 0 | 0 | 0.041026 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2ef3b099a954ffd320041a645a9be6a11f2923ea | 262 | py | Python | home_logs/public/urls.py | tsaklidis/LogingAPI | 4a77fc3685e6ec369e2a4037a81a3fffcc3056c6 | [
"MIT"
] | 9 | 2019-09-27T22:20:40.000Z | 2019-11-07T22:16:30.000Z | home_logs/public/urls.py | tsaklidis/LogingAPI | 4a77fc3685e6ec369e2a4037a81a3fffcc3056c6 | [
"MIT"
] | null | null | null | home_logs/public/urls.py | tsaklidis/LogingAPI | 4a77fc3685e6ec369e2a4037a81a3fffcc3056c6 | [
"MIT"
] | 2 | 2019-09-27T22:20:44.000Z | 2019-10-04T08:11:45.000Z | from django.conf.urls import url
from home_logs.public import views
# Raw strings are used for every regex pattern: the original non-raw
# '...\w\-...' relies on invalid escape sequences, which emit a
# DeprecationWarning on Python 3.6+ and are slated to become syntax errors.
# The resulting runtime pattern strings are unchanged.
urlpatterns = [
    # Public landing page.
    url(r'^$', views.public, name='public'),
    # Language-specific public page, e.g. /lang/el/.
    url(r'^lang/(?P<lang>[\w\-]+)/', views.public, name='public_el'),
    # Page shown when the user has no access rights.
    url(r'^norights/$', views.no_rights, name='no_rights'),
]
| 21.833333 | 68 | 0.637405 | 36 | 262 | 4.527778 | 0.527778 | 0.134969 | 0.184049 | 0.257669 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.141221 | 262 | 11 | 69 | 23.818182 | 0.724444 | 0 | 0 | 0 | 0 | 0 | 0.232824 | 0.091603 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2c0996023995fe07352b8bfd5b8c3c4fe2175639 | 7,574 | py | Python | engine/geometry/point_2d.py | codehearts/pickles-fetch-quest | ca9b3c7fe26acb50e1e2d654d068f5bb953bc427 | [
"MIT"
] | 3 | 2017-12-07T19:17:36.000Z | 2021-07-29T18:24:25.000Z | engine/geometry/point_2d.py | codehearts/pickles-fetch-quest | ca9b3c7fe26acb50e1e2d654d068f5bb953bc427 | [
"MIT"
] | 41 | 2017-11-11T06:00:08.000Z | 2022-03-28T23:27:25.000Z | engine/geometry/point_2d.py | codehearts/pickles-fetch-quest | ca9b3c7fe26acb50e1e2d654d068f5bb953bc427 | [
"MIT"
] | 2 | 2018-08-31T23:49:00.000Z | 2021-09-21T00:42:48.000Z | import operator
class Point2d(object):
    """A two dimensional point in space.

    Arithmetic and comparison operators accept another :cls:`Point2d`, a
    two-item indexable (x at index 0, y at index 1), or a plain number
    (applied to both coordinates).

    Attributes:
        x (int): The x coordinate of the point.
        y (int): The y coordinate of the point.
    """

    def __init__(self, x, y):
        """Creates a new two dimensional point.

        Args:
            x (int): The x coordinate of the point.
            y (int): The y coordinate of the point.
        """
        super(Point2d, self).__init__()
        self.x = x
        self.y = y

    def set(self, other):
        """Sets this point from another object.

        If `other` is another :cls:`Point2d`, the x and y coordinates will be
        assigned to the x and y coordinates of the other point.

        If `other` is an iterable, the first index is assigned to the x
        coordinate and the second index is assigned to the y coordinate.

        If `other` is a number, both coordinates will be assigned to it.

        Args:
            other (:obj:`object`): The value to assign this point to.
        """
        try:
            self.x, self.y = other.x, other.y
        except AttributeError:
            try:
                self.x, self.y = other[0], other[1]
            except TypeError:
                # Not indexable: treat it as a scalar for both coordinates.
                self.x = other
                self.y = other

    def __repr__(self):
        """Returns a human-readable string representation of the point.

        Returns:
            A string in the format "Point2d(x, y)".
        """
        return 'Point2d({}, {})'.format(self.x, self.y)

    def __getitem__(self, key):
        """Access x coordinate at index 0 or y coordinate at index 1.

        Args:
            key (mixed): Key to access. 0 for x coordinate, 1 for y coordinate.

        Returns:
            The x or y coordinate, depending on the key.

        Raises:
            IndexError: If an invalid index is requested.
        """
        if key == 0:
            return self.x
        elif key == 1:
            return self.y
        raise IndexError('Invalid index for coordinate of 2d point')

    def _call_with_other(self, fn, other):
        """Calls a function for both coordinates with another object.

        Used to prevent code duplication for operator overloading. The
        coordinates of `other` are resolved in the same order as :fn:`set`:
        attribute access, then indexing, then scalar fallback.

        Bug fix over the previous revision: the result is returned from each
        branch directly instead of from a ``finally`` clause. The old
        ``finally: return`` silently swallowed unexpected exceptions and
        raised a confusing NameError when the scalar fallback itself failed,
        because the result variables were never bound.

        Args:
            fn (callable): Function to call with an individual coordinate
                and `other`. Will be called with `self.x` and `other` before
                `self.y` and `other`.
            other (:obj:`object`): The other object to pass to the function.

        Returns:
            A tuple of the return values of the function when called with
            `self.x`, `self.y`, and `other`.
        """
        try:
            return (fn(self.x, other.x), fn(self.y, other.y))
        except AttributeError:
            pass  # `other` has no x/y attributes; fall through to indexing.

        try:
            return (fn(self.x, other[0]), fn(self.y, other[1]))
        except TypeError:
            # Not indexable either: apply `other` as a scalar. Any exception
            # raised by `fn` here now propagates to the caller.
            return (fn(self.x, other), fn(self.y, other))

    def __eq__(self, other):
        """Determines if this point is equal to another object.

        Args:
            other (:obj:`object`): The value to compare against the point.
        """
        equalities = self._call_with_other(operator.eq, other)
        return equalities[0] and equalities[1]

    def __ne__(self, other):
        """Determines if this point is not equal to another object.

        Args:
            other (:obj:`object`): The value to compare against the point.
        """
        equalities = self._call_with_other(operator.ne, other)
        return equalities[0] or equalities[1]

    def __add__(self, other):
        """Adds to the x and y coordinates, returning a new point.

        See the class docstring for how `other` is interpreted.

        Args:
            other (:obj:`object`): The value to add to the point.
        """
        return Point2d(*self._call_with_other(operator.add, other))

    def __sub__(self, other):
        """Subtracts from the x and y coordinates, returning a new point.

        See the class docstring for how `other` is interpreted.

        Args:
            other (:obj:`object`): The value to subtract from the point.
        """
        return Point2d(*self._call_with_other(operator.sub, other))

    def __mul__(self, other):
        """Multiplies the x and y coordinates, returning a new point.

        See the class docstring for how `other` is interpreted.

        Args:
            other (:obj:`object`): The value to multiply the point by.
        """
        return Point2d(*self._call_with_other(operator.mul, other))

    def __floordiv__(self, other):
        """Floor-divides the x and y coordinates, returning a new point.

        See the class docstring for how `other` is interpreted.

        Args:
            other (:obj:`object`): The value to divide the point by.
        """
        return Point2d(*self._call_with_other(operator.floordiv, other))

    def __iadd__(self, other):
        """Adds to the x and y coordinates in place.

        See :fn:`__add__` for documentation on how `other` will be processed.

        Args:
            other (:obj:`object`): The value to add to the point.
        """
        self.x, self.y = self._call_with_other(operator.add, other)
        return self

    def __imul__(self, other):
        """Multiplies the x and y coordinates in place.

        See :fn:`__mul__` for documentation on how `other` will be processed.

        Args:
            other (:obj:`object`): The value to multiply the point by.
        """
        self.x, self.y = self._call_with_other(operator.mul, other)
        return self

    def __ifloordiv__(self, other):
        """Floor-divides the x and y coordinates in place.

        See :fn:`__floordiv__` for documentation on how `other` will be
        processed.

        Args:
            other (:obj:`object`): The value to divide the point by.
        """
        self.x, self.y = self._call_with_other(operator.floordiv, other)
        return self

    def __round__(self, digits=None):
        """Rounds the x and y coordinates to the number of digits.

        Note that, unlike the usual ``round()`` contract, this mutates the
        point in place and returns ``self`` (preserved for existing callers).

        Kwargs:
            digits (int, optional): The number of digits to round the point to.
        """
        self.x, self.y = round(self.x, digits), round(self.y, digits)
        return self
2c1f1a246a11a186c8603becc0ef30630d44a4c7 | 4,392 | py | Python | cbot/adapters/storageadapters/storage_adapter.py | wangyitao/cbot | 6b2500f5118ddd5ef581f31104e70e5a57b72f7d | [
"MIT"
] | 8 | 2018-10-18T09:15:36.000Z | 2019-09-01T04:42:59.000Z | cbot/adapters/storageadapters/storage_adapter.py | wangyitao/cbot | 6b2500f5118ddd5ef581f31104e70e5a57b72f7d | [
"MIT"
] | 1 | 2018-10-19T06:35:38.000Z | 2018-10-19T06:35:38.000Z | cbot/adapters/storageadapters/storage_adapter.py | wangyitao/cbot | 6b2500f5118ddd5ef581f31104e70e5a57b72f7d | [
"MIT"
] | 5 | 2018-10-19T05:56:26.000Z | 2019-09-01T04:43:11.000Z | import logging
class StorageAdapter(object):
    """
    Abstract base class that every storage (database) backend must implement.
    """

    def __init__(self, base_query=None, *args, **kwargs):
        """
        Initialise attributes shared by all storage adapters.

        Kwargs:
            logger: optional logger instance; defaults to this module's logger.
        """
        self.kwargs = kwargs
        self.logger = kwargs.get('logger', logging.getLogger(__name__))
        # Whether the backend supports filtered queries; consulted by
        # generate_base_query().
        self.adapter_supports_queries = True
        # NOTE(review): the `base_query` parameter is ignored here — the
        # attribute always starts as None and is only populated later by
        # generate_base_query(). Confirm whether the parameter should be used.
        self.base_query = None
    def get_model(self, model_name):
        """
        Return the model class for the given (case-insensitive) model name.

        A pre-built model can be injected through the constructor kwarg
        '<model_name>_model'; otherwise the adapter's own
        get_<model_name>_model() factory method is looked up and called.
        """
        # Model names must be lowercase for both the kwarg and method lookup.
        model_name = model_name.lower()
        kwarg_model_key = '%s_model' % (model_name,)
        if kwarg_model_key in self.kwargs:
            return self.kwargs.get(kwarg_model_key)
        # Raises AttributeError if the subclass defines no such factory.
        get_model_method = getattr(self, 'get_%s_model' % (model_name,))
        return get_model_method()
def generate_base_query(self, chatterbot, session_id):
"""
为存储数据库创建基本查询
"""
if self.adapter_supports_queries:
for filter_instance in chatterbot.filters:
self.base_query = filter_instance.filter_selection(chatterbot, session_id)
def count(self):
"""
返回数据库中的条目数
"""
raise self.AdapterMethodNotImplementedError(
'The `count` method is not implemented by this adapter.'
)
def find(self, statement_text):
"""
查询,如果存在返回查询对象
"""
raise self.AdapterMethodNotImplementedError(
'The `find` method is not implemented by this adapter.'
)
def remove(self, statement_text):
"""
删除指定文本
"""
raise self.AdapterMethodNotImplementedError(
'The `remove` method is not implemented by this adapter.'
)
def filter(self, **kwargs):
"""
返回数据库中的对象列表
"""
raise self.AdapterMethodNotImplementedError(
'The `filter` method is not implemented by this adapter.'
)
def update(self, statement):
"""
更新数据库
"""
raise self.AdapterMethodNotImplementedError(
'The `update` method is not implemented by this adapter.'
)
def get_latest_response(self, conversation_id):
"""
获取最新响应
"""
raise self.AdapterMethodNotImplementedError(
'The `get_latest_response` method is not implemented by this adapter.'
)
def create_conversation(self):
"""
创建会话
"""
raise self.AdapterMethodNotImplementedError(
'The `create_conversation` method is not implemented by this adapter.'
)
def add_to_conversation(self, conversation_id, statement, response):
"""
将新语句添加到会话
"""
raise self.AdapterMethodNotImplementedError(
'The `add_to_conversation` method is not implemented by this adapter.'
)
def get_random(self):
"""
从数据库中随机获取对话
"""
raise self.AdapterMethodNotImplementedError(
'The `get_random` method is not implemented by this adapter.'
)
def drop(self):
"""
删除连接到指定适配器的数据库
"""
raise self.AdapterMethodNotImplementedError(
'The `drop` method is not implemented by this adapter.'
)
def get_response_statements(self):
"""
获取响应语句
"""
statement_list = self.filter()
responses = set()
to_remove = list()
for statement in statement_list:
for response in statement.in_response_to:
responses.add(response.text)
for statement in statement_list:
if statement.text not in responses:
to_remove.append(statement)
for statement in to_remove:
statement_list.remove(statement)
return statement_list
class EmptyDatabaseException(Exception):
def __init__(self,
value='The database currently contains no entries. At least one entry is expected. You may need to train your chat bot to populate your database.'):
self.value = value
def __str__(self):
return repr(self.value)
class AdapterMethodNotImplementedError(NotImplementedError):
"""
适配器方法未实现错误
"""
pass
| 27.974522 | 170 | 0.565801 | 402 | 4,392 | 5.99005 | 0.273632 | 0.037375 | 0.170266 | 0.182724 | 0.232973 | 0.171512 | 0.171512 | 0.171512 | 0.076827 | 0 | 0 | 0 | 0.354053 | 4,392 | 156 | 171 | 28.153846 | 0.848784 | 0.040073 | 0 | 0.151899 | 0 | 0.012658 | 0.203298 | 0.017032 | 0 | 0 | 0 | 0 | 0 | 1 | 0.202532 | false | 0.012658 | 0.012658 | 0.012658 | 0.303797 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
257c4fefc4d66fd3836e6fdb946b6bb85897b207 | 547 | py | Python | cogs/members.py | ticibi/discord-pkb | 8258c0aca5c99dd9bf9c04951ace11f5c1197a39 | [
"MIT"
] | 1 | 2021-07-03T22:40:25.000Z | 2021-07-03T22:40:25.000Z | cogs/members.py | ticibi/discord-pkb | 8258c0aca5c99dd9bf9c04951ace11f5c1197a39 | [
"MIT"
] | null | null | null | cogs/members.py | ticibi/discord-pkb | 8258c0aca5c99dd9bf9c04951ace11f5c1197a39 | [
"MIT"
] | null | null | null | from discord import Embed
from discord.ext import commands
from database import db
class Members(commands.Cog, name='members'):
    """Cog that reacts to guild membership events.

    Registered on the bot via :func:`setup` below.
    """

    def __init__(self, client):
        # The bot/client instance this cog is attached to.
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        # Announce on stdout that this extension finished loading.
        print(f'{__name__} extension loaded')

    @commands.Cog.listener()
    async def on_member_join(self, member):
        # Record the new member — presumably inserts the member's data
        # into every relevant table; confirm against database.db.
        db.insert_all(member)

    @commands.Cog.listener()
    async def on_member_remove(self, member):
        # Intentionally a no-op: member removals are not persisted.
        pass
def setup(client):
    """Extension entry point: attach the Members cog to the bot."""
    members_cog = Members(client)
    client.add_cog(members_cog)
| 21.88 | 45 | 0.685558 | 72 | 547 | 5 | 0.444444 | 0.122222 | 0.158333 | 0.2 | 0.275 | 0.275 | 0.194444 | 0 | 0 | 0 | 0 | 0 | 0.20841 | 547 | 24 | 46 | 22.791667 | 0.831409 | 0 | 0 | 0.176471 | 0 | 0 | 0.062157 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0.058824 | 0.176471 | 0 | 0.352941 | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
258dd905d9ca2637341cb6a88aa1ce1fca6641af | 20,446 | py | Python | tests/shakelib/rupture_distance_tests.py | ynthdhj/shakemap | 2771b8aee6b22f065cc80632c894a0ba77829619 | [
"CC0-1.0"
] | null | null | null | tests/shakelib/rupture_distance_tests.py | ynthdhj/shakemap | 2771b8aee6b22f065cc80632c894a0ba77829619 | [
"CC0-1.0"
] | null | null | null | tests/shakelib/rupture_distance_tests.py | ynthdhj/shakemap | 2771b8aee6b22f065cc80632c894a0ba77829619 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python
import os
import sys
import numpy as np
import openquake.hazardlib.geo as geo
from shakelib.rupture.edge_rupture import EdgeRupture
from shakelib.rupture.quad_rupture import QuadRupture
from shakelib.rupture.origin import Origin
from shakelib.sites import Sites
from shakelib.distance import get_distance
homedir = os.path.dirname(os.path.abspath(__file__))  # where is this script?
shakedir = os.path.abspath(os.path.join(homedir, '..', '..'))
# Make the shakemap package importable when running this file directly.
sys.path.insert(0, shakedir)
def test_multisegment_discordant():
    """Check GC2 (U, T) coordinates for a discordant multi-quad rupture.

    Builds a synthetic two-group rupture whose segments strike in
    opposing directions, evaluates the generalized coordinates on a
    20x20 site grid, and compares against precomputed reference arrays.
    """
    # The one thing that isn't check above is discordancy for segments
    # with multiple quads. For this, we need a synthetic example.

    # Quad corner coordinates in km (x east, y north, z down); the first
    # three quads form group 0, the last three group 1.
    x0 = np.array([0, 1, -1, 10, 9, 7])
    y0 = np.array([0, 10, 20, 40, 35, 30])
    z0 = np.array([0, 0, 0, 0, 0, 0])
    x1 = np.array([1, -1, 0, 9, 7, 6])
    y1 = np.array([10, 20, 30, 35, 30, 25])
    z1 = np.array([0, 0, 0, 0, 0, 0])
    x2 = np.array([3, 1, 2, 7, 5, 4])
    y2 = np.array([10, 20, 30, 35, 30, 25])
    z2 = np.array([10, 10, 10, 10, 10, 10])
    x3 = np.array([2, 3, 1, 8, 7, 5])
    y3 = np.array([0, 10, 20, 40, 35, 30])
    z3 = np.array([10, 10, 10, 10, 10, 10])
    epilat = 32.15270
    epilon = -115.30500

    # Project the km offsets back to lon/lat around the epicenter.
    proj = geo.utils.get_orthographic_projection(
        epilon - 1, epilon + 1, epilat + 1, epilat - 1)
    lon0, lat0 = proj(x0, y0, reverse=True)
    lon1, lat1 = proj(x1, y1, reverse=True)
    lon2, lat2 = proj(x2, y2, reverse=True)
    lon3, lat3 = proj(x3, y3, reverse=True)

    # Make an Origin object; most of the 'event' values don't matter for
    # this example
    origin = Origin({'lat': 0, 'lon': 0, 'depth': 0, 'mag': 7.2, 'eventsourcecode': ''})
    rup = QuadRupture.fromVertices(
        lon0, lat0, z0, lon1, lat1, z1, lon2, lat2, z2, lon3, lat3, z3,
        origin, group_index=[0, 0, 0, 1, 1, 1])

    # Sites: a 20x20 surface grid padded 0.25 degrees past the rupture.
    buf = 0.25
    lat = np.linspace(np.nanmin(rup.lats) - buf, np.nanmax(rup.lats) + buf, 20)
    lon = np.linspace(np.nanmin(rup.lons) - buf, np.nanmax(rup.lons) + buf, 20)
    lons, lats = np.meshgrid(lon, lat)
    dep = np.zeros_like(lons)
    x, y = proj(lon, lat)
    rupx, rupy = proj(rup.lons, rup.lats)

    # Calculate U and T
    dtypes = ['U', 'T']
    dists = get_distance(dtypes, lats, lons, dep, rup)

    # Reference U (along-strike GC2 coordinate) values for the grid.
    targetU = np.array(
        [[-28.53228275, -28.36479713, -28.20139732, -28.0407734,
          -27.88135558, -27.72144153, -27.55935946, -27.39362017,
          -27.22300147, -27.04653062, -26.86338215, -26.67275638,
          -26.47381287, -26.26569449, -26.04762427, -25.81902477,
          -25.57961136, -25.32943282, -25.06885791, -24.79852214],
         [-23.53750292, -23.3748086, -23.21793537, -23.06521934,
          -22.91449689, -22.76331684, -22.60928211, -22.45042208,
          -22.28542121, -22.11355532, -21.93435402, -21.74720475,
          -21.55115107, -21.34497916, -21.12749377, -20.89781118,
          -20.6555466, -20.40086149, -20.13439948, -19.85716145],
         [-18.53499939, -18.37689929, -18.22732841, -18.08427516,
          -17.94468687, -17.80472632, -17.66045115, -17.50880802,
          -17.3484421, -17.17963435, -17.0032098, -16.81921732,
          -16.62638972, -16.42258419, -16.20564846, -15.9741218,
          -15.72753538, -15.4663671, -15.19180844, -14.9054813],
         [-13.52283359, -13.36797542, -13.22589288, -13.09466537,
          -12.97028551, -12.84653536, -12.71591089, -12.57212088,
          -12.41335561, -12.24319318, -12.06681006, -11.88598424,
          -11.69798166, -11.49796348, -11.28169605, -11.04691388,
          -10.79343174, -10.52262594, -10.23677602, -9.93851158],
         [-8.49936685, -8.34357094, -8.20650964, -8.08786858,
          -7.98403171, -7.88628837, -7.78005273, -7.64833307,
          -7.48359988, -7.29992491, -7.11862682, -6.94410189,
          -6.76618701, -6.5727842, -6.35634881, -6.11465447,
          -5.84925708, -5.56369035, -5.26212482, -4.94857454],
         [-3.46638168, -3.30047216, -3.15914418, -3.04618465,
          -2.96252939, -2.90194067, -2.84436315, -2.75029014,
          -2.56983592, -2.33744275, -2.1512136, -1.99833104,
          -1.84066354, -1.6541107, -1.43071517, -1.17252753,
          -0.88592286, -0.57817222, -0.25582315, 0.07585567],
         [1.56416954, 1.75393848, 1.9183586, 2.04909316,
          2.13723278, 2.17776584, 2.18272501, 2.20967639,
          2.37405656, 2.65073289, 2.80205222, 2.90973407,
          3.05124404, 3.2505182, 3.50336116, 3.7967575,
          4.11742779, 4.45465822, 4.80070204, 5.15033407],
         [6.5633489, 6.78740885, 6.99419348, 7.17551069,
          7.31963558, 7.4113505, 7.43666779, 7.40177458,
          7.40517136, 7.58520044, 7.62013169, 7.71596777,
          7.90558457, 8.17213015, 8.49008681, 8.83763176,
          9.19937294, 9.56556659, 9.9305469, 10.29132309],
         [11.48996073, 11.74301446, 11.99016964, 12.22782156,
          12.44984059, 12.6446727, 12.78798484, 12.82584849,
          12.61992833, 12.26579742, 12.32166685, 12.54665462,
          12.86628045, 13.23578462, 13.62571822, 14.01882924,
          14.40617707, 14.78388296, 15.15089889, 15.5076165],
         [16.31383216, 16.57376544, 16.83189511, 17.08626411,
          17.33309437, 17.56429108, 17.76005623, 17.85853532,
          17.57101025, 17.32637346, 17.45075419, 17.77199513,
          18.16933168, 18.58284635, 18.9891851, 19.37985879,
          19.75324557, 20.11079653, 20.4549905, 20.78837053],
         [21.03975749, 21.28450315, 21.5243142, 21.75603974,
          21.97469496, 22.17298057, 22.34310053, 22.49668569,
          22.73940191, 22.70030633, 22.95351405, 23.35967832,
          23.75891016, 24.14867803, 24.51536915, 24.85878249,
          25.18398203, 25.49615514, 25.79932964, 26.09638269],
         [25.70484089, 25.92709225, 26.14280395, 26.35119497,
          26.55363501, 26.75827099, 26.9915523, 27.31779086,
          27.77993211, 27.71070831, 28.13624949, 28.723482,
          29.25285078, 29.66404032, 30.00169474, 30.30044315,
          30.57916576, 30.84804427, 31.1126134, 31.37586841],
         [30.35406633, 30.5585145, 30.75843356, 30.95627127,
          31.15811912, 31.3763124, 31.63114968, 31.94156189,
          32.23691802, 32.38759301, 32.86915665, 33.83467935,
          34.46125278, 34.89905345, 35.25111257, 35.55095664,
          35.82150686, 36.07720619, 36.32643896, 36.57385362],
         [35.0222379, 35.21734711, 35.41081942, 35.60589495,
          35.80774808, 36.02313791, 36.25826988, 36.51619168,
          36.81025966, 37.21777129, 37.86674108, 38.66578072,
          39.25203723, 39.78060643, 40.20815617, 40.5606039,
          40.86634527, 41.14457482, 41.40732554, 41.66197722],
         [39.73046099, 39.92514041, 40.12152415, 40.32316112,
          40.5350467, 40.76393316, 41.01937758, 41.3172128,
          41.68596492, 42.16604148, 42.77622755, 43.447503,
          44.03771478, 44.55012468, 45.00551259, 45.40376857,
          45.75505135, 46.07204699, 46.36554362, 46.64361367],
         [44.4876174, 44.68959464, 44.89710008, 45.11420443,
          45.34646809, 45.60143197, 45.88932906, 46.22363997,
          46.61975585, 47.0884227, 47.62307543, 48.1913408,
          48.74937117, 49.26945799, 49.74327902, 50.17123158,
          50.55810895, 50.91098842, 51.23731582, 51.54375617],
         [49.29279265, 49.50696882, 49.73006999, 49.96625305,
          50.22080319, 50.50022572, 50.81209441, 51.1642666,
          51.56290694, 52.00913021, 52.49553006, 53.00565389,
          53.51861282, 54.01614414, 54.48672101, 54.9254339,
          55.33212663, 55.70951516, 56.06170563, 56.39317058],
         [54.13906629, 54.3671694, 54.60643024, 54.86053563,
          55.13377911, 55.43088558, 55.75658576, 56.1148189,
          56.50752978, 56.93329478, 57.38640012, 57.85715119,
          58.33367994, 58.80451404, 59.26065475, 59.69644542,
          60.10938419, 60.49940252, 60.86803179, 61.21767916],
         [59.01741908, 59.25887491, 59.51248349, 59.78119592,
          60.06816694, 60.37651862, 60.70895927, 61.0672529,
          61.45160192, 61.86010542, 62.28853397, 62.73062937,
          63.17894547, 63.62598375, 64.06523791, 64.49185106,
          64.90281064, 65.2967858, 65.67377362, 66.03469546],
         [63.9193099, 64.17236414, 64.4376317, 64.71732366,
          65.01362255, 65.32847988, 65.66334836, 66.0188704,
          66.39457546, 66.7886684, 67.19800022, 67.61828012,
          68.04451487, 68.47157851, 68.89476917, 69.31022713,
          69.71515194, 70.10782673, 70.4875021, 70.85420436]]
    )
    np.testing.assert_allclose(targetU, dists['U'], atol=0.01)

    # Reference T (off-strike GC2 coordinate) values for the grid.
    targetT = np.array(
        [[-2.27427469e+01, -1.97498544e+01, -1.67512900e+01,
          -1.37464632e+01, -1.07350712e+01, -7.71715083e+00,
          -4.69305811e+00, -1.66336318e+00, 1.37131605e+00,
          4.41047613e+00, 7.45381136e+00, 1.05011799e+01,
          1.35524779e+01, 1.66074913e+01, 1.96657949e+01,
          2.27267294e+01, 2.57894503e+01, 2.88530154e+01,
          3.19164798e+01, 3.49789747e+01],
         [-2.30778766e+01, -2.00896906e+01, -1.70950973e+01,
          -1.40931667e+01, -1.10834219e+01, -8.06600712e+00,
          -5.04171582e+00, -2.01179123e+00, 1.02248614e+00,
          4.06025218e+00, 7.10129626e+00, 1.01459367e+01,
          1.31946312e+01, 1.62475702e+01, 1.93044511e+01,
          2.23644788e+01, 2.54265185e+01, 2.84892997e+01,
          3.15515954e+01, 3.46123426e+01],
         [-2.33971472e+01, -2.04144525e+01, -1.74245193e+01,
          -1.44256870e+01, -1.14169177e+01, -8.39830615e+00,
          -5.37141115e+00, -2.33902937e+00, 6.95823925e-01,
          3.73133431e+00, 6.76769593e+00, 9.80663091e+00,
          1.28500821e+01, 1.58991008e+01, 1.89534737e+01,
          2.20119662e+01, 2.50728111e+01, 2.81341606e+01,
          3.11943854e+01, 3.42522163e+01],
         [-2.36965870e+01, -2.07206976e+01, -1.77370901e+01,
          -1.47426715e+01, -1.17347885e+01, -8.71247709e+00,
          -5.67801094e+00, -2.63761285e+00, 4.00625914e-01,
          3.43182302e+00, 6.45782532e+00, 9.48491128e+00,
          1.25187545e+01, 1.55616657e+01, 1.86127822e+01,
          2.16694756e+01, 2.47286680e+01, 2.77876297e+01,
          3.08443066e+01, 3.38973527e+01],
         [-2.39698399e+01, -2.10022612e+01, -1.80281475e+01,
          -1.50423801e+01, -1.20388157e+01, -9.01204040e+00,
          -5.96160398e+00, -2.89867328e+00, 1.52194374e-01,
          3.17268218e+00, 6.17334725e+00, 9.17699572e+00,
          1.21964990e+01, 1.52330975e+01, 1.82821226e+01,
          2.13375815e+01, 2.43943933e+01, 2.74490375e+01,
          3.04994435e+01, 3.35446330e+01],
         [-2.42070742e+01, -2.12471979e+01, -1.82855675e+01,
          -1.53163304e+01, -1.23296744e+01, -9.31127857e+00,
          -6.24535210e+00, -3.12882361e+00, -2.24460581e-02,
          2.95354485e+00, 5.89215412e+00, 8.86387424e+00,
          1.18748249e+01, 1.49128245e+01, 1.79640055e+01,
          2.10182501e+01, 2.40696313e+01, 2.71153177e+01,
          3.01543919e+01, 3.31869788e+01],
         [-2.43971375e+01, -2.14368866e+01, -1.84826148e+01,
          -1.55321207e+01, -1.25786621e+01, -9.60654678e+00,
          -6.58612151e+00, -3.48118311e+00, -3.16555025e-01,
          2.61618307e+00, 5.53740540e+00, 8.52666510e+00,
          1.15623361e+01, 1.46149780e+01, 1.76674294e+01,
          2.07125025e+01, 2.37483764e+01, 2.67756033e+01,
          2.97955606e+01, 3.28097430e+01],
         [-2.45384925e+01, -2.15583842e+01, -1.85874288e+01,
          -1.56290738e+01, -1.26867853e+01, -9.76140655e+00,
          -6.84407754e+00, -3.90089971e+00, -8.41806596e-01,
          2.14754495e+00, 5.18583472e+00, 8.26271822e+00,
          1.13266091e+01, 1.43684333e+01, 1.73916223e+01,
          2.04017469e+01, 2.34034936e+01, 2.64002111e+01,
          2.93941282e+01, 3.23866586e+01],
         [-2.46576775e+01, -2.16355610e+01, -1.86129545e+01,
          -1.55919156e+01, -1.25763765e+01, -9.57306672e+00,
          -6.59044329e+00, -3.62352541e+00, -5.92041388e-01,
          2.33255341e+00, 5.29498494e+00, 8.24834463e+00,
          1.11833819e+01, 1.41167617e+01, 1.70571082e+01,
          2.00065102e+01, 2.29645946e+01, 2.59302937e+01,
          2.89023967e+01, 3.18797332e+01],
         [-2.48161623e+01, -2.17489533e+01, -1.86651328e+01,
          -1.55589864e+01, -1.24224388e+01, -9.24466730e+00,
          -6.01521475e+00, -2.75148770e+00, 3.89519039e-01,
          2.99589525e+00, 5.45696689e+00, 8.01247078e+00,
          1.07291540e+01, 1.35565782e+01, 1.64461360e+01,
          1.93723515e+01, 2.23222250e+01, 2.52881249e+01,
          2.82650171e+01, 3.12494172e+01],
         [-2.50857405e+01, -2.20002811e+01, -1.88926336e+01,
          -1.57550887e+01, -1.25770789e+01, -9.34497451e+00,
          -6.04430316e+00, -2.67290100e+00, 5.40854953e-01,
          2.30509492e+00, 3.58183843e+00, 6.23701436e+00,
          9.28727128e+00, 1.23205706e+01, 1.53428945e+01,
          1.83666035e+01, 2.13934954e+01, 2.44218171e+01,
          2.74496472e+01, 3.04757209e+01],
         [-2.55082697e+01, -2.24454912e+01, -1.93710045e+01,
          -1.62824768e+01, -1.31767102e+01, -1.00469827e+01,
          -6.86985653e+00, -3.54681638e+00, 1.07062999e-01,
          3.34891657e-01, -1.70694750e-01, 3.57896940e+00,
          7.17013928e+00, 1.05232789e+01, 1.37976070e+01,
          1.70230221e+01, 2.02076136e+01, 2.33576919e+01,
          2.64794914e+01, 2.95785985e+01],
         [-2.60778515e+01, -2.30695744e+01, -2.00684150e+01,
          -1.70790651e+01, -1.41074315e+01, -1.11587507e+01,
          -8.23273307e+00, -5.33306966e+00, -2.80144302e+00,
          -1.84760416e+00, -1.05368779e+00, 1.26163211e+00,
          4.90086292e+00, 8.53883059e+00, 1.20996577e+01,
          1.55589098e+01, 1.89237978e+01, 2.22114952e+01,
          2.54390313e+01, 2.86203790e+01],
         [-2.67537229e+01, -2.38123298e+01, -2.08964272e+01,
          -1.80168638e+01, -1.51896230e+01, -1.24401995e+01,
          -9.81536176e+00, -7.41008520e+00, -5.38073414e+00,
          -3.78262975e+00, -2.29669890e+00, -3.53057240e-01,
          3.13642477e+00, 6.97021789e+00, 1.07026969e+01,
          1.42945488e+01, 1.77655640e+01, 2.11406146e+01,
          2.44409375e+01, 2.76832489e+01],
         [-2.74832153e+01, -2.46028623e+01, -2.17593854e+01,
          -1.89653378e+01, -1.62381218e+01, -1.36022404e+01,
          -1.10914555e+01, -8.74572618e+00, -6.58963863e+00,
          -4.58336507e+00, -2.57607747e+00, -2.67233150e-01,
          2.82788692e+00, 6.36737407e+00, 9.93334021e+00,
          1.34440609e+01, 1.68846862e+01, 2.02577163e+01,
          2.35710668e+01, 2.68337230e+01],
         [-2.82199728e+01, -2.53838486e+01, -2.25869234e+01,
          -1.98388981e+01, -1.71512612e+01, -1.45365893e+01,
          -1.20059297e+01, -9.56220853e+00, -7.18799023e+00,
          -4.83006994e+00, -2.39120744e+00, 2.51308627e-01,
          3.17331949e+00, 6.33022626e+00, 9.61428455e+00,
          1.29426788e+01, 1.62705993e+01, 1.95770961e+01,
          2.28539081e+01, 2.60992146e+01],
         [-2.89332200e+01, -2.61222251e+01, -2.33461951e+01,
          -2.06105222e+01, -1.79200485e+01, -1.52776479e+01,
          -1.26817390e+01, -1.01225741e+01, -7.57801870e+00,
          -5.01122873e+00, -2.37434916e+00, 3.78303328e-01,
          3.27093827e+00, 6.29527723e+00, 9.41912399e+00,
          1.26046423e+01, 1.58204753e+01, 1.90448689e+01,
          2.22643265e+01, 2.54712657e+01],
         [-2.96082809e+01, -2.68067500e+01, -2.40331802e+01,
          -2.12894659e+01, -1.85760763e+01, -1.58910124e+01,
          -1.32284735e+01, -1.05774862e+01, -7.92111801e+00,
          -5.23724922e+00, -2.50179068e+00, 3.05790568e-01,
          3.19666197e+00, 6.16975035e+00, 9.21353475e+00,
          1.23109519e+01, 1.54443017e+01, 1.85982825e+01,
          2.17611016e+01, 2.49243957e+01],
         [-3.02420504e+01, -2.74395034e+01, -2.46578094e+01,
          -2.18967353e+01, -1.91546206e+01, -1.64278219e+01,
          -1.37101816e+01, -1.09927140e+01, -8.26378719e+00,
          -5.51007519e+00, -2.71836365e+00, 1.22111758e-01,
          3.01754598e+00, 5.96851903e+00, 8.97043180e+00,
          1.20150997e+01, 1.50927299e+01, 1.81935916e+01,
          2.13090724e+01, 2.44321477e+01],
         [-3.08377073e+01, -2.80281994e+01, -2.52335334e+01,
          -2.24524366e+01, -1.96825082e+01, -1.69199811e+01,
          -1.41595768e+01, -1.13945492e+01, -8.61701306e+00,
          -5.81861191e+00, -2.99148017e+00, -1.29317230e-01,
          2.77170749e+00, 5.71249079e+00, 8.69110042e+00,
          1.17033684e+01, 1.47437429e+01, 1.78061436e+01,
          2.08846464e+01, 2.39739290e+01]]
    )
    np.testing.assert_allclose(targetT, dists['T'], atol=0.01)
def test_EdgeRupture_vs_QuadRupture():
    """Check that EdgeRupture and QuadRupture agree on Rrup and Rjb.

    Builds the same two-segment dipping rupture both as a QuadRupture
    (from trace + width + dip) and as an EdgeRupture (from explicit
    top/bottom edge coordinates), then compares the two distance grids.
    """
    # Sites stuff: a 0.01-degree grid centered on (-122.15, 37.15).
    sites = Sites.fromCenter(-122.15, 37.15, 1.5, 1.5, 0.01, 0.01)
    sm_dict = sites._GeoDict
    west = sm_dict.xmin
    east = sm_dict.xmax
    south = sm_dict.ymin
    north = sm_dict.ymax
    nx = sm_dict.nx
    ny = sm_dict.ny
    lats = np.linspace(north, south, ny)
    lons = np.linspace(west, east, nx)
    lon, lat = np.meshgrid(lons, lats)
    dep = np.zeros_like(lon)

    # Construct QuadRupture
    xp0 = np.array([-122.0, -122.5])
    yp0 = np.array([37.1, 37.4])
    xp1 = np.array([-121.7, -122.3])
    yp1 = np.array([37.2, 37.2])
    zp = np.array([0, 6])
    widths = np.array([30, 20])
    dips = np.array([30, 40])
    # Most of the 'event' values don't matter for this comparison.
    origin = Origin({'lat': 0, 'lon': 0, 'depth': 0, 'mag': 7.2, 'eventsourcecode': ''})
    qrup = QuadRupture.fromTrace(xp0, yp0, xp1, yp1, zp, widths, dips, origin)
    rrup_q = qrup.computeRrup(lon, lat, dep)
    rjb_q = qrup.computeRjb(lon, lat, dep)

    # Construct equivalent EdgeRupture; the bottom-edge coordinates were
    # derived from the trace/width/dip geometry above.
    toplons = np.array([-122.0, -121.7, -122.5, -122.3])
    toplats = np.array([37.1, 37.2, 37.4, 37.2])
    topdeps = np.array([0, 0, 6, 6])
    botlons = np.array([-121.886864, -121.587568, -122.635467, -122.435338])
    botlats = np.array([36.884527, 36.984246, 37.314035, 37.114261])
    botdeps = np.array([15.0000, 14.9998, 18.8558, 18.8559])
    group_index = [0, 0, 1, 1]
    erup = EdgeRupture.fromArrays(
        toplons, toplats, topdeps, botlons, botlats, botdeps,
        origin, group_index)
    rrup_e = erup.computeRrup(lon, lat, dep)
    rjb_e = erup.computeRjb(lon, lat, dep)

    # Check that QuadRupture and EdgeRupture give the same result
    # (we check the absolute values of QuadRupture elsewhere)
    np.testing.assert_allclose(rrup_e, rrup_q, atol=0.35)
    np.testing.assert_allclose(rjb_e, rjb_q, atol=0.35)

    # For plotting (kept for manual debugging):
    # plt.imshow(rjb_q, interpolation="none")
    # plt.imshow(rjb_e, interpolation="none")
    # fig = plt.contourf(lon, lat, rrup_e,
    #                    levels = range(0, 100, 1),
    #                    cmap=plt.cm.spectral)
    # cbar = plt.colorbar(fig)
    # for q in qrup.getQuadrilaterals():
    #     x = [a.longitude for a in q]+[q[0].longitude]
    #     y = [a.latitude for a in q]+[q[0].latitude]
    #     plt.plot(x, y, 'r')
# Allow running this test module directly, without a test runner.
if __name__ == "__main__":
    test_multisegment_discordant()
    test_EdgeRupture_vs_QuadRupture()
| 54.233422 | 89 | 0.569549 | 2,768 | 20,446 | 4.186777 | 0.394509 | 0.029511 | 0.00233 | 0.002071 | 0.031064 | 0.0214 | 0.019846 | 0.019846 | 0.007939 | 0.007939 | 0 | 0.585132 | 0.274968 | 20,446 | 376 | 90 | 54.37766 | 0.196641 | 0.041426 | 0 | 0.006006 | 0 | 0 | 0.00378 | 0 | 0 | 0 | 0 | 0 | 0.012012 | 1 | 0.006006 | false | 0 | 0.027027 | 0 | 0.033033 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
259b4a700e195f13682a38c91a3e33adf3894111 | 650 | py | Python | feed/admin.py | Zadigo/twitter_template | 110dd857bce219bd596a30db6e49acffca7af286 | [
"MIT"
] | null | null | null | feed/admin.py | Zadigo/twitter_template | 110dd857bce219bd596a30db6e49acffca7af286 | [
"MIT"
] | null | null | null | feed/admin.py | Zadigo/twitter_template | 110dd857bce219bd596a30db6e49acffca7af286 | [
"MIT"
] | null | null | null | from django.contrib import admin
from accounts.admin import custom_site
from feed.models import Conversation, Hashtag, Reply
class ConversationAdmin(admin.ModelAdmin):
    """Admin list configuration for Conversation records."""

    list_display = ['user', 'created_on']
    date_hierarchy = 'created_on'
class ReplyAdmin(admin.ModelAdmin):
    """Admin list configuration for Reply records."""

    list_display = ['user', 'created_on']
    date_hierarchy = 'created_on'
class HashtagAdmin(admin.ModelAdmin):
    """Admin list configuration for Hashtag records.

    Bug fix: the original declared ``date_hiearchy = 'crated_on'`` — the
    attribute name was misspelled (so Django silently ignored it) and
    the value did not name a real field. Corrected to match the sibling
    admin classes.
    """

    list_display = ['name', 'created_on']
    date_hierarchy = 'created_on'
    search_fields = ['name']
# Register the feed models on the project's custom admin site
# (accounts.admin.custom_site) rather than the default admin.site.
custom_site.register(Conversation, ConversationAdmin)
custom_site.register(Reply, ReplyAdmin)
custom_site.register(Hashtag, HashtagAdmin)
| 27.083333 | 53 | 0.753846 | 75 | 650 | 6.306667 | 0.413333 | 0.095137 | 0.120507 | 0.164905 | 0.27907 | 0.27907 | 0.27907 | 0.27907 | 0.27907 | 0.27907 | 0 | 0 | 0.143077 | 650 | 23 | 54 | 28.26087 | 0.849192 | 0 | 0 | 0.25 | 0 | 0 | 0.115385 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1875 | 0 | 0.8125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
25a3b07a3aab64a6c063831ebb68bc20ae2010fd | 233 | py | Python | about_us/views.py | mohammad-Serpoush/Charity-Management-Service | 320b9f8f784a1043b79ccf940f69f8353b499759 | [
"MIT"
] | 1 | 2022-03-19T13:19:13.000Z | 2022-03-19T13:19:13.000Z | about_us/views.py | salmanAndroidDev/charity-app | f2ea53c91c9cf46a63af6d3bef211c75dd5219bc | [
"MIT"
] | null | null | null | about_us/views.py | salmanAndroidDev/charity-app | f2ea53c91c9cf46a63af6d3bef211c75dd5219bc | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.shortcuts import render
def about_us(request):
    """Render the about-us page with every registered user as 'members'."""
    members = get_user_model().objects.all()
    return render(request, 'about_us.html', {'members': members})
| 23.3 | 52 | 0.712446 | 31 | 233 | 5.16129 | 0.645161 | 0.125 | 0.15 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.180258 | 233 | 9 | 53 | 25.888889 | 0.837696 | 0 | 0 | 0 | 0 | 0 | 0.085837 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
25b6b7450bf8e4f48351e390679db71130f9387b | 2,709 | py | Python | prov_vo/migrations/0002_parameter_entityrights.py | kristinriebe/django-prov-vo | 5bd86eb58833fe591004e6ef431b2b3deae7a62c | [
"Apache-2.0"
] | 1 | 2018-12-11T05:53:55.000Z | 2018-12-11T05:53:55.000Z | prov_vo/migrations/0002_parameter_entityrights.py | kristinriebe/django-prov-vo | 5bd86eb58833fe591004e6ef431b2b3deae7a62c | [
"Apache-2.0"
] | null | null | null | prov_vo/migrations/0002_parameter_entityrights.py | kristinriebe/django-prov-vo | 5bd86eb58833fe591004e6ef431b2b3deae7a62c | [
"Apache-2.0"
] | 1 | 2021-06-23T13:09:05.000Z | 2021-06-23T13:09:05.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-15 22:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (Django 1.11.3).

    Adds the ``Parameter`` and ``ParameterDescription`` models and widens the
    choice list of ``Entity.rights``. Do not edit the operation contents of an
    applied migration; create a follow-up migration instead.
    """

    dependencies = [
        ('prov_vo', '0001_initial'),
    ]

    operations = [
        # Parameter: bare id/value pair; linked to its description via the
        # ForeignKey added by the AddField operation at the end of this list.
        migrations.CreateModel(
            name='Parameter',
            fields=[
                ('id', models.CharField(max_length=128, primary_key=True, serialize=False)),
                ('value', models.CharField(blank=True, max_length=128, null=True)),
            ],
        ),
        # ParameterDescription: VOTable-style metadata (datatype, ucd, utype,
        # unit, value bounds) describing a Parameter.
        migrations.CreateModel(
            name='ParameterDescription',
            fields=[
                ('id', models.CharField(max_length=128, primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=128, null=True)),
                ('annotation', models.CharField(blank=True, max_length=512, null=True)),
                ('datatype', models.CharField(blank=True, choices=[('vo:boolean', 'vo:boolean'), ('vo:bit', 'vo:bit'), ('vo:unsignedByte', 'vo:unsignedByte'), ('vo:short', 'vo:short'), ('vo:int', 'vo:int'), ('vo:long', 'vo:long'), ('vo:char', 'vo:char'), ('vo:unicodeChar', 'vo:unicodeChar'), ('vo:float', 'vo:float'), ('vo:double', 'vo:double'), ('vo:floatComplex', 'vo:floatComplex'), ('vo:doubleComplex', 'vo:doubleComplex')], max_length=128, null=True)),
                ('xtype', models.CharField(blank=True, choices=[('timestamp', 'timestamp'), ('position', 'position')], max_length=128, null=True)),
                ('unit', models.CharField(blank=True, max_length=128, null=True)),
                ('ucd', models.CharField(blank=True, max_length=128, null=True)),
                ('utype', models.CharField(blank=True, max_length=128, null=True)),
                ('arraysize', models.IntegerField(blank=True, null=True)),
                ('minval', models.CharField(blank=True, max_length=128, null=True)),
                ('maxval', models.CharField(blank=True, max_length=128, null=True)),
                ('options', models.CharField(blank=True, max_length=1024, null=True)),
            ],
        ),
        # NOTE(review): the stored choice key 'voprov:propriatary' is misspelled
        # relative to its display value 'voprov:proprietary'. Renaming the key
        # would require a data migration for existing rows — confirm before fixing.
        migrations.AlterField(
            model_name='entity',
            name='rights',
            field=models.CharField(blank=True, choices=[('voprov:public', 'voprov:public'), ('voprov:secure', 'voprov:secure'), ('voprov:propriatary', 'voprov:proprietary')], max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='parameter',
            name='description',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='prov_vo.ParameterDescription'),
        ),
    ]
| 53.117647 | 458 | 0.598007 | 295 | 2,709 | 5.4 | 0.318644 | 0.131827 | 0.090395 | 0.180791 | 0.422473 | 0.338983 | 0.272442 | 0.272442 | 0.272442 | 0.079096 | 0 | 0.030476 | 0.224806 | 2,709 | 50 | 459 | 54.18 | 0.728095 | 0.025102 | 0 | 0.27907 | 1 | 0 | 0.207733 | 0.010614 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.069767 | 0 | 0.139535 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
25c54b3f7a8611ce881b78258a38f5fb1fbcdd1d | 314 | py | Python | output/models/nist_data/atomic/positive_integer/schema_instance/nistschema_sv_iv_atomic_positive_integer_min_inclusive_5_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/nist_data/atomic/positive_integer/schema_instance/nistschema_sv_iv_atomic_positive_integer_min_inclusive_5_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/nist_data/atomic/positive_integer/schema_instance/nistschema_sv_iv_atomic_positive_integer_min_inclusive_5_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.nist_data.atomic.positive_integer.schema_instance.nistschema_sv_iv_atomic_positive_integer_min_inclusive_5_xsd.nistschema_sv_iv_atomic_positive_integer_min_inclusive_5 import NistschemaSvIvAtomicPositiveIntegerMinInclusive5
__all__ = [
"NistschemaSvIvAtomicPositiveIntegerMinInclusive5",
]
| 52.333333 | 242 | 0.914013 | 33 | 314 | 7.969697 | 0.606061 | 0.159696 | 0.239544 | 0.152091 | 0.365019 | 0.365019 | 0.365019 | 0.365019 | 0.365019 | 0 | 0 | 0.013333 | 0.044586 | 314 | 5 | 243 | 62.8 | 0.863333 | 0 | 0 | 0 | 0 | 0 | 0.152866 | 0.152866 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
25d781f398bf9863ae061e6d0a9789cc9fc808cd | 20,735 | py | Python | manifestparser.py | jursonovicst/manifestparser | 21b1727bceea3ae6318f41139e374a20a35a537b | [
"MIT"
] | null | null | null | manifestparser.py | jursonovicst/manifestparser | 21b1727bceea3ae6318f41139e374a20a35a537b | [
"MIT"
] | null | null | null | manifestparser.py | jursonovicst/manifestparser | 21b1727bceea3ae6318f41139e374a20a35a537b | [
"MIT"
] | null | null | null | # import pycurl
# from StringIO import *
# import xml.etree.ElementTree as ET
# from urlparse import urlparse
# import random
#
#
# class SmoothStreamingMedia: # SmoothStreamingMedia
#
# @staticmethod
# def parsemanifest(url, proxy="", localip=None):
# cv = pycurl.Curl()
# cv.setopt(pycurl.FOLLOWLOCATION, True)
# cv.setopt(pycurl.USERAGENT, "manifestparser")
# cv.setopt(pycurl.SSL_VERIFYHOST, False)
# cv.setopt(pycurl.SSL_VERIFYPEER, False)
# cv.setopt(pycurl.PROXY, proxy)
# if localip is not None:
# cv.setopt(pycurl.INTERFACE, localip)
#
# buffer = StringIO()
# cv.setopt(cv.URL, url.rstrip())
# cv.setopt(pycurl.WRITEFUNCTION, buffer.write)
# cv.perform()
#
# retcode = int(cv.getinfo(pycurl.RESPONSE_CODE))
#
# if retcode >= 400:
# cv.close()
# raise Exception("Server returned %d, cannot fetch manifest: %s" % (retcode, url))
#
# cv.close()
#
# xml = ET.fromstring(buffer.getvalue())
#
# if xml.tag != "SmoothStreamingMedia":
# raise NotImplementedError("Smooth streaming tag %s has not been implemented" % xml.tag)
#
# smoothstreamingmedia = SmoothStreamingMedia(xml.get('MajorVersion', None), xml.get('MinorVersion', None),
# xml.get('TimeScale', 10000000), xml.get('Duration', 0),
# xml.get('IsLive', "false"), url.replace("/Manifest", ""))
#
# for element in xml.iter("StreamIndex"):
# smoothstreamingmedia.addstreamelement(StreamElement.parsexml(element))
#
# return smoothstreamingmedia
#
# def __init__(self, majorversion, minorversion, timescale, duration, islive, baseurl):
# if majorversion is None or int(majorversion) != 2:
# raise Exception(
# "Violation of 2.2.2.1: The major version of the Manifest Response message. MUST be set to 2. '%s' received" % majorversion)
# self.majorversion = int(majorversion)
#
# if minorversion is None or int(minorversion) not in [0, 2]:
# raise Exception(
# "Violation of 2.2.2.1: The minor version of the Manifest Response message. MUST be set to 0 or 2. '%s.%s' received" % (
# majorversion, minorversion))
# self.minorversion = int(minorversion)
#
# self.timescale = int(timescale)
# self.duration = int(duration)
# self.islive = True if str(islive).lower() in ['true'] else False
#
# self._streamelements = []
# self._baseurl = baseurl
#
# def addstreamelement(self, streamelement):
# self._streamelements.append(streamelement)
#
# def iterfragmenturls(self):
# for streamelement in self._streamelements:
# for url in streamelement.iterfragmenturls():
# yield self._baseurl + url
#
# def printtree(self):
# print("==========")
# print(self.__class__.__name__)
# print("majorv:\t%d" % self.majorversion)
# print("minorv:\t%d" % self.minorversion)
# print("timesc:\t%s" % str(self.timescale) if self.timescale is not None else "None")
# print("durati:\t%s" % str(self.duration) if self.duration is not None else "None")
# print("live stream" if self.islive else "on-demand stream")
# for streamelement in self._streamelements:
# streamelement.printtree()
#
#
# class StreamElement: # StreamIndex
#
# @staticmethod
# def parsexml(xml):
# if xml.tag != "StreamIndex":
# raise NotImplementedError("Invalid StreamIndex element: '%s'" % xml.tag)
#
# streamelement = StreamElement(xml.get('Type', None), xml.get('TimeScale', None),
# xml.get('Name', xml.attrib['Type']), xml.attrib['Url'])
#
# for element in xml.iter("QualityLevel"):
# streamelement.addtrackelement(TrackElement.parsexml(element))
#
# for element in xml.iter("c"):
# streamelement.addstreamfragment(StreamFragment.parsexml(element))
#
# return streamelement
#
# def __init__(self, type, streamtimescale, name, url):
# if (type is None or type not in ["video", "audio", "text"]):
# raise Exception("Violation of 2.2.2.3: The type of the stream: video, audio, or text. '%s' received" % type)
# self.type = str(type)
#
# self.streamtimescale = int(streamtimescale) if streamtimescale is not None else None
# self.name = str(name) if name is not None else None
# self.url = str(url)
#
# self._trackelements = []
# self._streamfragments = []
#
# def addtrackelement(self, trackelement):
# self._trackelements.append(trackelement)
#
# def addstreamfragment(self, streamfragment):
# self._streamfragments.append(streamfragment)
#
# def iterfragmenturls(self):
# for trackelement in self._trackelements:
# url = self.url.replace("{bitrate}", str(trackelement.bitrate))
#
# lastduration = 0
# starttime = 0
# for streamfragment in self._streamfragments:
# if streamfragment.fragmenttime is not None:
# starttime = streamfragment.fragmenttime
# else:
# starttime += lastduration
#
# if (streamfragment.fragmentduration is not None):
# lastduration = streamfragment.fragmentduration
#
# yield "/" + url.replace("{start time}", str(starttime))
#
# def printtree(self):
# print("==========")
# print(self.__class__.__name__)
# print("type:\t%s" % self.type)
# if (self.name is not None):
# print("name:\t%s" % self.name)
# if (self.url is not None):
# print("url:\t%s" % self.url)
# for trackelement in self._trackelements:
# trackelement.printtree()
# for streamfragment in self._streamfragments:
# streamfragment.printtree()
#
#
# class TrackElement: # QualityLevel
#
# @staticmethod
# def parsexml(xml):
# if xml.tag != "QualityLevel":
# raise NotImplementedError("Invalid TrackElement element: '%s'" % xml.tag)
#
# return TrackElement(xml.get('Bitrate', None))
#
# def __init__(self, bitrate):
# if (bitrate is None):
# raise Exception(
# "Violation of 2.2.2.5: The following fields are required and MUST be present in TrackAttributes: IndexAttribute and BitrateAttribute.")
#
# self.bitrate = int(bitrate)
#
# def printtree(self):
# print("--" + self.__class__.__name__ + "--")
# print("bitrt:\t%d" % self.bitrate)
#
#
# class StreamFragment:
#
# @staticmethod
# def parsexml(xml):
# if xml.tag != "c":
# raise NotImplementedError("Invalid Fragment element: '%s'" % xml.tag)
#
# return StreamFragment(xml.get('t', None), xml.get('d', None))
#
# def __init__(self, fragmenttime, fragmentduration):
# if (fragmenttime is None and fragmentduration is None):
# raise Exception(
# "Violation of 2.2.2.6: Either one or both of FragmentDuration and FragmentTime fields are required and MUST be present.")
# self.fragmenttime = int(fragmenttime) if fragmenttime is not None else None
# self.fragmentduration = int(fragmentduration) if fragmentduration is not None else None
#
# def printtree(self):
# print("--" + self.__class__.__name__ + "--")
# if (self.fragmenttime is not None):
# print("ftime:\t%d" % self.fragmenttime)
# if (self.fragmentduration is not None):
# print("fdur:\t%d" % self.fragmentduration)
#
#
# class ManifestParser:
# ST_OTHER = 0
# ST_DASH = 1
# ST_HSS = 2
# ST_HSSLIVE = 3
#
# @staticmethod
# def streamtypetostring(type):
# if type == ManifestParser.ST_DASH:
# return "Dash"
# elif type == ManifestParser.ST_HSS:
# return "HSS"
# elif type == ManifestParser.ST_HSSLIVE:
# return "HSS Live"
#
# return "Unknown"
#
# def __init__(self, proxy=""):
# self._url = None
# self._streamtype = ManifestParser.ST_OTHER
# self._baseurl = None
# self._manifest = None # XMLTree element for the dash/hss Manifest file
# self._proxy = proxy # proxy string
# self._cv = None
#
# def getstreamtype(self):
# return self._streamtype
#
# def getstreamtypestring(self):
# return ManifestParser.streamtypetostring(self.getstreamtype())
#
# # Returns true or false
# def fetchmanifest(self, url, localip=None):
# self._url = None
# self._streamtype = ManifestParser.ST_OTHER
# self._baseurl = None
# self._manifest = None
#
# self._cv = pycurl.Curl()
# self._cv.setopt(pycurl.FOLLOWLOCATION, True)
# self._cv.setopt(pycurl.USERAGENT, "manifestparser")
# self._cv.setopt(pycurl.SSL_VERIFYHOST, False)
# self._cv.setopt(pycurl.SSL_VERIFYPEER, False)
# self._cv.setopt(pycurl.PROXY, self._proxy)
# if localip is not None:
# self._cv.setopt(pycurl.INTERFACE, localip)
#
# buffer = StringIO()
# self._cv.setopt(self._cv.URL, url.rstrip())
# self._cv.setopt(pycurl.WRITEFUNCTION, buffer.write)
# self._cv.perform()
#
# retcode = int(self._cv.getinfo(pycurl.RESPONSE_CODE))
#
# if retcode >= 400:
# raise Exception("Server returned %d, cannot fetch manifest: %s" % (retcode, url))
#
# self._url = self._cv.getinfo(pycurl.EFFECTIVE_URL)
# self._cv.close()
#
# return self.parsemanifest(buffer.getvalue(), self._url)
#
# def parsemanifest(self, buff, url):
# self._url = url
# self._manifest = ET.fromstring(buff)
#
# if self._manifest.tag == "SmoothStreamingMedia":
# if self._manifest.get('MajorVersion', '') != "2":
# raise NotImplementedError(
# "Smooth streaming protocol version: %s.x is not implemented." % self._manifest.attrib[
# 'MajorVersion'])
#
# self._streamtype = ManifestParser.ST_HSS
# if self._manifest.get('IsLive', 'false').lower() == 'true':
# self._streamtype = ManifestParser.ST_HSSLIVE
#
# elif self._manifest.tag == "{urn:mpeg:DASH:schema:MPD:2011}MPD":
# if self._manifest.get('profiles') != 'urn:mpeg:dash:profile:isoff-on-demand:2011':
# raise NotImplementedError(
# "MPEG-DASH profile: %s is not implemented." % self._manifest.attrib['profiles'])
#
# self._url = self._url.rstrip()
# self._streamtype = ManifestParser.ST_DASH
# d = urlparse(self._url)
# self._baseurl = d.scheme + "://" + d.netloc + '/'.join(d.path.split('/')[0:-1]) + '/'
#
# else:
# raise NotImplementedError("Unknown manifest: %s" % self._manifest.tag)
#
# return True
#
# def getduration(self):
# if self._streamtype == ManifestParser.ST_HSS:
# return int(self._manifest.get('Duration')) / self.gettimescale()
# elif self._streamtype == ManifestParser.ST_HSSLIVE:
# return None
#
# raise Exception("Not implemented for %s streams." % self.getstreamtypestring())
#
# # returns the timescale in local unit
# def gettimescale(self):
# if self._streamtype == ManifestParser.ST_HSS or self._streamtype == ManifestParser.ST_HSSLIVE:
# return int(self._manifest.get('TimeScale', 10000000)) # HSS default value
#
# raise Exception("Not implemented for %s streams." % self.getstreamtypestring())
#
# def gettypes(self):
# if self._streamtype == ManifestParser.ST_HSS or self._streamtype == ManifestParser.ST_HSSLIVE:
# types = []
#
# for qlev in self._manifest.findall("./StreamIndex"):
# if str(qlev.attrib['Type']) not in types:
# types.append(str(qlev.attrib['Type']))
#
# for type in types:
# yield type
#
# else:
# raise Exception("Not implemented for %s streams." % self.getstreamtypestring())
#
# def getbitratesfor(self, type="video"):
# if self._streamtype == ManifestParser.ST_HSS or self._streamtype == ManifestParser.ST_HSSLIVE:
# for qlev in self._manifest.findall("./StreamIndex[@Type='" + type + "']/QualityLevel"):
# yield int(qlev.attrib['Bitrate'])
#
# elif self._streamtype == ManifestParser.ST_DASH:
# ns = {'ns': 'urn:mpeg:DASH:schema:MPD:2011'}
# # for actor in self._root.findall(".//ns:ContentComponent[@contentType='"+contentType+"']../ns:Representation", ns):
# aset = self._manifest.find(".//ns:ContentComponent[@contentType='" + type + "']..", ns)
# if aset is None:
# raise Exception("No %s ContentComponent in manifest found" % type)
# for repr in aset.findall("./ns:Representation", ns):
# yield int(repr.attrib['bandwidth'])
#
# def getmaxbitratefor(self, type="video"):
# return max(i for i in self.getbitratesfor(type))
#
# def getminbitratefor(self, type="video"):
# return min(i for i in self.getbitratesfor(type))
#
# def getrndbitratefor(self, type="video"):
# bitrates = []
# for i in self.getbitratesfor(type):
# bitrates.append(i)
# return random.choice(bitrates)
#
# # returns a tuple of {url, byterange, fragmentlength} for all fragments
# def getfragmenturlsfor(self, bitrate, type="video"):
# if self._streamtype == ManifestParser.ST_HSS:
# timescale = self.gettimescale()
# baseurl = self._url.replace('/Manifest', '') + '/' + \
# self._manifest.find("./StreamIndex[@Type='" + type + "']").attrib['Url'].replace('{bitrate}',
# str(bitrate))
#
# t = 0
# for c in self._manifest.findall("./StreamIndex[@Type='" + type + "']/c"):
# d = int(c.attrib['d'])
# yield {'url': baseurl.replace('{start time}', str(t)), 'byterange': None, 'time': t / timescale,
# 'duration': d / timescale}
# t += d
#
# elif self._streamtype == ManifestParser.ST_HSSLIVE:
# timescale = self.gettimescale()
# baseurl = self._url.replace('/Manifest', '') + '/' + \
# self._manifest.find("./StreamIndex[@Type='" + type + "']").attrib['Url'].replace('{bitrate}',
# str(bitrate))
#
# t0 = None
# for c in self._manifest.findall("./StreamIndex[@Type='" + type + "']/c[@t]"):
# t = int(c.attrib['t'])
# if t0 is None:
# t0 = t
#
# raise Exception("This should be implemented!!!")
# d = int(c.attrib['d'])
# yield {'url': baseurl.replace('{start time}', str(t)), 'byterange': None, 'time': (t - t0) / timescale,
# 'duration': d / timescale}
#
# if c.attrib.has_key('d'):
# while True:
# yield {'url': baseurl.replace('{start time}', str(t)), 'byterange': None,
# 'time': (t - t0) / timescale, 'duration': d / timescale}
# t += d
#
#
# elif self._streamtype == ManifestParser.ST_DASH: # TODO: check and rewrite
# mp4 = MP4Parser()
# repsegurl = mp.getrepsegurl(bitrate, type)
# repseg = mp.fetchdata(repsegurl)
# repurl = self._getrepurl(bitrate, type)
# offset = int(repsegurl['byterange'].split('-')[1])
# logging.debug(offset)
# for ret in mp4.getsidxsubsegments(repseg):
# yield {'url': repurl, 'byterange': "%d-%d" % (ret['from'], ret['to']), 'time': ret['duration']}
#
# # returns a tuple of {path, byterange, fragmentlength} for all fragments
# def getfragmentpathsfor(self, bitrate, type="video"):
# for ret in self.getfragmenturlsfor(bitrate, type):
# yield {'path': urlparse(ret['url']).path, 'byterange': ret['byterange'], 'time': ret['time'],
# 'duration': ret['duration']}
#
# def _getrepurl(self, bitrate, type="video"): # TODO: type check not present
# url = ""
# if self._streamtype == ManifestParser.ST_DASH:
# ns = {'ns': 'urn:mpeg:DASH:schema:MPD:2011'}
# url = self._baseurl + self._manifest.find(
# ".//ns:Representation[@bandwidth='" + str(bitrate) + "']/ns:BaseURL", ns).text
# return url
#
# def getrepsegurl(self, bitrate, type="video"): # TODO: type check not present
# url = {'url': "", 'byterange': None}
# if self._streamtype == ManifestParser.ST_DASH:
# ns = {'ns': 'urn:mpeg:DASH:schema:MPD:2011'}
# url['byterange'] = \
# self._manifest.find(".//ns:Representation[@bandwidth='" + str(bitrate) + "']/ns:SegmentBase",
# ns).attrib[
# 'indexRange']
# url['url'] = self._getrepurl(bitrate)
# return url
#
# def getrepiniturl(self, bitrate):
# url = {'url': "", 'byterange': None}
# if self._streamtype == ManifestParser.ST_DASH:
# ns = {'ns': 'urn:mpeg:DASH:schema:MPD:2011'}
# url['byterange'] = \
# self._manifest.find(
# ".//ns:Representation[@bandwidth='" + str(bitrate) + "']/ns:SegmentBase/ns:Initialization",
# ns).attrib['range']
# url['url'] = self._getrepurl(bitrate)
# return url
#
# def fetchdata(self, url):
# try:
# buffer = StringIO()
# self._cv.setopt(self._cv.URL, url['url'])
# self._cv.setopt(pycurl.WRITEFUNCTION, buffer.write)
# self._cv.setopt(pycurl.FOLLOWLOCATION, True)
# self._cv.setopt(pycurl.USERAGENT, "dashclient")
# if args.proxy != "-":
# self._cv.setopt(pycurl.PROXY, args.proxy)
# logging.debug("Using proxy " + args.proxy)
# else:
# self._cv.setopt(pycurl.PROXY, "")
# if url['byterange'] is not None:
# self._cv.setopt(pycurl.RANGE, url['byterange'])
# self._cv.setopt(pycurl.SSL_VERIFYHOST, False)
# self._cv.setopt(pycurl.SSL_VERIFYPEER, False)
# logging.info("Downloading fragment: " + url['url'] + " (bytes: " + url['byterange'] + ")")
# self._cv.perform()
# except TypeError:
# logging.error("wrong argument: " + url)
# return False
# except pycurl.error:
# logging.warning(self._cv.errstr() + ": " + url)
# return False
#
# if self._cv.getinfo(pycurl.RESPONSE_CODE) >= 300:
# logging.warning(self._cv.errstr() + ": " + url)
# return False
#
# return buffer
#
# def geturlsfor(self, type="video"):
# bw = []
# if self._streamtype == ManifestParser.ST_HSS:
# pass
# elif self._streamtype == ManifestParser.ST_DASH:
# ns = {'ns': 'urn:mpeg:DASH:schema:MPD:2011'}
# # for actor in self._root.findall(".//ns:ContentComponent[@contentType='"+contentType+"']../ns:Representation", ns):
# aset = self._manifest.find(".//ns:ContentComponent[@contentType='" + type + "']..", ns)
# if aset is None:
# logging.error("No %s ContentComponent in manifest found" % type)
# return []
# logging.debug(
# "found %s ContentComponent (id=%s)" % (type, aset.find("./ns:ContentComponent", ns).attrib['id']))
# for repr in aset.findall("./ns:Representation", ns):
# bw.append(int(repr.attrib['bandwidth']))
# logging.debug(
# "found " + type + " Representation (id=%s, bitrate=%s)" % (
# repr.attrib['id'], repr.attrib['bandwidth']))
#
# return bw
| 43.108108 | 153 | 0.558958 | 2,092 | 20,735 | 5.442639 | 0.133843 | 0.015282 | 0.027051 | 0.057966 | 0.46566 | 0.410065 | 0.35895 | 0.308098 | 0.270595 | 0.232215 | 0 | 0.006796 | 0.29742 | 20,735 | 480 | 154 | 43.197917 | 0.77478 | 0.95293 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0.002083 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
25e60ee12a998324d78272a3ce04c198b92ccaa3 | 542 | py | Python | Dataset/Leetcode/train/14/340.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/train/14/340.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/train/14/340.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | class Solution(object):
def XXX(self, strs):
if len(strs) == 0:
return ''
item = strs[0]
rst = []
for k in range(0, len(item)):
z = 1
for j in range(1, len(strs)):
if k >= len(strs[j]):
break
if strs[j][k] != item[k]:
break
z += 1
if z == len(strs):
rst.append(item[k])
if len(rst) != k+1:
break
return ''.join(rst)
| 25.809524 | 41 | 0.348708 | 64 | 542 | 2.953125 | 0.359375 | 0.148148 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02682 | 0.51845 | 542 | 20 | 42 | 27.1 | 0.697318 | 0 | 0 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
25ea766d20fe9d8cb491f9b800dd1529af4d5718 | 805 | py | Python | src/exceptions/file_source_depleted.py | Pejo-306/adastra-python-task | 210a047ef39b9cba8581f77a807669f7805afe02 | [
"MIT"
] | null | null | null | src/exceptions/file_source_depleted.py | Pejo-306/adastra-python-task | 210a047ef39b9cba8581f77a807669f7805afe02 | [
"MIT"
] | null | null | null | src/exceptions/file_source_depleted.py | Pejo-306/adastra-python-task | 210a047ef39b9cba8581f77a807669f7805afe02 | [
"MIT"
] | null | null | null | class FileSourceDepleted(Exception):
"""Exception raised when attempting to read from a depleted source file
Attributes:
filepath(str): path to depleted file
message(str): error message
"""
def __init__(self, filepath: str, message: str = "File source is depleted"):
"""Construct FileSourceDepleted
:param filepath: path to depleted file
:type filepath: str
:param message: error message
:type message: str
"""
self.filepath = filepath
self.message = message
super().__init__(message)
def __str__(self) -> str:
"""Get the informal string representation of the error
:return: full error message
:rtype: str
"""
return f"{self.filepath}: {self.message}"
| 28.75 | 80 | 0.617391 | 87 | 805 | 5.574713 | 0.425287 | 0.068041 | 0.057732 | 0.074227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.291925 | 805 | 27 | 81 | 29.814815 | 0.850877 | 0.478261 | 0 | 0 | 0 | 0 | 0.168224 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
25f72bb92c39dd39976660c62b8b83e46cc7f177 | 2,659 | py | Python | fiaas_mast/metadata_generator.py | xavileon/mast | 9f5195b835cd93704355f73ad317df27932c8951 | [
"Apache-2.0"
] | null | null | null | fiaas_mast/metadata_generator.py | xavileon/mast | 9f5195b835cd93704355f73ad317df27932c8951 | [
"Apache-2.0"
] | null | null | null | fiaas_mast/metadata_generator.py | xavileon/mast | 9f5195b835cd93704355f73ad317df27932c8951 | [
"Apache-2.0"
] | null | null | null | import yaml
from k8s.models.common import ObjectMeta
from requests.exceptions import MissingSchema, InvalidURL
from .common import dict_merge, generate_random_uuid_string, ClientError
class MetadataGenerator:
    """Builds Kubernetes metadata (labels and annotations) for a release.

    Annotations are assembled from the release's Spinnaker/raw tags, and the
    application config itself is fetched through the injected HTTP client.
    """

    def __init__(self, http_client, create_deployment_id=generate_random_uuid_string):
        """
        :param http_client: requests-like client used by ``download_config``
        :param create_deployment_id: zero-argument callable producing a new
            deployment id (defaults to a random UUID string)
        """
        self.http_client = http_client
        self.create_deployment_id = create_deployment_id

    def build_annotations(self, items, prefix=""):
        """Turn (key, value) pairs into string-valued annotations.

        Each key is prepended with *prefix* and each value stringified. When
        ``get_annotation_objects()`` returns a non-empty list, the annotation
        dict is nested under each of those keys instead.
        """
        annotations = {"{}{}".format(prefix, k): str(v) for k, v in items}
        annotation_objects = self.get_annotation_objects()
        return {k: annotations for k in annotation_objects} if annotation_objects else annotations

    def spinnaker_annotations(self, release):
        """Annotations derived from the release's Spinnaker tags (prefixed)."""
        return self.build_annotations(release.spinnaker_tags.items(), "pipeline.schibsted.io/")

    def raw_annotations(self, release):
        """Annotations derived from the release's raw (unprefixed) tags."""
        return self.build_annotations(release.raw_tags.items())

    def merge_tags(self, generator_object, config):
        """Merge tag-derived annotations into ``config['annotations']`` in place."""
        if generator_object.spinnaker_tags:
            dict_merge(config.setdefault("annotations", {}),
                       self.spinnaker_annotations(generator_object))
        if generator_object.raw_tags:
            dict_merge(config.setdefault("annotations", {}),
                       self.raw_annotations(generator_object))
        if generator_object.application_name != generator_object.original_application_name:
            # Record the pre-normalization name so the deployment can be traced
            # back to what the caller originally asked for.
            config.setdefault("annotations", {})['mast'] = {
                'originalApplicationName': generator_object.original_application_name
            }

    def metadata(self, generator_object, namespace, deployment_id):
        """Build the k8s ObjectMeta for the application deployment."""
        application_name = generator_object.application_name
        labels = {"fiaas/deployment_id": deployment_id, "app": application_name}
        annotations = {k: str(v) for k, v in generator_object.metadata_annotations.items()}
        # TODO: Why doesn't annotations default to a dict?
        return ObjectMeta(name=application_name, namespace=namespace,
                          labels=labels, annotations=annotations)

    def get_annotation_objects(self):
        """Hook for subclasses: keys under which annotations should be nested."""
        return []

    def download_config(self, config_url):
        """Fetch the application config from *config_url* and parse it as YAML.

        :raises ClientError: when the URL is malformed (missing scheme/invalid)
        :raises requests.HTTPError: when the server responds with an error status
        """
        try:
            resp = self.http_client.get(config_url)
        except (InvalidURL, MissingSchema) as e:
            raise ClientError("Invalid config_url") from e
        resp.raise_for_status()
        return yaml.safe_load(resp.text)
| 41.546875 | 113 | 0.6871 | 295 | 2,659 | 5.938983 | 0.288136 | 0.094178 | 0.023973 | 0.030822 | 0.264269 | 0.22089 | 0.171804 | 0.148402 | 0.085616 | 0.085616 | 0 | 0.000485 | 0.224144 | 2,659 | 63 | 114 | 42.206349 | 0.848764 | 0.018052 | 0 | 0.122449 | 1 | 0 | 0.073591 | 0.017248 | 0 | 0 | 0 | 0.015873 | 0 | 1 | 0.163265 | false | 0 | 0.081633 | 0.061224 | 0.387755 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
25fbd53e267bed3bd34c846fa57ed5644299a95a | 17,380 | py | Python | tests/cylinder_tests.py | kauevestena/sanit3Dsdi | 6f12add218a9c64b86e3eb85d865117ac07e7299 | [
"MIT"
] | null | null | null | tests/cylinder_tests.py | kauevestena/sanit3Dsdi | 6f12add218a9c64b86e3eb85d865117ac07e7299 | [
"MIT"
] | null | null | null | tests/cylinder_tests.py | kauevestena/sanit3Dsdi | 6f12add218a9c64b86e3eb85d865117ac07e7299 | [
"MIT"
] | null | null | null | # import subprocess
# import numpy as np
# import json
# import geopandas as gpd
# from shapely.geometry import LineString
# import numpy as np
# import os
# import pymesh
# from copy import deepcopy
# # normalized_v = v/np.linalg.norm(v)
# def normalize_vec(input_vec):
# try:
# return input_vec/np.linalg.norm(input_vec)
# except:
# print(np.linalg.norm(input_vec),'check for zero norm')
# return input_vec * 0
# # class plane:
# # def __init__(self,pt_onplane:np.array,normal:np.array):
# # self.d = -pt_onplane.dot(normal)
# # self.a = normal[0]
# # self.b = normal[1]
# # self.c = normal[2]
# # def a_point(self,X,Y,Z):
# # return self.a*X + self.b*Y + self.c*Z + self.d
# def plane_as_4vec(normal:np.array,pt_onplane:np.array):
# '''
# plane as 4vec:
# - normal vector
# - point on plane
# '''
# return np.array([*normal,-np.dot(normal,pt_onplane)])
# def pt_onplane(plane4vec,X,Y):
# # plane equation, with z=f(X,Y)
# if not plane4vec[2] < 0.0001:
# Z = - (plane4vec[0]*X+plane4vec[1]*Y+plane4vec[3])/plane4vec[2]
# return np.array([X,Y,Z])
# else:
# Z = X + 0.1*X
# Y = - - (plane4vec[0]*X+plane4vec[2]*Z+plane4vec[3])/plane4vec[1]
# return np.array([X,Y,Z])
# def gdec2rad(gdec):
# return gdec * np.pi/180
# def circumference_3D(center_pt,radius,v1,v2,n_points=32):
# '''
# a circumference in 3D:
# - Center Point
# - The Radius
# thx: https://math.stackexchange.com/a/1184089/307651
# '''
# angles = np.linspace(0,2*np.pi,n_points)
# point_list = []
# for angle in angles:
# # circle_point = center_pt + (radius*np.cos(angle)*v1) + (radius*np.sin(angle)*v2)
# circle_point = center_pt + radius * (np.cos(angle)*v2 + np.sin(angle)*v1)
# point_list.append(circle_point)
# return np.array(point_list)
# def reverse_order_rangelist(a,b):
# l1 = list(range(-a+1,-b+1))
# return list(map(abs,l1))
# def segments(curve):
# '''
# code from
# https://stackoverflow.com/a/62061414/4436950
# thx Georgy
# '''
# return list(map(LineString, zip(curve.coords[:-1], curve.coords[1:])))
# def create_edgeslist(num_vertices,as_np=True):
# edgelist = []
# if num_vertices > 0:
# for i in range(num_vertices-1):
# edgelist.append([i,i+1])
# if as_np:
# return np.array(edgelist)
# else:
# return edgelist
# def get_raster_val_at_geoXY(x,y,rasterpath):
# runstring = f'gdallocationinfo -valonly -geoloc {rasterpath} {x} {y}'
# ret = subprocess.run(runstring,shell=True, stdout=subprocess.PIPE).stdout.decode('utf-8')
# return float(ret.strip('\n'))
# def pymesh_cylinder_for_cityjson(vertices,radius,zero_index=0,num_edges=16,rounding_places=4,custom_attrs=None,tol_for_simplification=None):
# '''
# creates a cylinder using pymesh
# '''
# num_vertices = vertices.shape[0]
# edges_list = create_edgeslist(num_vertices)
# wire_network = pymesh.wires.WireNetwork.create_from_data(vertices, edges_list)
# inflator = pymesh.wires.Inflator(wire_network)
# inflator.set_profile(num_edges)
# inflator.inflate(radius, per_vertex_thickness=False)
# mesh = inflator.mesh
# bbox_min, bbox_max = mesh.bbox
# mins = bbox_min.tolist()
# maxs = bbox_max.tolist()
# points = np.around(mesh.vertices,decimals=rounding_places).tolist()
# max_vert_idx = np.amax(mesh.faces)
# faces = deepcopy(mesh.faces) + zero_index
# faces = faces.tolist()
# faces = [[face] for face in faces]
# meshdata = {'mins':mins,'maxs':maxs,'points':points,'faces':faces,'zero_ref':max_vert_idx}
# if tol_for_simplification:
# # simplify based on tolerance
# mesh,info = pymesh.collapse_short_edges(mesh,tol_for_simplification)
# meshdata['edges_collapsed'] = info["num_edge_collapsed"]
# if custom_attrs:
# # for key in custom_attrs:
# # meshdata[key] = custom_attrs[key]
# meshdata['attributes'] = custom_attrs
# return meshdata
# def city_object_dict(faces,attrs_dict):
# # TODO: swap between MultiSurface/Solid
# city_obj = {
# "geometry": [
# {
# "boundaries": [],
# "lod": 1,
# "type": "MultiSurface"
# }
# ],
# "attributes": {
# },
# "type": "GenericCityObject"
# }
# city_obj['geometry'][0]['boundaries'] = faces
# city_obj['attributes'] = attrs_dict
# return city_obj
# class cylinder3D:
# '''
# DEPRECATED BY
# pymesh_cylinder(vertices,radius,num_edges=16,tol_for_simplification=None)
# maintained only because I don't want to delete it
# class to implement a cylinder in 3D to use it in cityjson
# '''
# def __init__(self,p1,p2,radius,points_per_circle=32):
# # first, its handy:
# self.p_1 = p1
# self.p_2 = p2
# self.number_of_points = points_per_circle*2
# self.circle_points_n = points_per_circle
# # the axis of the cylinder, is the difference vector:
# self.axis = p2 - p1
# # its normalized version will be used as the plane normal
# self.plane_n = normalize_vec(self.axis)
# # the plane as a 4 vec of parameters: [a,b,c,d]
# plane = plane_as_4vec(self.plane_n,p1)
# # any point on the plane
# point_on_plane = pt_onplane(plane,p1[0]+0.1*p1[0],p1[1]-0.1*p1[1])
# # first vector parallel to the plane containing the circle
# vec1_planeparalel = normalize_vec(point_on_plane-p1)
# # second vector parallel to the plane containing the circle
# vec2_planeparalel = normalize_vec(np.cross(vec1_planeparalel,self.plane_n))
# # first circumference
# # it must needs to be divisible by 4
# if points_per_circle % 4 != 0:
# points_per_circle = (points_per_circle // 4) * 4
# # the first circumference
# self.circle1 = circumference_3D(p1,radius,vec1_planeparalel,vec2_planeparalel,points_per_circle)
# # the second contains basically each point summed up with the axis
# self.circle2 = self.circle1 + self.axis
# def check_circles(self):
# centers = (self.p_1,self.p_2)
# for i,circle in enumerate((self.circle1,self.circle2)):
# print('\ncircle ',i+1,':')
# for point in circle:
# print(np.dot(point-centers[i],self.axis))
# print(np.linalg.norm(point-centers[i]))
# def get_vertices_list(self,as_list=False):
# self.justaposed = np.concatenate((self.circle1,self.circle2))
# self.mins = np.min(self.justaposed,axis=0)
# self.maxs = np.max(self.justaposed,axis=0)
# if as_list:
# return list(map(list,self.justaposed))
# else:
# return self.justaposed
# def boundaries_list(self,new_zero=0):
# # first the two circles boundaries
# zero = new_zero
# # first circle ending:
# fce = zero + self.circle_points_n
# c1 = [list(range(zero,fce))]
# # c2 = [list(range(fce,fce+self.circle_points_n))]
# c2 = [reverse_order_rangelist(fce+self.circle_points_n,fce)]
# # for the rest of the faces:
# rectangles = []
# for i in range(zero,fce):
# print(i,fce)
# p0 = i
# p1 = i + fce
# if i+1 == fce:
# p2 = fce
# p3 = zero
# else:
# p2 = i + fce + 1
# p3 = i + 1
# # the current face
# curr = [[p3,p0,p1,p2]]
# rectangles.append(curr)
# # rectangles.append(rectangles[0])
# # rectangles.pop(0)
# # print(rectangles)
# # res_list = []
# # res_list.append(c1)
# # res_list.append(rectangles)
# # res_list.append(c2)
# res_list = [c1,rectangles,c2]
# self.boundaries = res_list
# return res_list
# def as_city_object(self,attrs_dict):
# # city_obj = {name: {
# # "geometry": [
# # {
# # "boundaries": [],
# # "lod": 1,
# # "type": "Solid"
# # }
# # ],
# # "attributes": {
# # },
# # "type": "GenericCityObject"
# # }}
# # city_obj[name]['geometry'][0]['boundaries'].append(self.boundaries)
# # city_obj[name]['attributes'] = attrs_dict
# city_obj = {
# "geometry": [
# {
# "boundaries": [],
# "lod": 1,
# "type": "MultiSurface"
# }
# ],
# "attributes": {
# },
# "type": "GenericCityObject"
# }
# # city_obj['geometry'][0]['boundaries'].append(self.boundaries)
# city_obj['geometry'][0]['boundaries'] = self.boundaries
# city_obj['attributes'] = attrs_dict
# return city_obj
# # # # # THIS WAS AN ATTEMPT, MANTEINED HERE
# # # # # class city_json_simple2:
# # # # # base = {
# # # # # "type": "CityJSON",
# # # # # "version": "1.0",
# # # # # "CityObjects": {},
# # # # # "vertices": [],
# # # # # "metadata": {
# # # # # "geographicalExtent": [
# # # # # ]}}
# # # # # # cjio validation:
# # # # # # cjio our_test_cylinder.json validate --long > test_cylinder_report.txt
# # # # # mins = []
# # # # # maxs = []
# # # # # point_list = []
# # # # # def __init__(self,axis_vertex_list,radii_list,attrs_list,pts_per_cicle=32):
# # # # # # first we will check if two list are equally-sized
# # # # # # thx: https://stackoverflow.com/a/16720915/4436950
# # # # # ref_len = len(axis_vertex_list)
# # # # # if all(len(lst) == ref_len for lst in [radii_list,attrs_list]):
# # # # # for i,pointpair in enumerate(axis_vertex_list):
# # # # # print('processing segment ',i,' of ',ref_len,' segments')
# # # # # name = f't{i}'
# # # # # p1 = pointpair[0]
# # # # # p2 = pointpair[1]
# # # # # zero = i * 2 * pts_per_cicle
# # # # # cylinder = cylinder3D(p1,p2,radii_list[i],pts_per_cicle)
# # # # # self.point_list.append(cylinder.get_vertices_list(True))
# # # # # boundaries = cylinder.boundaries_list(zero)
# # # # # self.base['CityObjects'][name] = cylinder.as_city_object(attrs_list[i])
# # # # # self.mins.append(cylinder.mins)
# # # # # self.maxs.append(cylinder.maxs)
# # # # # del cylinder
# # # # # abs_max = np.max(np.array(self.maxs),axis=0)
# # # # # abs_min = np.min(np.array(self.mins),axis=0)
# # # # # bbox = [*abs_min,*abs_max]
# # # # # # filling the bounding box:
# # # # # self.base['metadata']['geographicalExtent'] = bbox
# # # # # # filling the vertices:
# # # # # # self.base['vertices'] = list(map(list,self.point_list))
# # # # # for i,point in enumerate(self.point_list[0]):
# # # # # self.base['vertices'].append(point)
# # # # # # self.base['vertices'] = [[point.tolist()] for point in self.point_list]
# # # # # # self.plist = [[point] for point in self.point_list]
# # # # # else:
# # # # # print('input lists are in different sizes, check your data!!!')
# # # # # def dump_to_file(self,outpath):
# # # # # with open(outpath,'w+') as writer:
# # # # # json.dump(self.base,writer,indent=2)
# ##### OUR BIG CLASS:
# class city_json_simple:
# base = {
# "type": "CityJSON",
# "version": "1.0",
# "CityObjects": {},
# "vertices": [],
# "metadata": {
# "geographicalExtent": [
# ]}}
# # cjio validation:
# # cjio our_test_cylinder.json validate --long > test_cylinder_report.txt
# mins = []
# maxs = []
# point_list = []
# def __init__(self,cylinderlist,EPSG):
# # SETTING epsg
# self.base["metadata"]["referenceSystem"] = f"urn:ogc:def:crs:EPSG::{EPSG}"
# # first we will check if two list are equally-sized
# # thx: https://stackoverflow.com/a/16720915/4436950
# total_cylinders = len(cylinderlist)
# for i,cylinder in enumerate(cylinderlist):
# print('writing cylinder',i,' of ',total_cylinders,' segments')
# name = f't{i}'
# self.base['CityObjects'][name] = city_object_dict(cylinder['faces'],cylinder['attributes'])
# self.mins.append(cylinder['mins'])
# self.maxs.append(cylinder['maxs'])
# self.base['vertices'] += cylinder['points']
# abs_max = np.max(np.array(self.maxs),axis=0)
# abs_min = np.min(np.array(self.mins),axis=0)
# bbox = [*abs_min,*abs_max]
# # filling the bounding box:
# self.base['metadata']['geographicalExtent'] = bbox
# # filling the vertices:
# # self.base['vertices'] = list(map(list,self.point_list))3
# # for i,point in enumerate(self.point_list[0]):
# # self.base['vertices'].append(point)
# # self.base['vertices'] = [[point.tolist()] for point in self.point_list]
# # self.plist = [[point] for point in self.point_list]
# def dump_to_file(self,outpath):
# with open(outpath,'w+') as writer:
# json.dump(self.base,writer)
# # # ###############################################################
# # # # the points
# # # p1 = np.array([1,1,1])
# # # p2 = np.array([5,5,1])
# # # p3 = np.array([6,7,6])
# # # # c1 = cylinder3D(p1,p2,10)
# # # # p_list = c1.get_vertices_list(False)
# # # # v_list = c1.boundaries_list(64)
# # # # print(v_list[0])
# # # # print(c1.maxs)
# # # # print(c1.mins)
# # # # print(c1.justaposed)
# # # lines_list = [(p1,p2)]
# # # radius_list = [1]
# # # attrs_list = [{"function": "something"}]
# # # builder = city_json_simple(lines_list,radius_list,attrs_list,4)
# # # # print(builder.base)
# # # builder.dump_to_file('our_test_cylinder.json')
# pipes_filepath = '/home/kaue/sanit3Dsdi/tests/sample_rede_agua_tratada.geojson'
# rasterpath = os.path.join(os.environ['HOME'],'sanit3Dsdi/tests/test_vrt_dtm.vrt')
# as_gdf = gpd.read_file(pipes_filepath)
# material_key = 'MATERIAL'
# diameter_key = 'DIAMETRO'
# print(as_gdf[diameter_key].unique())
# meshinfos = []
# n_entities = as_gdf.shape[0]
# zeroindex = 0
# with open('cylinder_report.txt','w+') as writer:
# for i,feature in enumerate(as_gdf.geometry):
# if as_gdf[diameter_key][i] != '':
# if feature.geom_type == 'LineString':
# as_array = np.array(feature)
# else:
# lines = []
# for line in feature:
# lines.append(np.array(line))
# as_array = np.concatenate(lines,axis=0)
# Z_list = []
# for point in as_array:
# Z = get_raster_val_at_geoXY(*point,rasterpath) - 2
# Z_list.append(Z)
# # as_array = np.concatenate((as_array,np.array(Z_list)[:,-1:]),axis=1)
# vertices = np.column_stack((as_array,np.array(Z_list)))
# radius = float(as_gdf[diameter_key][i]) / 200 #200 transforms into radius in centimeters
# customattrs = {"diametro":as_gdf[diameter_key][i],'material':as_gdf[material_key][i]}
# try:
# print('cylinder',i,' of ',n_entities,' with zero index: ',zeroindex)
# cylinder_meshinfo = pymesh_cylinder_for_cityjson(vertices,radius,zero_index=zeroindex,custom_attrs=customattrs)
# zeroindex += (cylinder_meshinfo['zero_ref'] + 500 )
# except Exception as e:
# writer.write(f'\n{i}')
# writer.write(feature.wkt)
# writer.write(str(e))
# meshinfos.append(cylinder_meshinfo)
# if i > 50:
# break
# # "referenceSystem":"urn:ogc:def:crs:EPSG::31984"
# builder = city_json_simple(meshinfos,31984)
# outpath = os.path.join(os.environ['HOME'],'data/sanit3d_out/pipery01_50.json')
# print(outpath)
# builder.dump_to_file(outpath)
| 27.5 | 142 | 0.528884 | 1,973 | 17,380 | 4.491637 | 0.198175 | 0.015798 | 0.013202 | 0.007673 | 0.282668 | 0.242496 | 0.229406 | 0.229406 | 0.183931 | 0.183931 | 0 | 0.024742 | 0.318642 | 17,380 | 631 | 143 | 27.543582 | 0.723611 | 0.917664 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0.001585 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
25fc451afa31243430222bec55b2f111e79ac503 | 676 | py | Python | sources/t06/t06ej08.py | workready/pythonbasic | 59bd82caf99244f5e711124e1f6f4dec8de22141 | [
"MIT"
] | null | null | null | sources/t06/t06ej08.py | workready/pythonbasic | 59bd82caf99244f5e711124e1f6f4dec8de22141 | [
"MIT"
] | null | null | null | sources/t06/t06ej08.py | workready/pythonbasic | 59bd82caf99244f5e711124e1f6f4dec8de22141 | [
"MIT"
] | null | null | null | # Python añadirá internamente el nombre de la clase delante de __baz
class Foo(object):
    """Demonstrates private name mangling: __baz is stored as _Foo__baz."""

    def __init__(self):
        self.__baz = 42

    def foo(self):
        """Print the value of the Foo-mangled attribute (_Foo__baz)."""
        current = self.__baz
        print(current)
# Name mangling applies to __baz (two leading underscores, at most one trailing): inside Bar it becomes _Bar__baz, while the one set in Foo is _Foo__baz, so both attributes exist separately. (__init__ itself is NOT mangled — it ends with double underscores.)
class Bar(Foo):
    """Subclass whose __baz is mangled separately, into _Bar__baz."""

    def __init__(self):
        super().__init__()
        self.__baz = 21

    def bar(self):
        """Print the value of the Bar-mangled attribute (_Bar__baz)."""
        value = self.__baz
        print(value)
x = Bar()
x.foo() # 42, because the method prints the attribute mangled as Foo__baz
x.bar() # 21, because the method prints the attribute mangled as Bar__baz
# We can inspect the mangled members held by instance x
print(x.__dict__) # {'_Bar__baz': 21, '_Foo__baz': 42}
| 25.037037 | 124 | 0.673077 | 103 | 676 | 3.932039 | 0.446602 | 0.069136 | 0.054321 | 0.079012 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022989 | 0.227811 | 676 | 26 | 125 | 26 | 0.752874 | 0.536982 | 0 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038462 | 0 | 1 | 0.266667 | false | 0 | 0 | 0 | 0.4 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d3418e5026ce3e02ad31ae070813d872c538b3b2 | 333 | py | Python | python/tests/analyzer/test_list_item_analysis.py | eno-lang/enolib | 4175f7c1e8246493b6758c29bddc80d20eaf15f7 | [
"MIT"
] | 17 | 2019-04-15T21:03:37.000Z | 2022-01-24T11:03:34.000Z | python/tests/analyzer/test_list_item_analysis.py | eno-lang/enolib | 4175f7c1e8246493b6758c29bddc80d20eaf15f7 | [
"MIT"
] | 20 | 2019-03-13T23:23:40.000Z | 2022-03-29T13:40:57.000Z | python/tests/analyzer/test_list_item_analysis.py | eno-lang/enolib | 4175f7c1e8246493b6758c29bddc80d20eaf15f7 | [
"MIT"
] | 4 | 2019-04-15T21:18:03.000Z | 2019-09-21T16:18:10.000Z | from tests.util import match_object_snapshot
from tests.analyzer.util import analyze
# Fixture document fed to the analyzer below.
# NOTE: this module-level `input` shadows the builtin input(); kept as-is
# because the test function reads it by this name.
input = """
list:
- value
- value
- value
- value
- value
""".strip()
def test_list_item_analysis():
    """Snapshot-test the analyzer output for a document made of list items."""
    result = analyze(input)
    assert match_object_snapshot(result, 'tests/analyzer/snapshots/list_item_analysis.snap.yaml')
| 19.588235 | 99 | 0.726727 | 42 | 333 | 5.547619 | 0.5 | 0.171674 | 0.193133 | 0.171674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168168 | 333 | 16 | 100 | 20.8125 | 0.841155 | 0 | 0 | 0.384615 | 0 | 0 | 0.342342 | 0.159159 | 0 | 0 | 0 | 0 | 0.076923 | 1 | 0.076923 | false | 0 | 0.153846 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d382c90b4c0b9ab7e5f9321d48db0e2a98a51ef2 | 2,490 | py | Python | erec/Tabulate_fion_Xe.py | cajohare/DarkElectronRecoils | ec417bb924be4d980650b71647b2fdeb28246f11 | [
"MIT"
] | null | null | null | erec/Tabulate_fion_Xe.py | cajohare/DarkElectronRecoils | ec417bb924be4d980650b71647b2fdeb28246f11 | [
"MIT"
] | null | null | null | erec/Tabulate_fion_Xe.py | cajohare/DarkElectronRecoils | ec417bb924be4d980650b71647b2fdeb28246f11 | [
"MIT"
] | 1 | 2019-12-10T06:55:02.000Z | 2019-12-10T06:55:02.000Z | from numpy import *
from HaloFuncs import *
from Params import *
from AtomicFuncs import *
# s orbitals
n_s = array([1]+[2]*2+[3]*3+[4]*3+[5]*4)
Z_s = array([54.9179,47.2500,26.0942,68.1771,16.8296,12.0759,31.9030,8.0145,5.8396,14.7123,3.8555,2.6343,1.8124])
c_1s = array([-0.965401,-0.040350,0.001890,-0.003868,-0.000263,0.000547,-0.000791,0.000014,-0.000013,-0.000286,0.000005,-0.000003,0.000001])
c_2s = array([0.313912,0.236118,-0.985333,0.000229,-0.346825,0.345786,-0.120941,-0.005057,0.001528,-0.151508,-0.000281,0.000134,-0.000040])
c_3s = array([-0.140382,-0.125401,0.528161,-0.000435,0.494492,-1.855445,0.128637,-0.017980,0.000792,0.333907,-0.000228,0.000191,-0.000037])
c_4s = array([0.064020,0.059550,-0.251138,0.000152,-0.252274,1.063559,-0.071737,-0.563072,-0.697466,-0.058009,-0.018353,0.002292,-0.000834])
c_5s = array([-0.022510,-0.021077,0.088978,-0.000081,0.095199,-0.398492,0.025623,0.274471,0.291110,0.011171,-0.463123,-0.545266,-0.167779])
# p orbitals
n_p = array([2]*2+[3]*3+[4]*3+[5]*4)
Z_p = array([58.7712,22.6065,48.9702,13.4997,9.8328,40.2591,7.1841,5.1284,21.5330,3.4469,2.2384,1.14588])
c_2p = array([0.051242,0.781070,0.114910,-0.000731,0.000458,0.083993,-0.000265,0.000034,0.009061,-0.000014,0.000006,-0.000002])
c_3p = array([0.000264,0.622357,-0.009861,-0.952677,-0.337900,-0.026340,-0.000384,-0.001665,0.087491,0.000240,-0.000083,0.000026])
c_4p = array([0.013769,-0.426955,0.045088,0.748434,0.132850,0.059406,-0.679569,-0.503653,-0.149635,-0.014193,0.000528,-0.000221])
c_5p = array([-0.005879,0.149040,-0.018716,-0.266839,-0.031096,-0.024100,0.267374,0.161460,0.059721,-0.428353,-0.542284,-0.201667])
# d orbitals
n_d = array([3]*3+[4]*5)
Z_d = array([19.9787,12.2129,8.6994,27.7398,15.9410,6.0580,4.0990,2.5857])
c_4d = array([-0.013758,-0.804573,0.260624,0.00749,0.244109,0.597018,0.395554,0.039786])
c_3d = array([0.220185,0.603140,0.194682,-0.014369,0.049865,-0.000300,0.000418,-0.000133])
n = int(input("Number of values"))
E_r_vals = logspace(-1.0,3.0,n)/1000.0 # keV
q_vals = logspace(0.0,4.0,n)
np = 50
F1 = zeros(shape=(n,n))
F2 = zeros(shape=(n,n))
F3 = zeros(shape=(n,n))
for i in range(0,n):
F1[i,:] = f_nl_ion_sq(q_vals,E_r_vals[i],1,c_5p,n_p,Z_p,np=np)
F2[i,:] = f_nl_ion_sq(q_vals,E_r_vals[i],0,c_5s,n_s,Z_s,np=np)
F3[i,:] = f_nl_ion_sq(q_vals,E_r_vals[i],2,c_4d,n_d,Z_d,np=np)
print(i,'of',n)
savetxt('../data/fion/fion_Xe.txt',vstack((E_r_vals,q_vals,log10(F1),log10(F2),log10(F3))),delimiter='\t',fmt="%1.16f")
| 54.130435 | 140 | 0.685141 | 553 | 2,490 | 2.992767 | 0.455696 | 0.039879 | 0.018127 | 0.021752 | 0.048943 | 0.048943 | 0.048943 | 0.048943 | 0.048943 | 0.038066 | 0 | 0.492766 | 0.056225 | 2,490 | 45 | 141 | 55.333333 | 0.211489 | 0.014458 | 0 | 0 | 0 | 0 | 0.020417 | 0.0098 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.117647 | 0 | 0.117647 | 0.029412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d393dca18498ae62303fa36f0b1c7264c4cc3644 | 431 | py | Python | findMissing.py | quake0day/oj | c09333d1738f8735de0d5d825db6f4b707585670 | [
"MIT"
] | null | null | null | findMissing.py | quake0day/oj | c09333d1738f8735de0d5d825db6f4b707585670 | [
"MIT"
] | null | null | null | findMissing.py | quake0day/oj | c09333d1738f8735de0d5d825db6f4b707585670 | [
"MIT"
] | null | null | null | class Solution:
# @param nums: a list of integers
# @return: an integer
def findMissing(self, nums):
# write your code here
length = len(nums)
new_array = [x for x in xrange(len(nums)+1)]
#print new_array
for item in nums:
new_array[item] = -1
for item in new_array:
if item != -1:
return item
# Quick manual check: the missing number in [0, 1, 3] is 2.
# Fixed the Python-2-only print statement; print(...) works on both 2 and 3.
a = Solution()
print(a.findMissing([0, 1, 3]))
| 20.52381 | 52 | 0.554524 | 61 | 431 | 3.852459 | 0.52459 | 0.13617 | 0.102128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021201 | 0.343387 | 431 | 20 | 53 | 21.55 | 0.809187 | 0.201856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.05 | 0 | 0 | null | null | 0 | 0 | null | null | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d397b08e2a0e60ffb2dab182a94b32b11c910ca0 | 721 | py | Python | lesson18_projects/house3/auto_gen/code/states1/out_opendoor.py | muzudho/py-state-machine-practice | e31c066f4cf142b6b6c5ff273b56a0f89428c59e | [
"MIT"
] | null | null | null | lesson18_projects/house3/auto_gen/code/states1/out_opendoor.py | muzudho/py-state-machine-practice | e31c066f4cf142b6b6c5ff273b56a0f89428c59e | [
"MIT"
] | null | null | null | lesson18_projects/house3/auto_gen/code/states1/out_opendoor.py | muzudho/py-state-machine-practice | e31c066f4cf142b6b6c5ff273b56a0f89428c59e | [
"MIT"
] | null | null | null | from lesson17_projects.house3.auto_gen.data.const import E_ENTER, E_FAILED
class OutOpendoorState():
    """State handler: reacts to a pulled trigger with enter/failed hooks."""

    def update(self, req):
        """Run one step: entry hook, pull an event, then dispatch on it.

        Returns the event constant that was handled, or None when no event
        was produced. Raises ValueError on an unrecognised event.
        """
        self.on_entry(req)
        # read the next event
        msg = self.on_trigger(req)
        # dispatch via guard-style early returns
        if msg == E_ENTER:
            self.on_enter(req)
            return E_ENTER
        if msg == E_FAILED:
            self.on_failed(req)
            return E_FAILED
        if msg is None:
            return None
        raise ValueError(f"Unexpected msg:{msg}")

    def on_entry(self, req):
        pass

    def on_trigger(self, req):
        return req.context.pull_trigger()

    def on_enter(self, req):
        pass

    def on_failed(self, req):
        pass
d39809c2141f4cd018324c8ca277829e7cfd52f9 | 1,063 | py | Python | pywebhooks/api/resources/v1/webhook/registrations_api.py | chadlung/pywebhooks | 4b5f41be7c3c498a31cb0225cbde8e63c48ce999 | [
"Apache-2.0"
] | 94 | 2015-04-03T12:10:54.000Z | 2021-08-30T13:50:48.000Z | pywebhooks/api/resources/v1/webhook/registrations_api.py | chadlung/pywebhooks | 4b5f41be7c3c498a31cb0225cbde8e63c48ce999 | [
"Apache-2.0"
] | 10 | 2016-06-07T17:34:39.000Z | 2019-11-23T00:00:09.000Z | pywebhooks/api/resources/v1/webhook/registrations_api.py | chadlung/pywebhooks | 4b5f41be7c3c498a31cb0225cbde8e63c48ce999 | [
"Apache-2.0"
] | 10 | 2015-04-14T18:03:08.000Z | 2021-08-30T13:50:49.000Z | # Standard lib imports
# None
# Third-party imports
from flask import request
from flask_restful import Resource
# Project-level imports
from pywebhooks import DEFAULT_REGISTRATIONS_TABLE
from pywebhooks.api.decorators.authorization import api_key_restricted_resource
from pywebhooks.api.handlers.pagination_handler import paginate
from pywebhooks.api.handlers.resources_handler import delete_all
from pywebhooks.api.decorators.validation import validate_pagination_params
class RegistrationsAPI(Resource):
    """REST resource exposing the collection of webhook registrations."""

    @api_key_restricted_resource(verify_admin=False)
    @validate_pagination_params()
    def get(self):
        """Return a (possibly paginated) listing of registrations."""
        table = DEFAULT_REGISTRATIONS_TABLE
        return paginate(request, table, 'registrations')

    @api_key_restricted_resource(verify_admin=True)
    def delete(self):
        """Remove every record from the registrations table (admin only)."""
        table = DEFAULT_REGISTRATIONS_TABLE
        return delete_all(table)
| 30.371429 | 79 | 0.762935 | 123 | 1,063 | 6.382114 | 0.471545 | 0.089172 | 0.086624 | 0.09172 | 0.122293 | 0.122293 | 0.122293 | 0 | 0 | 0 | 0 | 0 | 0.174976 | 1,063 | 34 | 80 | 31.264706 | 0.895097 | 0.206961 | 0 | 0 | 0 | 0 | 0.01671 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.466667 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
d39fa7cad9e712a960a7465bbc7e8ecbdf8dff04 | 793 | py | Python | fifty_off/api/serializers/dashboard/dashboard_serializer.py | DanielSalazar1/50off | e39c8709ea8ac81b39c02060517353ed03b60074 | [
"BSD-3-Clause"
] | null | null | null | fifty_off/api/serializers/dashboard/dashboard_serializer.py | DanielSalazar1/50off | e39c8709ea8ac81b39c02060517353ed03b60074 | [
"BSD-3-Clause"
] | 10 | 2020-06-05T20:15:38.000Z | 2022-01-13T01:58:11.000Z | fifty_off/api/serializers/dashboard/dashboard_serializer.py | DanielSalazar1/50off | e39c8709ea8ac81b39c02060517353ed03b60074 | [
"BSD-3-Clause"
] | null | null | null | from rest_framework import serializers
import uuid

from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from rest_framework.validators import UniqueValidator
class FavouriteSerializer(serializers.Serializer):
    """Serializer for a favourite item: a name plus an auto-generated serial.

    Bug fix: the original declared Django *model* fields (models.CharField,
    models.UUIDField with editable=False) inside a DRF Serializer, and `models`
    / `uuid` were not even imported. Use serializer fields instead.
    """
    name = serializers.CharField(max_length=255)
    serial = serializers.UUIDField(default=uuid.uuid4)
    # photo = serializers.ImageField(required=False)  # re-enable when uploads are wired up

    def create(self, validated_data):
        """Create and return a new Item from the validated payload."""
        name = validated_data.get('name', None)
        serial = validated_data.get('serial', None)
        # NOTE(review): `Item` is not imported in this module — confirm the
        # intended model import before relying on this method.
        new_item = Item.objects.create(name=name, serial=serial)
        return new_item
| 39.65 | 86 | 0.722573 | 102 | 793 | 5.519608 | 0.578431 | 0.092362 | 0.085258 | 0.0746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006107 | 0.174023 | 793 | 19 | 87 | 41.736842 | 0.853435 | 0.218159 | 0 | 0 | 0 | 0 | 0.016234 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.333333 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
d3a9681bea7e0b3c22e7e24641e30d8043bb632e | 616 | py | Python | cogs/utils/permissions.py | Electromaster232/discord-ix-bot | 666f2fd5fcd6c38fa426a6ba9b896a5b67593b48 | [
"MIT"
] | 1 | 2019-09-30T01:18:08.000Z | 2019-09-30T01:18:08.000Z | cogs/utils/permissions.py | Electromaster232/discord-ix-bot | 666f2fd5fcd6c38fa426a6ba9b896a5b67593b48 | [
"MIT"
] | null | null | null | cogs/utils/permissions.py | Electromaster232/discord-ix-bot | 666f2fd5fcd6c38fa426a6ba9b896a5b67593b48 | [
"MIT"
] | null | null | null | import discord
from config import Config
def modcheck(ctx):
    """True if the author holds the Moderator role, is an admin, or owns the bot."""
    server_roles = ctx.message.server.roles
    mod_role = discord.utils.get(server_roles, name="Moderator")
    return mod_role in ctx.message.author.roles or ownercheck(ctx) or admincheck(ctx)
def admincheck(ctx):
    """True if the message author has the Administrator role or owns the bot.

    Bug fix: the original ended with ``... or admincheck(ctx)`` — a self-call
    that recursed infinitely whenever the author was neither an administrator
    nor a bot owner.
    """
    roles = ctx.message.server.roles
    role = discord.utils.get(roles, name="Administrator")
    return role in ctx.message.author.roles or ownercheck(ctx)
def managecheck(ctx):
    """True if the author can manage the server or owns the bot."""
    perms = ctx.message.author.server_permissions
    return perms.manage_server or ownercheck(ctx)
def ownercheck(ctx):
    """True if the message author is listed among the configured bot owners."""
    author_id = ctx.message.author.id
    return author_id in Config.botowners
6ca34780c1722f8f934f4dafd7ad3f41459c69ec | 33,323 | py | Python | graphapi.py | slashrod/facebook-graphapi | 4beb9c323aaad9bbe38d549f85188fb6a99c5797 | [
"Apache-2.0"
] | 1 | 2015-01-26T15:18:15.000Z | 2015-01-26T15:18:15.000Z | graphapi.py | slashrod/facebook-graphapi | 4beb9c323aaad9bbe38d549f85188fb6a99c5797 | [
"Apache-2.0"
] | null | null | null | graphapi.py | slashrod/facebook-graphapi | 4beb9c323aaad9bbe38d549f85188fb6a99c5797 | [
"Apache-2.0"
] | null | null | null | # -*- coding:utf-8 -*-
# Facebook Graph API Explorer. This will walk through the graph api.
#
# Features:
# - Get ID for a Facebook's group
# - Create a dummy profile to interact with the Facebook group
# - Create a dummy Facebook's application using the dummy profile
# - Get AppID and AppSecret for the dummy app page
# - Use Python client for Facebook Api to get an access token
# * Query the group for its fields (owner, creation_date, update_date,
# picture, link, id,...).
# See https://developers.facebook.com/docs/reference/api/group/ for details
# * Query the group for its members
# * Query the group for its events
# * Query the group for its picture
# * Query the group for its docs
# * Query the group for its feed
# - Create database layout using web2py DAL (TO BE IMPLEMENTED)
# - The database layout is provided in app/models/facebook.py
# - Pull data from Facebook if this is the first time the app is used
# - For that, just query the database to check if there is existing data
# in the database. If not, that means the app is running the first time
# So we need to just PULL data from Facebook
# - If "select from the database tables" returns data, we need to update the
# content with new posts, comments, subscriptions, and get
import facebook # Import facebook.py to use python client for facebook API
from gluon import * # To import web2py libraries and tools
# In occurence, current
from urlparse import urlparse # To get query string from url. Most used
# in get_query_parameters()
# Get database access layer from current
table_name_prefix = 'facebook'  # Prefix applied to every generated table name (see Base.table_name)
db = current.db  # web2py DAL handle published via gluon.current
# WARNING(security): real application credentials and an access token are
# hardcoded below. They should live in configuration (and be rotated), never
# be committed to source control.
app_id = '239533669516062'
app_secret = '706a63e3fec89fbc35e0c92eac57ad66'
access_token = 'CAADZA2sdIgx4BAEVgPkiSvtD689tGOTmhbeZBIkeXboNsxDBMSgMOCq4uZAfZBCrsQmcDrPPZB98t5jCMlzoPZBqa2lhBWwVL0kEf7FezcvbJv3eydjnZA4dR8ZAe27JLl8rFcWTw9BnJnh8AwCb8217EJkFVSbUBH0ZD'
facebook_profile_id = '100003123256932'
facebook_id = '335662792434' # Facebook Group ID
facebook_post_id = '335662792434_10150431047037435'
def get_query_parameters(url):
    """
    Return the query parameters of *url* as a dict, with any
    'access_token' entry removed.

    For instance:
        url = 'http://abc.test.it/now?a=3&h=7'
        get_query_parameters(url) -> {'a': ['3'], 'h': ['7']}
    (values are lists of strings, as returned by parse_qs — the original
    docstring's {'a':3, 'h':7} example was wrong)

    :param url: URL string to parse.
    :return: dict of query parameters, access token stripped.
    """
    query = urlparse(url).query
    # NOTE(review): facebook.parse_qs presumably mirrors urlparse.parse_qs
    # (dict of lists) — confirm against the facebook module.
    parameters = facebook.parse_qs(query)
    # parse_qs already returns a fresh dict, so the original's extra
    # iteritems() copy was redundant. Strip the token: it is a transport
    # detail, not payload, and should never be exposed.
    parameters.pop('access_token', None)
    return parameters
class Base(dict):
@staticmethod
def factory(object_type, facebook_id, graph, *args, **kwargs):
object_types = {
'group' : FcbGroup,
'user' : FcbUser,
}
assert object_type in object_types, "Unknown object type"
return object_types[object_type](facebook_id, graph=graph, *args, **kwargs)
def __init__(self, facebook_id, *args, **kwargs):
"""
fields = dict( {field_name: supported} )
field_name: object field, str, refer to facebook graph API documentation
supported : boolean, True if supported by the database, False if cannot be stored in the
database
connexions = dict {connexion: supported}
"""
# Verify that graph object is provided
assert 'graph' in kwargs, "A facebook.GraphAPI object should be provided"
assert isinstance(kwargs['graph'], facebook.GraphAPI), "graph\
should be of type facebook.GraphAPI"
self.graph = kwargs.pop('graph')
self.facebook_id = facebook_id
assert isinstance(self.graph, facebook.GraphAPI), "graph parameter should be instance of GraphAPI"
self.fields = {
'id':True, # The object ID, string
'name':True, # The name of the object, string
}
self.references = { # True if attribute.type = list:reference
# False if attribute.type = reference
}
# Database table columns FcbGroup.fields that are supported by the application. In fact that
# means that FcbGroup.fields[field] returns True for field in FcbGroup.fields. But because
# of database keywords or because of convenience needs, the table column used by the
# database may differ from the field name used by Facebook Graph api. Then
# FcbGroup.table_fields maps Facebook Graph API field to database table column
self.table_fields = { # Mapping between facebook fields and table fields. For efficiency,
# Only field names which change are referenced here
'id':'facebook_id',
}
self.__table_name = str() # To be set by the child class
self.__table_columns = list() # columns in the database
self.connections = dict() # Facebook's object connections
# Update attributes with child properties
variable_attributes = ['fields',
'table_fields',
'connections',
'references',]
if kwargs:
if 'facebook_table' in kwargs:
facebook_table = kwargs.pop('facebook_table')
self.table_name = facebook_table
for attr in variable_attributes:
if attr in kwargs:
# kwargs.pop(key) to remove from the dictionnary. It will
# remain some (key value) pairs in the kwargs. Those key
# value pairs will be used to initialize the object since
# it is itself a dictionnary.
value = kwargs.pop(attr)
# Value should be a dictionnary
assert type(value) is dict, u"%s attribute should be a\
dictionnary" % attr
field = getattr(self, attr)
field.update(value)
# Define table columns
# Add table columns whose name are adapted. This is done in
# FcbGroup.table_fields
# Most of the time Facebook id attribute will be changed to avoid
# collision with attribute autogenerated by ORM framework. Another
# side effect of not changing id attribute is that there will be clash
# for foreign key to id attributes since there should be, most of the
# time integer, but those from facebook are strings.
self.__table_columns.extend([table_field \
for field, table_field in self.table_fields.iteritems()\
if self.fields[field]])
# FcbGroup.fields contains all fields provided by Facebook Graph API with their support
# status in the application here provided.
# Database table columns FcbGroup.fields that are supported by the application. In fact that
# means that FcbGroup.fields[field] returns True for field in FcbGroup.fields. But because
# of database keywords or because of convenience needs, the table column used by the
# database may differ from the field name used by Facebook Graph api. Then
# FcbGroup.table_fields maps Facebook Graph API fields to desired
# table columns
for field in self.fields:
if self.fields[field]\
and field not in self.table_fields:
self.__table_columns.append(field)
pass
# Retrieve the table from the database
self.table = getattr(db, self.table_name)
# Call the parent child and use it to initialize the object with
# remaining (key, value) pairs in kwargs.
super(Base, self).__init__(**kwargs)
pass
def __getitem__(self,key):
# An other implementation which allows to get a list of items from the
# dictionnary
if isinstance(key, (tuple,list)):
return [super(Base,self).__getitem__(m) for m in key if m in\
self]
else:
return super(Base,self).__getitem__(key)\
if key in self else None
@property
def table_name(self):
return u'%s_%s' % (table_name_prefix, self.__table_name)
@table_name.setter
def table_name(self, table_name):
self.__table_name = table_name
@property
def table_columns(self):
return self.__table_columns or None
@table_columns.setter
def table_columns(self, column_names):
# table_columns can be a tuple or a list, in wich case we ensure that
# the column is not already existant in the self.table_columns because
# a list doesn't update but instead just append. There is no
# integrated mean to verify that the column already exists in the
# database.
# table_columns is a list. The class user can provide a list of
# columns or a tuple of columns.
if isinstance(column_names, (tuple, list)):
[self.__table_columns.append(column_name)\
for column_name in column_names\
if column_name not in self.__table_columns]
else:
# If provided data is not a list, it should be a string. If not
# already took into account, it will be added to the list
assert isinstance(column_names, str), "Either a list or a str expected"
self.__table_columns.append(column_names)
if column_names not in self.__table_columns:
self.__table_columns.append(column_names)
def exists(self, *args, **kwargs):
assert hasattr(self, 'facebook_id'), "Missing Object ID"
rows = db(self.table.facebook_id == self.facebook_id)
if rows.count():
return True
else:
return False
def get(self, function, *args, **kwargs):
assert isinstance(function, type(self.get)), "Expecting a function\
as first argument"
response = None
try:
response = function(*args, **kwargs)
except (facebook.AppOAuthError, facebook.PasswordOAuthError,
facebook.ExpiredOAuthError, facebook.InvalidOAuthError), e:
# Extend access Token life.
# This method returs {'access_token': TOKEN, 'expires': EXPIRES}
result = self.graph.extend_access_token(app_id, app_secret)
self.graph.access_token = result['access_token']
# Once token life is extended replay
response = function(*args, **kwargs)
except facebook.ServerError, e:
# The server is throttling, retry later
pass
except facebook.UserError, e:
# API Permission Denied or API Permission
# The user has to grant the application
# Inform the administrator by sending him a mail. Then we need
# here the mail information of the administrator.
pass
except facebook.UserOAuthError, e:
# User needs to log on www.facebook.com or m.facebook.com
pass
except facebook.AppOAuthError, e:
# User removed the app from its settings
pass
except facebook.UnconfirmedOAuthError, e:
# User needs to log on www.facebook.com or m.facebook.com
pass
except Exception, e:
print e
pass
return response
def get_object(self, *args, **kwargs):
self.facebook_object = self.get(self.graph.get_object,
self.facebook_id, *args, **kwargs)
return self.facebook_object
def get_connection(self, connection, **kwargs):
assert connection in self.connections, "Connection not supported"
# Obtain data from Facebook
# Use built-in get method because it takes care of all exception
# handling
response = self.get(self.graph.get_connections,\
self.facebook_id,
connection,
as_generator=True,
fields='id',
**kwargs)
result = []
for page, url in response:
for item in page:
result.append(item)
pass
pass
return result
def filter_object(self, facebook_object):
"""
filter_object will remove from fields all fields not supported
by the framework. The parameter is response because most
facebook_object: dictionary containing Facebook object fields for
current object
"""
# metadata information can be requested to identify the type of
# object to instantiate
metadata = None
if 'metatadata' in facebook_object:
metadata = facebook_object['metadata']
# Get only fields supported by the framework by removing not
# supported fields looking in self.fields
facebook_object_keys = facebook_object.keys()
for field in facebook_object_keys:
if field not in self.fields or not self.fields[field]:
facebook_object.pop(field)
# Now just fields supported by the framework are remaining. It's
# time to add metadata information if it has been requested
if metadata:
facebook_object['metadata'] = metadata
pass
return facebook_object
def update(self, *args, **kwargs):
"""
Update the group object
"""
assert hasattr(self, 'facebook_object'), "call self.get_object first"
# args is a tuple of positionned arguments
# kwargs is a dictionary of named arguments
# assert args, "There should be at list one argument"
def update_arg(arg):
"""
For non string values, instantiate a new object
For instance a group has an owner whos is represented by a
User object.
A Post has many comments which are represented by many
Comment objects
A comment may have many message tags
"""
def update_object(Object):
# Get object ID
try:
facebook_id = Object['id']
except Exception, e:
print Object
print
print e
import sys
sys.exit()
# Get facebook object with metadata
facebook_object = \
self.graph.get_object(facebook_id,
metadata=1)
# Get object type
object_type = facebook_object['metadata']['type']
# Remove metadata information
del facebook_object['metadata']
# Instanciate the object
Object = Base.factory(object_type,
facebook_id,
graph=self.graph)
Object.update(facebook_object)
return Object
# Update the dictionary with key and values in arg
references = self.references
for reference in references: # For each referenced field
if reference in arg: # if referenced field data
# is in arg
if references[reference]: # references[reference]:True
# if the attribute type
# defined in database
# models is list:reference else
# False
for i, Object in enumerate(arg[reference]):
arg[reference][i] = update_object(Object)
pass
pass
else: # references[reference]:False
arg[reference] = update_object(arg[reference])
pass
pass
pass
return arg
# Update the dictionnary with self.facebook_object data
facebook_object_filtered = self.filter_object(self.facebook_object)
facebook_object_updated = update_arg(facebook_object_filtered)
super(Base, self).update(facebook_object_updated)
# Update the dictionnary with optional positional and keyed
# arguments
for arg in args:
assert hasattr(arg, 'keys'), "Arguments should be iterable"
arg = self.filter_object(arg)
arg = update_arg(arg)
if kwargs:
kwargs = self.filter_object(kwargs)
kwargs = update_arg(kwargs)
pass
# Update, here we call dict.update method
super(Base, self).update(*args, **kwargs)
# Add table_fields values
for field,table_field in self.table_fields.iteritems():
# table_fields : {facebook_field : table_field}
# Initialize self[table_field] = self[facebook_field]
self[table_field] = self[field]
pass
def db_update(self, *args, **kwargs):
# Verify that facebook_object exists
assert hasattr(self, 'facebook_object'), "call self.get_object() first"
# Create a dictionnary from the list of table_columns
data = dict().fromkeys(self.table_columns)
# Populate the dictionnary with data in self
for key in data:
try:
data[key] = self[key]
except KeyError, e:
# Key is missing, not provided
pass
# Update kwargs with collected data
kwargs.update(data)
if self.exists():
# if args: there is a key specified for lookup
if args:
self.table.update_or_insert(*args, **kwargs)
else:
self.table.update_or_insert(**kwargs)
# Retrieve the record ID in our database
self.record_id = record_id = \
db(self.table.facebook_id ==
self.facebook_id).select().first()['id']
return record_id
else:
self.record_id = record_id = self.table.insert(**kwargs)
return record_id
def db_truncate(self, *args, **kwargs):
self.table.truncate()
pass
pass
class FcbUser(Base):
    """Wrapper for a Facebook User object, stored in the 'facebook_user' table."""
    def __init__(self, facebook_id, graph, facebook_table='user'):
        # Update the list of fields for a Facebook User.
        # NOTE(review): the original per-field comments were copy-pasted from
        # FcbGroup and described group fields; corrected below.
        fields = {
            'first_name':True, # The user's first name, string
            'last_name':True, # The user's last name, string
            'gender':True, # The user's gender, string
            'username':True, # The user's Facebook username, string
            'link': True, # The URL of the user's profile, string
            'locale':True, # The user's locale, string
            'updated_time':True,# The last time the profile was updated
        }
        # Fetch the object from the Graph API and merge its fields into self
        super(FcbUser, self).__init__(facebook_id, graph=graph, fields = fields,
            facebook_table = facebook_table)
        self.get_object()
        self.update()
        pass
class FcbComment(Base):
"""
Child class should extend providing:
reference to object
list of custom coumns
"""
def __init__(self, facebook_id, graph, facebook_table, **kwargs):
# Avoid direct instantiation
if type(self) == FcbPost:
raise TypeError, "FcbComment must be subclassed"
fields = dict()
table_fields = dict()
references = dict()
connections = dict()
# Update attributes from subclasses
if 'fields' in kwargs:
fields.update(kwargs['fields'])
pass
if 'table_fields' in kwargs:
table_fields.update(kwargs['table_fields'])
pass
if 'references' in kwargs:
references.update(kwargs['references'])
pass
if 'connections' in kwargs:
references.update(kwargs['connections'])
pass
# Update the list of fields for Facebook Group
fields.update({
'from':True,
'to':False,
'message':True,
'message_tags':False,
'actions':False,
'application':False,
'created_time':True,
'updated_time':True,
'like_count':False,
'comment_count':True,
})
# Update the list of table_fields
table_fields.update({
'id':'facebook_id',
'from':'facebook_user',
'to':'facebook_object_to',
})
references.update({ # True if attribute.type = list:reference
# False if attribute.type = reference
'facebook_user' : False,
'actions':True,
'with_tags':True,
'place':False,
})
# Update the list of connexions supported by the app
connections.update({
'comments': False,
'likes': False,
})
self.graph = graph # Initialize facebook.GraphAPI
super(FcbPost, self).__init__(facebook_id, fields=fields, graph=graph,
connections=connections, references=references,
facebook_table = facebook_table, table_fields=table_fields)
self.get_object()
self.update()
# Extend self.columns to support non Facebook fields
# In fact self.columns is built using:
# - Supported fields provided by self.fields
# - Table fields provided by self.table_fields
# But there could be extra columns used by a specific framework for
# linking data and for data management purposes. Thoses columns can
# be enumerated after initializing the object.
pass
class FcbGroupComment(FcbComment):
    """
    Comment posted on a group's feed (table 'facebook_group_comment').

    BUGFIX: the original subclassed FcbPost and called
    super(FcbGroupPost, ...) — copy-paste errors from FcbGroupPost; a
    group comment is a comment, created by FcbPost.set_comments.
    """
    def __init__(self, facebook_id, graph, facebook_table='group_comment'):
        # references are not extended here: that attribute only describes
        # which facebook fields must be wrapped as objects; the link to the
        # parent post is a plain extra column instead.
        super(FcbGroupComment, self).__init__(facebook_id, graph,
            facebook_table=facebook_table, )
        # Extra column tying the comment to its parent group post
        self.table_columns = [
            'facebook_group_post',
        ]
    def db_update(self, facebook_group):
        """
        Store the comment, recording the parent group post.

        :param facebook_group: id of the parent post (parameter name kept
            from the original for signature compatibility; callers pass
            the post's facebook_id positionally).
        """
        # BUGFIX: the original passed the undefined name
        # `facebook_group_post` instead of the parameter value.
        record_id = super(FcbGroupComment,
                self).db_update(facebook_group_post=facebook_group)
        return record_id
class FcbPost(Base):
"""
Child class should extend providing:
reference to object's feed (group, event, ....)
list of custom columns (adding them to self.table_columns after
calling base class)
"""
def __init__(self, facebook_id, graph, facebook_table, **kwargs):
# Avoid direct instantiation
if type(self) == FcbPost:
raise TypeError, "FcbPost must be sublcassed"
fields = dict()
table_fields = dict()
references = dict()
connections = dict()
# Update attributes from subclasses
if 'fields' in kwargs:
fields.update(kwargs['fields'])
pass
if 'table_fields' in kwargs:
table_fields.update(kwargs['table_fields'])
pass
if 'references' in kwargs:
references.update(kwargs['references'])
pass
if 'connections' in kwargs:
references.update(kwargs['connections'])
pass
# Update the list of fields for Facebook Group
fields.update({
'from':True,
'to':False,
'message':True,
'message_tags':False,
'picture': True,
'link':True,
'caption':True,
'description': True,
'source': True,
'properties': False,
'icon': False,
'actions':False,
'privacy':False,
'type':True,
'place':True,
'story':False,
'story_tags':False,
'with_tags':False,
'comments' : False,
'object_id':True,
'application':False,
'created_time':True,
'updated_time':True,
'shares':True,
'include_hidden':True,
'status_type':True,
})
# Update the list of table_fields
table_fields.update({
'id':'facebook_id',
'from':'facebook_user',
'to':'facebook_object_to',
})
references.update({ # True if attribute.type = list:reference
# False if attribute.type = reference
'facebook_user' : False,
'actions':True,
'with_tags':True,
'place':False,
})
# Update the list of connexions supported by the app
connections.update({
'comments': False,
'likes': False,
})
self.graph = graph # Initialize facebook.GraphAPI
super(FcbPost, self).__init__(facebook_id, fields=fields, graph=graph,
connections=connections, references=references,
facebook_table = facebook_table, table_fields=table_fields)
self.get_object()
self.update()
# Extend self.columns to support non Facebook fields
# In fact self.columns is built using:
# - Supported fields provided by self.fields
# - Table fields provided by self.table_fields
# But there could be extra columns used by a specific framework for
# linking data and for data management purposes. Thoses columns can
# be enumerated after initializing the object.
self.table_columns = [
'paging_next',
'paging_previous',
'paging_cursor_before',
'paging_cursor_after',
]
pass
def get_comments(self):
self.comments = comments = self.get_connection('comments')
return comments
def set_comments(self):
assert hasattr(self, 'comments'), "Call self.get_comments first"
for _comment in self.comments:
comment = FcbGroupComment(_member['id'], self.graph)
comment.db_update(self.facebook_id)
pass
pass
class FcbGroupPost(FcbPost):
    """
    Post sent to a group page (table 'facebook_group_post').
    """
    def __init__(self, facebook_id, graph, facebook_table='group_post'):
        # references are not extended here: that attribute only describes
        # which facebook fields must be wrapped as objects; the link to the
        # owning group is a plain extra column instead.
        # BUGFIX: the original ignored the facebook_table parameter and
        # hardcoded 'group_post' in the super() call.
        super(FcbGroupPost, self).__init__(facebook_id, graph,
            facebook_table=facebook_table, )
        # Extra column tying the post to the group whose feed it belongs to
        self.table_columns = [
            'facebook_group',
        ]
    def db_update(self, facebook_group):
        """
        Store the post, recording the group whose feed it belongs to.

        In this framework a post is an item of a feed, and only groups and
        events have feeds; the subclass therefore names the parent object
        whose id is recorded here.
        """
        record_id = super(FcbGroupPost,
                self).db_update(facebook_group=facebook_group)
        return record_id
class FcbGroup(Base):
    """Wrapper for a Facebook Group object, stored in the 'facebook_group' table."""
    def __init__(self, facebook_id, graph, facebook_table='group'):
        # Update the list of fields for Facebook Group
        fields = {
            'icon':True, # The URL for the group's icon, string
            'cover':True, # Array containing a valid URL, cover_id and image offset. Just the url is kept
            'owner':True, # The profile that created this group, string
            'description':True, # A brief description of the group, string
            'link': True, # The URL for the group's website, string
            'privacy':True, # The privacy setting of the group
            'updated_time':True,# The last time the group was updated
        }
        # Update the list of references
        references = { # True if attribute.type = list:reference
                       # False if attribute.type = reference
            'owner' : False,
        }
        # Update the list of connexions supported by the app
        connections = {
            'events': False,
            'feed': False,
            'members':True,
            'picture':False,
            'docs':False,
        }
        self.graph = graph # Initialize facebook.GraphAPI
        # Base.__init__ builds the column list and binds the DAL table
        super(FcbGroup, self).__init__(facebook_id, fields=fields, graph=graph,
            connections=connections, references=references,
            facebook_table = facebook_table)
        # Fetch the group from the Graph API and merge its fields into self
        self.get_object()
        self.update()
        pass
    def update(self, *args, **kwargs):
        """
        Update the group object
        """
        assert hasattr(self, 'facebook_object'), "Get object first"
        super(FcbGroup, self).update(self.facebook_object, *args, **kwargs)
    def get_members(self):
        # Fetch the ids of the group's members via the 'members' connection
        self.members = members = self.get_connection('members')
        return members
    def set_members(self):
        # Persist each fetched member as a FcbUser row
        assert hasattr(self, 'members'), "Call self.get_members first"
        for _member in self.members:
            member = FcbUser(_member['id'], self.graph)
            member.db_update()
            pass
        pass
    def get_feed(self):
        # Fetch the group's feed (list of posts) via the 'feed' connection
        self.feed = feed = self.get_connection('feed')
        return feed
    def set_feed(self):
        # Persist each feed entry as a FcbGroupPost linked to this group
        assert hasattr(self, 'feed'), "Call self.get_feed first"
        for _post in self.feed:
            post = FcbGroupPost(_post['id'], self.graph)
            post.db_update(self.facebook_id)
            pass
        pass
    def truncate(self,):
        """
        This will truncate the database table and then remove all data in
        this table and related ones. This is provided to ease debugging.
        """
        # NOTE(review): not implemented yet — body is a bare pass.
        pass
    pass
def main():
    """Fetch a Facebook group through the Graph API and mirror it locally."""
    # Browse https://developers.facebook.com/apps and log in to reach the
    # developers' page of the dummy profile created for the app.
    group_id = '335662792434'
    # Build the Graph client from the module-level access token
    api = facebook.GraphAPI(access_token)
    group = FcbGroup(group_id, api)
    # FcbGroup.__init__ already pulls and merges the group data, so no
    # explicit group.update() is needed here; if the database were empty we
    # would pull from Facebook instead.
if __name__ == '__main__':
    main()
| 37.525901 | 183 | 0.570867 | 3,684 | 33,323 | 5.04696 | 0.129207 | 0.017426 | 0.011295 | 0.010488 | 0.417415 | 0.376432 | 0.358683 | 0.353036 | 0.337331 | 0.314903 | 0 | 0.006223 | 0.358581 | 33,323 | 887 | 184 | 37.568207 | 0.863666 | 0.314708 | 0 | 0.392034 | 0 | 0 | 0.094113 | 0.011066 | 0 | 0 | 0 | 0 | 0.033543 | 0 | null | null | 0.090147 | 0.008386 | null | null | 0.008386 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
6ca4d93890d22e9a2bedb2bb76a5374dabfbe327 | 336 | py | Python | misago/misago/faker/utils.py | vascoalramos/misago-deployment | 20226072138403108046c0afad9d99eb4163cedc | [
"MIT"
] | 2 | 2021-03-06T21:06:13.000Z | 2021-03-09T15:05:12.000Z | misago/misago/faker/utils.py | vascoalramos/misago-deployment | 20226072138403108046c0afad9d99eb4163cedc | [
"MIT"
] | null | null | null | misago/misago/faker/utils.py | vascoalramos/misago-deployment | 20226072138403108046c0afad9d99eb4163cedc | [
"MIT"
] | null | null | null | from django.db import IntegrityError
from django.db.transaction import TransactionManagementError
def retry_on_db_error(f):
    """
    Decorator retrying *f* whenever the database raises IntegrityError or
    TransactionManagementError.

    NOTE(review): the retry is an unbounded recursive call (as in the
    original); a persistently failing *f* will eventually exhaust the
    recursion limit rather than loop forever.
    """
    from functools import wraps

    @wraps(f)  # preserve f's name/docstring on the wrapper
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except (IntegrityError, TransactionManagementError):
            return wrapper(*args, **kwargs)

    return wrapper
| 25.846154 | 60 | 0.684524 | 35 | 336 | 6.485714 | 0.514286 | 0.132159 | 0.105727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.22619 | 336 | 12 | 61 | 28 | 0.873077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.222222 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
6caca7fd040bbdd2d75d59a202e32614d79c8ded | 319 | py | Python | Algorithms/Maths/Perfect number/Python/Perfect_Number.py | NripeshKumar/interview-techdev-guide | 79356533d608a4332a4701d091c248db18ef664d | [
"MIT"
] | 1 | 2020-07-26T15:14:15.000Z | 2020-07-26T15:14:15.000Z | Algorithms/Maths/Perfect number/Python/Perfect_Number.py | NripeshKumar/interview-techdev-guide | 79356533d608a4332a4701d091c248db18ef664d | [
"MIT"
] | 1 | 2019-10-07T07:33:23.000Z | 2019-10-07T07:33:23.000Z | Algorithms/Maths/Perfect number/Python/Perfect_Number.py | NripeshKumar/interview-techdev-guide | 79356533d608a4332a4701d091c248db18ef664d | [
"MIT"
] | 1 | 2019-10-14T19:48:06.000Z | 2019-10-14T19:48:06.000Z | # Python Program to find Perfect Number using For loop
Number = int(input(" Please Enter any Number: "))
Sum = 0
for i in range(1, Number):
if(Number % i == 0):
Sum = Sum + i
if (Sum == Number):
print(" %d is a Perfect Number" %Number)
else:
print(" %d is not a Perfect Number" %Number)
| 26.583333 | 55 | 0.598746 | 50 | 319 | 3.82 | 0.54 | 0.204188 | 0.08377 | 0.209424 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012987 | 0.275862 | 319 | 11 | 56 | 29 | 0.813853 | 0.163009 | 0 | 0 | 0 | 0 | 0.299213 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6cb311e39d686ae2703733a8113dcd95dda0b5a8 | 520 | py | Python | bluebiulding/reports/forms.py | ruilobos/bluebuilding | b08c3c91e5402f4f24bf643d4c198b2b60b10428 | [
"bzip2-1.0.6"
] | null | null | null | bluebiulding/reports/forms.py | ruilobos/bluebuilding | b08c3c91e5402f4f24bf643d4c198b2b60b10428 | [
"bzip2-1.0.6"
] | 3 | 2021-03-30T14:17:51.000Z | 2021-06-08T22:34:57.000Z | bluebiulding/reports/forms.py | ruilobos/bluebuilding | b08c3c91e5402f4f24bf643d4c198b2b60b10428 | [
"bzip2-1.0.6"
] | null | null | null | from django.forms import ModelForm
from .models import Report, Cryptocurrency
from django import forms
from django.db import models
from django.forms import ModelChoiceField
class RequestReport(ModelForm):
    """ModelForm for creating a Report; only the cryptocurrency is user-selectable."""
    class Meta:
        model = Report
        # Expose a single field rather than '__all__': the remaining Report
        # columns are presumably filled in by the view. TODO confirm.
        fields = ['cryptocurrency']
class VisitorReport(forms.Form):
    """Plain form asking for a visitor's name and a cryptocurrency choice."""
    name = forms.CharField(label='Your Name:', max_length=100)
    # Choices come from the Cryptocurrency table; submitted values carry the
    # 'symbol' column instead of the default primary key.
    cryptocurrency = forms.ModelChoiceField(queryset=Cryptocurrency.objects.all(), to_field_name="symbol")
| 30.588235 | 107 | 0.740385 | 59 | 520 | 6.40678 | 0.508475 | 0.10582 | 0.079365 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006961 | 0.171154 | 520 | 16 | 108 | 32.5 | 0.87007 | 0.034615 | 0 | 0 | 0 | 0 | 0.05988 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.416667 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
6cb53a382361eb93ecebadcf22dbd37a4bab4e96 | 521 | py | Python | coding/migrations/0003_auto_20160506_0427.py | geosoco/reddit_coding | 0a0d6f4c768d9cc0ae6835e6b1bcc8334de77fba | [
"BSD-3-Clause"
] | null | null | null | coding/migrations/0003_auto_20160506_0427.py | geosoco/reddit_coding | 0a0d6f4c768d9cc0ae6835e6b1bcc8334de77fba | [
"BSD-3-Clause"
] | null | null | null | coding/migrations/0003_auto_20160506_0427.py | geosoco/reddit_coding | 0a0d6f4c768d9cc0ae6835e6b1bcc8334de77fba | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-06 04:27
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated (Django 1.9.5): removes the 'coder' foreign key from
    # both code-instance models. Avoid hand-editing generated migrations.
    dependencies = [
        ('coding', '0002_auto_20160506_0424'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='commentcodeinstance',
            name='coder',
        ),
        migrations.RemoveField(
            model_name='submissioncodeinstance',
            name='coder',
        ),
    ]
| 21.708333 | 48 | 0.600768 | 50 | 521 | 6.06 | 0.72 | 0.138614 | 0.171617 | 0.19802 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086253 | 0.287908 | 521 | 23 | 49 | 22.652174 | 0.730458 | 0.128599 | 0 | 0.375 | 1 | 0 | 0.177384 | 0.099778 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.3125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6ccc0bd20c12a016ff39d19e1726e32aef0e42ce | 98 | py | Python | reveerse.py | shubhamkanungoo007/competitive-programming-solutions | 7a3429b6ffe0fd113de5188d5a85bc90423f53e7 | [
"MIT"
] | null | null | null | reveerse.py | shubhamkanungoo007/competitive-programming-solutions | 7a3429b6ffe0fd113de5188d5a85bc90423f53e7 | [
"MIT"
] | null | null | null | reveerse.py | shubhamkanungoo007/competitive-programming-solutions | 7a3429b6ffe0fd113de5188d5a85bc90423f53e7 | [
"MIT"
] | null | null | null | ls=[12,3,9,4,1]
a=len(ls)
l=[]
for i in reversed(range(a)):
l.append(ls[i])
print(l)
| 14 | 29 | 0.520408 | 22 | 98 | 2.318182 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 0.22449 | 98 | 7 | 30 | 14 | 0.592105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6cddb035d7a92e680caf83b8691d6c972db8ac8e | 546 | py | Python | famille/utils/context_processors.py | huguesmayolle/famille | c7b3399e88a6922cadc0c7c9f2ff7447e7c95377 | [
"Apache-2.0"
] | null | null | null | famille/utils/context_processors.py | huguesmayolle/famille | c7b3399e88a6922cadc0c7c9f2ff7447e7c95377 | [
"Apache-2.0"
] | null | null | null | famille/utils/context_processors.py | huguesmayolle/famille | c7b3399e88a6922cadc0c7c9f2ff7447e7c95377 | [
"Apache-2.0"
] | null | null | null | from famille.models import has_user_related, get_user_related
from famille.utils import get_context
def related_user(request):
"""
A context processor that returns the related user from a request.user.
:param request: the request to be processed
"""
if not has_user_related(request.user):
return {}
return {"related_user": get_user_related(request.user)}
def base(request):
    """
    Providing base variables.

    :param request: the request to be processed (unused here; kept so the
        function matches the context-processor signature)
    :return: the base context dict built by famille.utils.get_context
    """
    return get_context()
| 22.75 | 74 | 0.684982 | 71 | 546 | 5.098592 | 0.380282 | 0.121547 | 0.077348 | 0.121547 | 0.19337 | 0.19337 | 0.19337 | 0 | 0 | 0 | 0 | 0 | 0.234432 | 546 | 23 | 75 | 23.73913 | 0.866029 | 0.3663 | 0 | 0 | 0 | 0 | 0.039088 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
6ce44fe289733af4b0df3431d14b746a053fb375 | 1,424 | py | Python | demo/demo/py/t_cv_nspp.py | xiaoshangpeng/l_sdk_doc | 5f110aea2113b786d8d2536b430113f1c3649af1 | [
"Apache-2.0"
] | 2 | 2019-03-06T05:15:51.000Z | 2020-03-20T01:27:47.000Z | demo/demo/py/t_cv_nspp.py | lishaoliang/l_sdk_doc | c687d49bffdd1eaa234ac96cb1871e7dcf2fb2af | [
"Apache-2.0"
] | null | null | null | demo/demo/py/t_cv_nspp.py | lishaoliang/l_sdk_doc | c687d49bffdd1eaa234ac96cb1871e7dcf2fb2af | [
"Apache-2.0"
] | 3 | 2019-07-22T08:31:14.000Z | 2019-12-18T02:39:44.000Z | #!/usr/bin/python3
#-*-coding:utf-8-*-
"""
///////////////////////////////////////////////////////////////////////////
// Copyright(c) 2019, 武汉舜立软件, All Rights Reserved
// Created: 2019/06/19
//
/// @file t_cv_nspp.py
/// @brief NSPP私有协议获取视频流, opencv显示
/// 每帧视频图像, 可以使用 opencv, numpy 等做中间处理
/// @version 0.1
/// @author 李绍良
/// @see https://github.com/lishaoliang/l_sdk_doc
///////////////////////////////////////////////////////////////////////////
"""
# 添加基础搜索目录
import l_sdk
l_sdk.append_path()
import time
import target as tg
import cv2 as cv
# Stream ID: 0 = main stream; 1 = sub stream.
idx = 0
# Per-frame hook: plug any OpenCV/numpy processing in here.
def process(frame):
    """Return the frame, optionally transformed with OpenCV filters.

    Currently an identity pass-through; uncomment a filter below to
    experiment with intermediate processing.
    """
    # frame = cv.medianBlur(frame, 5)               # median-blur test
    # frame = cv.bilateralFilter(frame, 5, 100, 3)  # bilateral-filter test
    return frame
with l_sdk.c(ip = tg.ip,
             port = tg.port,
             username = tg.username,
             passwd = tg.passwd) as a:
    # Log in, then open the selected stream.
    a.login()
    a.open_stream(idx = idx)

    # Create a resizable display window.
    wndname = 'opencv nspp demo'
    cv.namedWindow(wndname, cv.WINDOW_NORMAL)
    cv.resizeWindow(wndname, 960, 540)

    # Fetch, process and display frames in a loop.
    while True:
        try:
            frame = a.get_stream(idx = idx)
            frame = process(frame)
            cv.imshow(wndname, frame)
        except Exception as e:
            # Best-effort: skip frames that fail to fetch or decode.
            pass

        # 27 is the ESC key code: exit the loop when pressed.
        ch = cv.waitKey(1)
        if 27 == ch :
            break

    # Close every OpenCV window before disconnecting.
    cv.destroyAllWindows()
| 19.777778 | 75 | 0.503511 | 165 | 1,424 | 4.284848 | 0.636364 | 0.022631 | 0.033946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033816 | 0.273174 | 1,424 | 71 | 76 | 20.056338 | 0.648309 | 0.141152 | 0 | 0 | 0 | 0 | 0.02097 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.071429 | 0.142857 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
6ce9e3d320672010c6669e3e454cd3141ada9d8f | 735 | py | Python | fboss/system_tests/TestServer.py | axbaretto/fboss | 3145bfb9747d5788c8bca1c43d031143c6f0a4b5 | [
"BSD-3-Clause"
] | 7 | 2016-09-22T22:53:09.000Z | 2016-10-05T13:19:16.000Z | fboss/system_tests/TestServer.py | axbaretto/fboss | 3145bfb9747d5788c8bca1c43d031143c6f0a4b5 | [
"BSD-3-Clause"
] | null | null | null | fboss/system_tests/TestServer.py | axbaretto/fboss | 3145bfb9747d5788c8bca1c43d031143c6f0a4b5 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from libfb.py.controller.base import BaseController
from fboss.system_tests.test import TestService
from openr.cli.transition_tmp.breeze_cli_utils import ip_addr_to_str
import os
class TestServer(BaseController, TestService.Iface):
    """Thrift controller exposing simple host-health checks."""

    SERVICE = TestService

    def ping(self, ip):
        """Return True iff a single ICMP ping to ``ip`` succeeds.

        :param ip: address in the wire format accepted by ip_addr_to_str
        """
        str_ip = ip_addr_to_str(ip)
        print(str_ip)
        # Use an argument list (no shell) so a hostile address string cannot
        # inject shell commands; the original os.system("ping -c 1 " + str_ip)
        # interpolated str_ip straight into a shell command line.
        response = subprocess.call(["ping", "-c", "1", str_ip])
        return response == 0

    def status(self):
        """Liveness probe: always reports healthy."""
        return True
if __name__ == '__main__':
    # Start the thrift controller when run as a script.
    TestServer.initFromCLI()
| 24.5 | 68 | 0.714286 | 95 | 735 | 5.105263 | 0.526316 | 0.082474 | 0.131959 | 0.045361 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003503 | 0.223129 | 735 | 29 | 69 | 25.344828 | 0.845884 | 0 | 0 | 0.090909 | 0 | 0 | 0.02449 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.363636 | 0.045455 | 0.681818 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
6cf61474813421e848ec894d71860fa8d7985cef | 1,981 | py | Python | data/transcoder_evaluation_gfg/python/MINIMUM_SWAPS_REQUIRED_BRING_ELEMENTS_LESS_EQUAL_K_TOGETHER.py | mxl1n/CodeGen | e5101dd5c5e9c3720c70c80f78b18f13e118335a | [
"MIT"
] | 241 | 2021-07-20T08:35:20.000Z | 2022-03-31T02:39:08.000Z | data/transcoder_evaluation_gfg/python/MINIMUM_SWAPS_REQUIRED_BRING_ELEMENTS_LESS_EQUAL_K_TOGETHER.py | mxl1n/CodeGen | e5101dd5c5e9c3720c70c80f78b18f13e118335a | [
"MIT"
] | 49 | 2021-07-22T23:18:42.000Z | 2022-03-24T09:15:26.000Z | data/transcoder_evaluation_gfg/python/MINIMUM_SWAPS_REQUIRED_BRING_ELEMENTS_LESS_EQUAL_K_TOGETHER.py | mxl1n/CodeGen | e5101dd5c5e9c3720c70c80f78b18f13e118335a | [
"MIT"
] | 71 | 2021-07-21T05:17:52.000Z | 2022-03-29T23:49:28.000Z | # Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold(arr, n, k):
    """Minimum swaps to gather all elements <= k of arr[:n] into one window.

    Slides a window whose size equals the count of elements <= k and
    returns the minimum number of "bad" (> k) elements found inside any
    such window.
    """
    # Target window size: how many elements belong together.
    window = sum(1 for i in range(n) if arr[i] <= k)
    # Bad elements inside the initial window.
    bad = sum(1 for i in range(window) if arr[i] > k)
    best = bad
    # Slide the window one step at a time, updating the bad count.
    for right in range(window, n):
        left = right - window
        if arr[left] > k:
            bad -= 1
        if arr[right] > k:
            bad += 1
        best = min(best, bad)
    return best
#TOFILL

if __name__ == '__main__':
    # Evaluation harness: compares a candidate f_filled (spliced in at the
    # #TOFILL marker above by the transcoder pipeline) against the
    # reference f_gold on fixed (arr, n, k) parameter sets.
    param = [
        ([7, 12, 15, 30, 33, 34, 53, 66, 73, 74, 76, 77, 85, 90],9,8,),
        ([-62, -20, -26, -24, 92, 66, -74, -4, 18, -82, -36, 92, -4, 92, -80, 56, -24, 4, -48, -10, -14, -46, -16, -58, -58, -6, -68, -22, -82, -16, 76, -30, -86, -38, -66, 28, 58, 30, -44, -56],24,28,),
        ([0, 0, 0, 0, 0, 1, 1],5,6,),
        ([8, 48, 64, 77, 61, 60, 96, 95, 41, 68, 9, 67, 10, 66, 16, 59, 83, 21, 47, 16, 13, 85, 52, 11, 48, 31, 99, 57, 57, 44, 66, 93, 80, 69, 23, 2, 55, 90],36,24,),
        ([-80, -58, -40, -34, 14, 36, 48, 56, 58, 60, 84, 90, 92, 92],7,8,),
        ([1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1],26,23,),
        ([4, 4, 8, 9, 13, 17, 18, 19, 21, 22, 22, 23, 27, 28, 30, 44, 46, 48, 53, 53, 55, 60, 61, 62, 68, 70, 70, 71, 73, 80, 82, 82, 85, 88, 90, 93, 99],28,36,),
        ([-28, 50, 82, -32, 32, -78, 12, 50, 38, 34, -10, 6, 86, -56, -2],13,9,),
        ([0, 0, 0, 0, 1, 1, 1, 1, 1, 1],9,8,),
        ([37, 88, 83, 91, 11, 39, 98, 70, 93, 74, 24, 90, 66, 3, 6, 28],12,12,)
    ]
    # Count the cases where the candidate agrees with the reference.
    n_success = 0
    for i, parameters_set in enumerate(param):
        if f_filled(*parameters_set) == f_gold(*parameters_set):
            n_success+=1
    print("#Results: %i, %i" % (n_success, len(param)))
6cf7345da10ea4a949f36a48ef5cb2ab2e3bb7f5 | 193 | py | Python | Python/leapyear.py | BiswajitDeb/My_all_programs | 4717cfc0b3b1aeda75f8eec0b7ff643e8556d262 | [
"Unlicense"
] | null | null | null | Python/leapyear.py | BiswajitDeb/My_all_programs | 4717cfc0b3b1aeda75f8eec0b7ff643e8556d262 | [
"Unlicense"
] | null | null | null | Python/leapyear.py | BiswajitDeb/My_all_programs | 4717cfc0b3b1aeda75f8eec0b7ff643e8556d262 | [
"Unlicense"
] | null | null | null | def is_leap(year):
leap = False
if year%400==0:
return True
if year%4==0 and year%100!=0:
return True
return leap
# Read a year from stdin and report whether it is a leap year.
year = int(input())
print(is_leap(year))
9f093ad1af3aaeb501797a39d3e4569e78f8af02 | 968 | py | Python | atividade1/questao9.py | AbraaoHonorio/PDI | 96cc0839c38b1b13989eca762f98c472ac11ed00 | [
"MIT"
] | 2 | 2018-07-05T00:46:21.000Z | 2018-07-09T15:14:57.000Z | atividade1/questao9.py | AbraaoHonorio/PDI | 96cc0839c38b1b13989eca762f98c472ac11ed00 | [
"MIT"
] | null | null | null | atividade1/questao9.py | AbraaoHonorio/PDI | 96cc0839c38b1b13989eca762f98c472ac11ed00 | [
"MIT"
] | null | null | null | from util import *
import numpy as np
import cv2
def questao9A(img):
    """Apply the 3x3 sharpening filter:

         0 -1  0
        -1  5 -1
         0 -1  0
    """
    kernel = np.array([[0, -1, 0],
                       [-1, 5, -1],
                       [0, -1, 0]])
    return applyFilter3x3(img, kernel)
def questao9B(img):
    """Apply the 3x3 filter:

        0  0  0
        0  1  0
        0  0 -1
    """
    kernel = np.array([[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, -1]])
    return applyFilter3x3(img, kernel)
def questao9ATest(img):
    """Use OpenCV's own filter2D to cross-check questao9A's result."""
    kernel = np.array([[0, -1, 0],
                       [-1, 5, -1],
                       [0, -1, 0]])
    # Allocate an output the same size as the input. The original did
    # `resultImage = img`, which aliases the input, so filter2D(dst=...)
    # silently overwrote the caller's image in place.
    resultImage = img.copy()
    cv2.filter2D(src=img, ddepth=-1, kernel=kernel, dst=resultImage)
    return resultImage
def questao9BTest(img):
    """Use OpenCV's own filter2D to cross-check questao9B's result."""
    kernel = np.array([[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, -1]])
    # Copy instead of aliasing `img` so the in-place dst write cannot
    # mutate the caller's image (the original used `resultImage = img`).
    resultImage = img.copy()
    cv2.filter2D(src=img, ddepth=-1, kernel=kernel, dst=resultImage)
    return resultImage
9f22453504bb54f13cc13f66c6c9f55f6453d74c | 833 | py | Python | learning_python/ch21/timeseqs_timer2.py | dantin/python-by-example | 5769c7a332ebd60fd54e477b6813f2f2a0f3f37f | [
"BSD-3-Clause"
] | null | null | null | learning_python/ch21/timeseqs_timer2.py | dantin/python-by-example | 5769c7a332ebd60fd54e477b6813f2f2a0f3f37f | [
"BSD-3-Clause"
] | null | null | null | learning_python/ch21/timeseqs_timer2.py | dantin/python-by-example | 5769c7a332ebd60fd54e477b6813f2f2a0f3f37f | [
"BSD-3-Clause"
] | null | null | null | #-*- coding: utf-8 -*-
"""Test the relative speed of iteration tool alternatives"""
import sys
import timer2
# Shared workload: every variant adds 10 to each of `reps` integers.
# Each function deliberately keeps its own iteration mechanism -- that is
# exactly what this benchmark compares.
reps = 10000
repslist = list(range(reps))

def forLoop():
    """Explicit for-loop with list.append."""
    res = []
    for value in repslist:
        res.append(value + 10)
    return res

def listComp():
    """List comprehension."""
    return [value + 10 for value in repslist]

def mapCall():
    """Built-in map() over a lambda, materialized with list()."""
    return list(map((lambda value: value + 10), repslist))

def genExp():
    """Generator expression consumed by list()."""
    return list(value + 10 for value in repslist)

def genFunc():
    """Generator function driven to completion by list()."""
    def gen():
        for value in repslist:
            yield value + 10
    return list(gen())
if __name__ == '__main__':
    # Print the interpreter version, then time each iteration variant.
    print(sys.version)
    for test in (forLoop, listComp, mapCall, genExp, genFunc):
        # bestoftotal: best of _reps1 runs, each totalling _reps calls.
        (total, result) = timer2.bestoftotal(test, _reps1=5, _reps=1000)
        print('%-9s: %.5f ==> [%s..%s]' %
              (test.__name__, total, result[0], result[-1]))
| 18.511111 | 72 | 0.582233 | 112 | 833 | 4.205357 | 0.464286 | 0.031847 | 0.050955 | 0.118896 | 0.084926 | 0.084926 | 0.084926 | 0 | 0 | 0 | 0 | 0.045977 | 0.268908 | 833 | 44 | 73 | 18.931818 | 0.727422 | 0.091236 | 0 | 0.076923 | 0 | 0 | 0.041278 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.076923 | 0.115385 | 0.5 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
9f4b269e063867cb3ebcd17759660d88c6dc8f64 | 2,276 | py | Python | spoteno/steps/replacement.py | Z-80/spoteno | 5d2ae7da437cfd8f9cf351b9602269c115dcd46f | [
"MIT"
] | 2 | 2020-01-16T10:23:05.000Z | 2021-11-17T15:44:29.000Z | spoteno/steps/replacement.py | Z-80/spoteno | 5d2ae7da437cfd8f9cf351b9602269c115dcd46f | [
"MIT"
] | null | null | null | spoteno/steps/replacement.py | Z-80/spoteno | 5d2ae7da437cfd8f9cf351b9602269c115dcd46f | [
"MIT"
] | 2 | 2021-03-25T12:06:36.000Z | 2021-11-17T15:44:30.000Z | import re
class ReplaceChar:
    """Normalization step substituting every occurrence of mapped substrings."""

    def __init__(self, mapping):
        # mapping: {old_substring: replacement}
        self.mapping = mapping

    def run(self, token):
        """Apply all substitutions; return the token wrapped in a list."""
        result = token
        for old, new in self.mapping.items():
            result = result.replace(old, new)
        return [result]
class ReplaceFull:
    """Normalization step swapping a whole token for its mapped value."""

    def __init__(self, mapping):
        # mapping: {token: replacement_token}
        self.mapping = mapping

    def run(self, token):
        """Return [replacement] on an exact-token match, else [token]."""
        return [self.mapping.get(token, token)]
class ReplaceRegex:
    """Normalization step applying each regex substitution in mapping order."""

    def __init__(self, mapping):
        # mapping: {regex_pattern: replacement}
        self.mapping = mapping

    def run(self, token):
        """Run every pattern through re.sub; return the token in a list."""
        for pattern, repl in self.mapping.items():
            token = re.sub(pattern, repl, token)
        return [token]
class ReplaceIfNotSurroundedByDigits:
    """
    Replace char if a digit is not on both sides of the char.
    """

    def __init__(self, chars):
        # chars: {char_to_replace: replacement}
        self.chars = chars
        # Matches a string that is exactly one digit.
        self.digit_pattern = re.compile(r'^\d$')

    def run(self, token):
        for char, replace_with in self.chars.items():
            # Find all occurences of char
            matches = re.finditer(re.escape(char), token)
            for m in matches:
                # token[m.start()-1:m.start()] is '' for a match at index 0,
                # so a char at the start counts as "no digit on the left".
                left_match = self.digit_pattern.match(
                    token[m.start()-1: m.start()])
                right_match = self.digit_pattern.match(
                    token[m.end(): m.end()+1])
                # Replace if either no digit on the left or on the right
                if left_match is None or right_match is None:
                    # NOTE(review): token is rewritten while the match
                    # offsets were computed on the original string -- only
                    # safe when replace_with has the same length as char.
                    # Confirm callers never pass longer/shorter replacements.
                    token = token[:m.start()] + replace_with + token[m.end():]
        return [token]
class ReplaceIfNotPrecededByDigit:
    """
    Replace char if no digit is in front of char.
    """

    def __init__(self, chars):
        # chars: {char_to_replace: replacement}
        self.chars = chars
        # Matches a string that is exactly one digit.
        self.digit_pattern = re.compile(r'^\d$')

    def run(self, token):
        for char, replace_with in self.chars.items():
            # Find all occurences of char
            matches = re.finditer(re.escape(char), token)
            for m in matches:
                # Empty slice (match at index 0) never matches the digit
                # pattern, so a leading char is treated as unpreceded.
                left_match = self.digit_pattern.match(
                    token[m.start()-1: m.start()])
                # Replace if no digit on the left
                if left_match is None:
                    # NOTE(review): offsets come from the original token;
                    # only safe when replace_with is the same length as
                    # char -- confirm with callers.
                    token = token[:m.start()] + replace_with + token[m.end():]
        return [token]
| 25.288889 | 78 | 0.553163 | 285 | 2,276 | 4.294737 | 0.203509 | 0.089869 | 0.044935 | 0.061275 | 0.70915 | 0.664216 | 0.664216 | 0.638072 | 0.638072 | 0.638072 | 0 | 0.001997 | 0.34007 | 2,276 | 89 | 79 | 25.573034 | 0.812916 | 0.108524 | 0 | 0.72 | 0 | 0 | 0.00401 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.02 | 0 | 0.44 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9f62529f8128b6b921e47558ce93bd143c779036 | 590 | py | Python | _tests/vars.py | deuteronomy-works/Jeremiah | f72b45284ebde9493a87d16072520b8717152aeb | [
"MIT"
] | 1 | 2019-11-26T14:50:59.000Z | 2019-11-26T14:50:59.000Z | _tests/vars.py | deuteronomy-works/Jeremiah | f72b45284ebde9493a87d16072520b8717152aeb | [
"MIT"
] | 32 | 2019-11-26T13:24:56.000Z | 2019-12-06T20:19:45.000Z | _tests/vars.py | deuteronomy-works/Jeremiah | f72b45284ebde9493a87d16072520b8717152aeb | [
"MIT"
] | null | null | null | LOVE = 'this'
def lover():
"""
docstring
"""
print(LOVE)
def print_nothing():
    """
    printing nothing
    """
    # Prints the literal string 'nothing'.
    print('nothing')
def print_nothings():
    """
    printing nothings
    """
    # Prints the literal string 'nothings'.
    print('nothings')
class All():
    """
    Some doc right here
    """

    def __init__(self):
        """
        Doctst
        """
        # Instance attribute read back by alpha()/alphax().
        self.hell = 'ok'

    def alpha(self):
        """
        Some doc right here
        """
        print(self.hell)

    def alphax(self):
        """
        Some doc right heres
        """
        print(self.hell)
| 13.409091 | 31 | 0.444068 | 54 | 590 | 4.740741 | 0.425926 | 0.082031 | 0.140625 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.410169 | 590 | 43 | 32 | 13.72093 | 0.735632 | 0.189831 | 0 | 0.142857 | 0 | 0 | 0.061947 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0 | 0 | 0.5 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
9f7ae0ac7056a261a0432319313701756b54a16b | 1,560 | py | Python | notification/serializers.py | ThusharaX/mumbleapi | 8435fe9d86869cce81961f42c9860fa3810c171b | [
"Apache-2.0"
] | 187 | 2021-04-24T14:49:44.000Z | 2022-03-31T14:25:22.000Z | notification/serializers.py | shukl08vk/mumbleapi | 101825d8aecba7eac4e31046e7b4b15b36c55f77 | [
"Apache-2.0"
] | 119 | 2021-04-24T18:08:43.000Z | 2022-01-09T00:57:19.000Z | notification/serializers.py | shukl08vk/mumbleapi | 101825d8aecba7eac4e31046e7b4b15b36c55f77 | [
"Apache-2.0"
] | 174 | 2021-04-24T15:57:23.000Z | 2022-03-11T02:09:04.000Z | from rest_framework import serializers
from .models import Notification
from users.serializers import UserProfileSerializer
from feed.serializers import MumbleSerializer
from article.serializers import ArticleSerializer
from discussion.serializers import DiscussionSerializer
class NotificationSerializer(serializers.ModelSerializer):
    """Serializes a Notification plus the nested object matching its type.

    Each get_* method returns serialized data only when notification_type
    selects that relation; otherwise it yields None.
    """

    created_by = serializers.SerializerMethodField(read_only=True)
    followed_by = serializers.SerializerMethodField(read_only=True)
    mumble = serializers.SerializerMethodField(read_only=True)
    article = serializers.SerializerMethodField(read_only=True)
    discussion = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = Notification
        fields = '__all__'

    def get_created_by(self, obj):
        # The creator profile is serialized for every notification type.
        return UserProfileSerializer(obj.created_by.userprofile, many=False).data

    def get_followed_by(self, obj):
        if obj.notification_type != 'follow':
            return None
        return UserProfileSerializer(obj.followed_by.userprofile, many=False).data

    def get_mumble(self, obj):
        if obj.notification_type != 'mumble':
            return None
        return MumbleSerializer(obj.mumble, many=False).data

    def get_article(self, obj):
        if obj.notification_type != 'article':
            return None
        return ArticleSerializer(obj.article, many=False).data

    def get_discussion(self, obj):
        if obj.notification_type != 'discussion':
            return None
        return DiscussionSerializer(obj.discussion, many=False).data
| 37.142857 | 86 | 0.737179 | 165 | 1,560 | 6.818182 | 0.260606 | 0.142222 | 0.16 | 0.177778 | 0.431111 | 0.258667 | 0.077333 | 0 | 0 | 0 | 0 | 0 | 0.189103 | 1,560 | 41 | 87 | 38.04878 | 0.889328 | 0 | 0 | 0.121212 | 0 | 0 | 0.023077 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.151515 | false | 0 | 0.181818 | 0.030303 | 0.818182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
9f7ae861999c2a8c42390a157e32278aae1639e3 | 630 | py | Python | oops_fhir/r4/value_set/v3_query_status_code.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/value_set/v3_query_status_code.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/value_set/v3_query_status_code.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | from pathlib import Path
from fhir.resources.valueset import ValueSet as _ValueSet
from oops_fhir.utils import ValueSet
from oops_fhir.r4.code_system.v3_query_status_code import (
v3QueryStatusCode as v3QueryStatusCode_,
)
__all__ = ["v3QueryStatusCode"]
# Load the sibling "<module>.json" ValueSet definition shipped next to this file.
_resource = _ValueSet.parse_file(Path(__file__).with_suffix(".json"))
class v3QueryStatusCode(v3QueryStatusCode_):
    """
    v3 Code System QueryStatusCode

     A code specifying the state of the Query.

    Status: active - Version: 2018-08-12

    http://terminology.hl7.org/ValueSet/v3-QueryStatusCode
    """

    class Meta:
        # Bind the JSON-loaded ValueSet resource parsed at import time.
        resource = _resource
| 20.322581 | 69 | 0.746032 | 75 | 630 | 5.973333 | 0.546667 | 0.0625 | 0.071429 | 0.089286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034549 | 0.173016 | 630 | 30 | 70 | 21 | 0.825336 | 0.266667 | 0 | 0 | 0 | 0 | 0.050691 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.363636 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
9f7ecd588a6c9c87f31a63286985dd3a7530b9ee | 1,316 | py | Python | keyboards/shop.py | roomdie/KingsEmpiresBot | 6de6d033318fef1d243dc3347d91b67e84ee285a | [
"MIT"
] | null | null | null | keyboards/shop.py | roomdie/KingsEmpiresBot | 6de6d033318fef1d243dc3347d91b67e84ee285a | [
"MIT"
] | null | null | null | keyboards/shop.py | roomdie/KingsEmpiresBot | 6de6d033318fef1d243dc3347d91b67e84ee285a | [
"MIT"
] | null | null | null | from aiogram import types
kb_shop = types.InlineKeyboardMarkup()
btn_items = types.InlineKeyboardButton(
text="🗃 Сундуки", callback_data="shop_chest"
)
btn_money = types.InlineKeyboardButton(
text="💰 Монеты", callback_data="shop_money"
)
btn_stock = types.InlineKeyboardButton(
text="⚒ Ресурсы", callback_data="shop_stock"
)
btn_donate = types.InlineKeyboardButton(
text="💎 Донат", callback_data="shop_donate"
)
kb_shop.add(btn_items)
kb_shop.row(btn_money, btn_stock)
kb_shop.add(btn_donate)
btn_back = types.InlineKeyboardButton(
text="назад", callback_data="back_shop"
)
kb_donate = types.InlineKeyboardMarkup()
btn_go_to_donate = types.InlineKeyboardButton(
text="купить 💎", url="https://t.me/KingsEmpiresDonateBot"
)
kb_donate.add(btn_go_to_donate)
kb_donate.add(btn_back)
kb_buy_chest = types.InlineKeyboardMarkup()
btn_buy_chest = types.InlineKeyboardButton(
text="🔑 открыть", callback_data="buy_chest"
)
btn_back_chest = types.InlineKeyboardButton(
text="назад", callback_data="back_chest"
)
kb_buy_chest.add(btn_buy_chest)
kb_buy_chest.add(btn_back_chest)
kb_url_private_chat = types.InlineKeyboardMarkup()
btn_url_private_chat = types.InlineKeyboardButton(
text="перейти", url="https://t.me/KingsEmpiresBot"
)
kb_url_private_chat.add(btn_url_private_chat) | 23.927273 | 61 | 0.778875 | 179 | 1,316 | 5.407821 | 0.24581 | 0.232438 | 0.269628 | 0.072314 | 0.146694 | 0.146694 | 0.103306 | 0 | 0 | 0 | 0 | 0 | 0.102584 | 1,316 | 55 | 62 | 23.927273 | 0.814564 | 0 | 0 | 0 | 0 | 0 | 0.150342 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.025 | 0 | 0.025 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9f902222f5bf5775ab73afb0aa80aa5aca016103 | 318 | py | Python | DataStructures/Stacks/stack_start.py | mithunpal989/pystart | 8551b91636dcb113c4de3e715e458bd51db8801f | [
"MIT"
] | null | null | null | DataStructures/Stacks/stack_start.py | mithunpal989/pystart | 8551b91636dcb113c4de3e715e458bd51db8801f | [
"MIT"
] | null | null | null | DataStructures/Stacks/stack_start.py | mithunpal989/pystart | 8551b91636dcb113c4de3e715e458bd51db8801f | [
"MIT"
] | null | null | null | # try out the Python stack functions
# TODO: create a new empty stack
stack = []
# TODO: push items onto the stack
stack.append(1)
stack.append(2)
stack.append(3)
stack.append(4)
# TODO: print the stack contents
print(stack)
#TODO: pop an item off the stack
x = stack.pop()
print(x)
print(stack)
| 16.736842 | 37 | 0.676101 | 52 | 318 | 4.134615 | 0.5 | 0.204651 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016064 | 0.216981 | 318 | 18 | 38 | 17.666667 | 0.84739 | 0.5 | 0 | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.055556 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9f925e6b744ad82229b1420236c113e20272adc2 | 393 | py | Python | options.py | HrPedersen/anderson-lake-python | 141d25fd4b0b23cd4a288cfb9d786307da4ab292 | [
"MIT"
] | 8 | 2020-04-02T16:04:21.000Z | 2022-01-07T22:48:35.000Z | anderson_lake/options.py | tcpedersen/ANNOptionPricingAndCalibration | 9db7b6ccd4609f399aa817869b41104eac488e88 | [
"MIT"
] | null | null | null | anderson_lake/options.py | tcpedersen/ANNOptionPricingAndCalibration | 9db7b6ccd4609f399aa817869b41104eac488e88 | [
"MIT"
] | 3 | 2020-07-17T05:17:54.000Z | 2020-11-19T09:49:52.000Z | '# -*- coding: utf-8 -*-'
import numpy as np
class EuropeanCallOption:
    """European call option with payoff max(forward - strike, 0)."""

    def __init__(self, tau, strike):
        self.tau = tau        # time to expiry
        self.strike = strike  # strike price

    def __call__(self, forward):
        """Vectorized payoff for a forward price (scalar or ndarray)."""
        return np.maximum(forward - self.strike, 0)

    def __str__(self):
        return f"tau: {self.tau}\n\rstrike: {self.strike}\n\r"
| 24.5625 | 51 | 0.557252 | 51 | 393 | 4.019608 | 0.470588 | 0.102439 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007353 | 0.307888 | 393 | 15 | 52 | 26.2 | 0.746324 | 0.058524 | 0 | 0 | 0 | 0 | 0.170483 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.083333 | 0.083333 | 0.583333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
9fb335a53c3044bf96a366fe93b6ebdd81e28a4a | 2,121 | py | Python | eta/modules/video_stream_info.py | ErfanTagh/eta | 3aa51006439a89cc5e2c78bbe1f98234bbc347ea | [
"Apache-2.0"
] | 25 | 2018-07-21T02:37:34.000Z | 2022-03-30T12:57:54.000Z | eta/modules/video_stream_info.py | ErfanTagh/eta | 3aa51006439a89cc5e2c78bbe1f98234bbc347ea | [
"Apache-2.0"
] | 183 | 2018-06-13T18:57:00.000Z | 2022-02-24T14:35:49.000Z | eta/modules/video_stream_info.py | ErfanTagh/eta | 3aa51006439a89cc5e2c78bbe1f98234bbc347ea | [
"Apache-2.0"
] | 13 | 2018-09-10T18:46:58.000Z | 2022-02-07T02:25:31.000Z | #!/usr/bin/env python
"""
A module for getting the stream info for a video.
Info:
type: eta.core.types.Module
version: 0.1.0
Copyright 2017-2021, Voxel51, Inc.
voxel51.com
"""
# pragma pylint: disable=redefined-builtin
# pragma pylint: disable=unused-wildcard-import
# pragma pylint: disable=wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import *
# pragma pylint: enable=redefined-builtin
# pragma pylint: enable=unused-wildcard-import
# pragma pylint: enable=wildcard-import
import logging
import sys
from eta.core.config import Config
import eta.core.module as etam
import eta.core.video as etav
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
class ModuleConfig(etam.BaseModuleConfig):
    """Module configuration settings.

    Attributes:
        data (DataConfig)
    """

    def __init__(self, d):
        """Parse the module config dict.

        Args:
            d: a JSON dict containing a "data" array of DataConfig dicts
        """
        super(ModuleConfig, self).__init__(d)
        self.data = self.parse_object_array(d, "data", DataConfig)
class DataConfig(Config):
    """Data configuration settings.

    Inputs:
        video (eta.core.types.Video): The input video

    Outputs:
        stream_info (eta.core.types.VideoStreamInfo): The video stream info
    """

    def __init__(self, d):
        # Input video path and output JSON path for a single job.
        self.video = self.parse_string(d, "video")
        self.stream_info = self.parse_string(d, "stream_info")
def _video_stream_info(config):
    """Write the VideoStreamInfo JSON for each video in ``config.data``."""
    for data in config.data:
        logger.info("Reading stream info for %s", data.video)
        stream_info = etav.VideoStreamInfo.build_for(data.video)
        stream_info.write_json(data.stream_info)
def run(config_path, pipeline_config_path=None):
    """Run the video_stream_info module.

    Args:
        config_path: path to a ModuleConfig file
        pipeline_config_path: optional path to a PipelineConfig file
    """
    config = ModuleConfig.from_json(config_path)
    # Standard ETA module setup (logging, pipeline wiring) before work.
    etam.setup(config, pipeline_config_path=pipeline_config_path)
    _video_stream_info(config)
if __name__ == "__main__":
run(*sys.argv[1:]) # pylint: disable=no-value-for-parameter
| 25.25 | 75 | 0.721829 | 280 | 2,121 | 5.207143 | 0.342857 | 0.068587 | 0.043896 | 0.038409 | 0.082305 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009211 | 0.181047 | 2,121 | 83 | 76 | 25.554217 | 0.830167 | 0.401226 | 0 | 0.066667 | 0 | 0 | 0.045416 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.333333 | 0 | 0.533333 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
9fbf7d357bbf69ce07d8009febcd7e8bb70c0382 | 1,627 | py | Python | electionnight/admin/page_content.py | The-Politico/politico-civic-election-night | a8aaf5be43872a7b84d2b0d7c2b6151d32d4d8b6 | [
"MIT"
] | null | null | null | electionnight/admin/page_content.py | The-Politico/politico-civic-election-night | a8aaf5be43872a7b84d2b0d7c2b6151d32d4d8b6 | [
"MIT"
] | 55 | 2018-03-19T20:56:04.000Z | 2018-10-10T21:28:26.000Z | electionnight/admin/page_content.py | The-Politico/politico-civic-election-night | a8aaf5be43872a7b84d2b0d7c2b6151d32d4d8b6 | [
"MIT"
] | null | null | null | from django import forms
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from election.models import ElectionDay
from electionnight.models import PageContentBlock
class BlockAdminForm(forms.ModelForm):
    """Admin form for editing a single PageContentBlock."""

    # content = forms.CharField(widget=CKEditorWidget())  # TODO: To markdown

    class Meta:
        model = PageContentBlock
        fields = ("content_type", "content")
class PageContentBlockInline(admin.StackedInline):
    """Inline editor so content blocks are managed on the PageContent page."""

    model = PageContentBlock
    extra = 0  # no blank extra forms by default
    form = BlockAdminForm
class ElectionDayFilter(admin.SimpleListFilter):
    """Sidebar filter limiting PageContent to a single election date."""

    title = _("Election date")
    parameter_name = "date"

    def lookups(self, request, model_admin):
        # One choice per ElectionDay, labeled with its ISO date.
        return (
            (e.date, e.date.strftime("%Y-%m-%d"))
            for e in ElectionDay.objects.all()
        )

    def queryset(self, request, queryset):
        # SimpleListFilter.value() returns None (not "") when no option is
        # selected; the original `== ""` check therefore fell through and
        # filtered on election_day__date=None, hiding all rows.
        if not self.value():
            return queryset
        return queryset.filter(election_day__date=self.value())
class PageContentAdmin(admin.ModelAdmin):
    """Admin for page content: read-only page metadata plus inline blocks."""

    inlines = [PageContentBlockInline]
    list_filter = ("content_type", ElectionDayFilter)
    list_display = ("page_location",)
    search_fields = ("page_location",)
    filter_horizontal = ("featured",)
    # actions = None
    # These fields are not editable through the admin UI.
    readonly_fields = (
        "election_day",
        "page_location",
        "content_object",
        "division",
    )
    fieldsets = (
        (None, {"fields": ("page_location",)}),
        (
            "Page Meta",
            {"fields": ("election_day", "content_object", "division")},
        ),
        ("Relationships", {"fields": ("featured",)}),
    )
)
| 27.576271 | 76 | 0.637369 | 155 | 1,627 | 6.541935 | 0.496774 | 0.047337 | 0.035503 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00081 | 0.241549 | 1,627 | 58 | 77 | 28.051724 | 0.820908 | 0.051629 | 0 | 0.044444 | 0 | 0 | 0.150747 | 0 | 0 | 0 | 0 | 0.017241 | 0 | 1 | 0.044444 | false | 0 | 0.111111 | 0.022222 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
4c80d9b19b5b249d3eba719cc5bbfc65a7251f09 | 1,225 | py | Python | Calc_K-Means.py | leenakhote/visualSearchFlask | 5ab666335e1176dcf82a863f72760bb0ce1b840c | [
"Apache-2.0"
] | null | null | null | Calc_K-Means.py | leenakhote/visualSearchFlask | 5ab666335e1176dcf82a863f72760bb0ce1b840c | [
"Apache-2.0"
] | null | null | null | Calc_K-Means.py | leenakhote/visualSearchFlask | 5ab666335e1176dcf82a863f72760bb0ce1b840c | [
"Apache-2.0"
] | null | null | null | from pyspark import SparkConf, SparkContext #from pacakge import classes/function
# Reuse an existing SparkContext if one is already running.
sc = SparkContext.getOrCreate()
from numpy import array
from math import sqrt
from pyspark.ml.linalg import DenseVector, SparseVector, Vectors, VectorUDT
from pyspark.mllib.linalg import Vector as MLLibVector, Vectors as MLLibVectors
from pyspark.mllib.clustering import KMeans, KMeansModel
data = sc.textFile("/home/siddhesh/train_no_1.csv")
# NOTE(review): "/n" is probably a typo for "\n"; it is harmless here since
# textFile() already yields one record per line. Quotes are stripped from
# every field and columns 3+ are packed into a dense MLlib vector.
parsed_data = data.map(lambda x : x.split("/n")).flatMap(lambda words : (word.split(",") for word in words))\
    .map(lambda x : [elem.strip('"') for elem in x]).map(lambda x: (MLLibVectors.dense(x[3:])))
print parsed_data

# Build the model (cluster the data)
clusters = KMeans.train(parsed_data, 2, maxIterations=20, initializationMode="random")
# Evaluate clustering by computing Within Set Sum of Squared Errors
def error(point):
    """Return the Euclidean distance from *point* to its assigned cluster center.

    Relies on the module-level ``clusters`` KMeansModel; ``point - center`` is
    elementwise vector subtraction on the MLlib dense vectors.
    """
    center = clusters.centers[clusters.predict(point)]
    return sqrt(sum(x ** 2 for x in (point - center)))
#
# Per-point error terms; the commented-out reduce would sum them into the
# Within Set Sum of Squared Errors (WSSSE) clustering-quality metric.
WSSSE = parsed_data.map(lambda point: error(point)).collect() #.reduce(lambda x, y: x + y)
# print("Within Set Sum of Squared Error = " + str(WSSSE))
# print clusters.centers
| 35 | 112 | 0.72898 | 171 | 1,225 | 5.187135 | 0.48538 | 0.049605 | 0.033822 | 0.031567 | 0.047351 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005758 | 0.149388 | 1,225 | 34 | 113 | 36.029412 | 0.845489 | 0.233469 | 0 | 0 | 0 | 0 | 0.04189 | 0.031149 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.375 | null | null | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
4c827f1c4198d9cec8e9bb450f5664a4c6fc6fee | 4,486 | py | Python | tests/unit/wall-e/models/test_canvas.py | dbwebb-se/umbridge | 76dd1a15f2f481c1fc3819990ab41e20ca65afe3 | [
"MIT"
] | null | null | null | tests/unit/wall-e/models/test_canvas.py | dbwebb-se/umbridge | 76dd1a15f2f481c1fc3819990ab41e20ca65afe3 | [
"MIT"
] | 12 | 2021-09-07T12:11:31.000Z | 2022-03-22T10:05:03.000Z | tests/unit/wall-e/models/test_canvas.py | dbwebb-se/umbridge | 76dd1a15f2f481c1fc3819990ab41e20ca65afe3 | [
"MIT"
] | null | null | null | """
Contains tests for app.wall_e.models.canvas_api.Canvas class
"""
# pylint: disable=unused-argument, disable=protected-access
from unittest import mock
import pytest
from tests.mock.mock_requester import get_mocked_canvas_set_response
@mock.patch('app.wall_e.models.requester.Requester._base_request')
def test_init(mock_base_req, test_app):
    """
    Tests that the Canvas object is initialised from the mocked responses
    (users, assignments, course id/name and config).
    """
    mock_value = [
        {'login_id': 'moc@bth.se', 'id': 0}
    ]
    c = get_mocked_canvas_set_response(mock_base_req, mock_value)
    # Construction fetches users and assignments: exactly two requests.
    assert mock_base_req.call_count == 2
    assert c.users == mock_value
    assert c.assignments == mock_value
    assert c.course_id == 1
    assert c._course_name == 'name'
    assert c._config is not None
@mock.patch('app.wall_e.models.requester.Requester._base_request')
def test_users_and_acronyms(mock_base_req, test_app):
    """
    Tests that users are mapped to {id: acronym} from cached data,
    without issuing any further requests.
    """
    c = get_mocked_canvas_set_response(mock_base_req, [
        {'login_id': 'moc@bth.se', 'id': 0},
        {'login_id': 'mabn17@bth.se', 'id': 1}
    ])
    mock_base_req.reset_mock()

    # Acronym is the part of login_id before the first '@'.
    assert c.users_and_acronyms() == {0: 'moc', 1: 'mabn17'}
    assert mock_base_req.call_count == 0

    # No users -> empty mapping.
    c = get_mocked_canvas_set_response(mock_base_req, [])
    assert c.users_and_acronyms() == {}
@mock.patch('app.wall_e.models.requester.Requester._base_request')
def test_get_user_by_acronym(mock_base_req, test_app):
    """
    Tests that a user record can be looked up by acronym from cached data,
    and that an unknown acronym raises IndexError.
    """
    c = get_mocked_canvas_set_response(mock_base_req, [
        {'login_id': 'moc@bth.se', 'id': 0},
        {'login_id': 'mabn17@student.bth.se', 'id': 1}
    ])
    mock_base_req.reset_mock()

    assert c.get_user_by_acronym('moc') == {'login_id': 'moc@bth.se', 'id': 0}
    assert c.get_user_by_acronym('mabn17') == {'login_id': 'mabn17@student.bth.se', 'id': 1}
    assert mock_base_req.call_count == 0

    # With no users the lookup falls through and raises.
    c = get_mocked_canvas_set_response(mock_base_req, [])
    with pytest.raises(IndexError, match=r"out of range"):
        c.get_user_by_acronym('aar')
@mock.patch('app.wall_e.models.requester.Requester._base_request')
def test_get_assignment_by_name(mock_base_req, test_app):
    """ Tests to get an assignment object by name key """
    c = get_mocked_canvas_set_response(mock_base_req, [
        {'id': 1, 'name': 'kmom01'},
        {'id': 2, 'name': 'kmom02'},
        {'id': 3, 'name': 'kmom03'}
    ])
    mock_base_req.reset_mock()

    assert c.get_assignment_by_name('kmom03') == {'id': 3, 'name': 'kmom03'}
    assert c.get_assignment_by_name('kmom01') == {'id': 1, 'name': 'kmom01'}
    assert c.get_assignment_by_name('kmom02') == {'id': 2, 'name': 'kmom02'}

    # Unknown assignment names raise instead of returning None.
    with pytest.raises(IndexError, match=r"out of range"):
        c.get_assignment_by_name('kmom10')
    assert mock_base_req.call_count == 0
@mock.patch('app.wall_e.models.requester.Requester._base_request')
def test_get_assignment_name_by_id(mock_base_req, test_app):
    """ Tests to get an assignment name by its id """
    c = get_mocked_canvas_set_response(mock_base_req, [
        {'id': 1, 'name': 'kmom01'},
        {'id': 2, 'name': 'kmom02'},
        {'id': 3, 'name': 'kmom03'}
    ])
    mock_base_req.reset_mock()

    assert c.get_assignment_name_by_id(1) == 'kmom01'
    assert c.get_assignment_name_by_id(3) == 'kmom03'
    assert c.get_assignment_name_by_id(2) == 'kmom02'

    # Unknown ids raise instead of returning None.
    with pytest.raises(IndexError, match=r"out of range"):
        c.get_assignment_name_by_id(10)
    assert mock_base_req.call_count == 0
@mock.patch('app.wall_e.models.requester.Requester._base_request')
def test_get_gradeable_submissions(mock_base_req, test_app):
    """ Tests if gradable submissions filters the correct values """
    mock_values = [
        {'id': 1, 'assignment_id': 1, 'name': 'kmom01'},
        {'id': 2, 'assignment_id': 2, 'name': 'kmom02'},
        {'id': 3, 'assignment_id': 3, 'name': 'kmom03'}
    ]
    c = get_mocked_canvas_set_response(mock_base_req, mock_values)
    mock_base_req.reset_mock()

    # With no ignored assignments everything is returned (one request made).
    c._config = {'default': {'ignore_assignments': []}}
    assert c.get_gradeable_submissions() == mock_values
    assert mock_base_req.call_count == 1

    # Assignments listed in ignore_assignments are filtered out.
    c._config = {'default': {'ignore_assignments': ['kmom02']}}
    assert c.get_gradeable_submissions() == [
        {'id': 1, 'assignment_id': 1, 'name': 'kmom01'},
        {'id': 3, 'assignment_id': 3, 'name': 'kmom03'}
    ]
| 32.744526 | 94 | 0.66206 | 647 | 4,486 | 4.256569 | 0.151468 | 0.072622 | 0.099855 | 0.058824 | 0.806826 | 0.722585 | 0.643791 | 0.589688 | 0.555556 | 0.555556 | 0 | 0.025768 | 0.186803 | 4,486 | 136 | 95 | 32.985294 | 0.729167 | 0.094293 | 0 | 0.47619 | 0 | 0 | 0.216182 | 0.087174 | 0 | 0 | 0 | 0 | 0.27381 | 1 | 0.071429 | false | 0 | 0.035714 | 0 | 0.107143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4ca67912af5653e709401aee9a4326a25e334fd4 | 1,119 | py | Python | apps/molecular_generation/JT_VAE/src/utils.py | agave233/PaddleHelix | e5578f72c2a203a27d9df7da111f1ced826c1429 | [
"Apache-2.0"
] | 454 | 2020-11-21T01:02:45.000Z | 2022-03-29T12:53:40.000Z | apps/molecular_generation/JT_VAE/src/utils.py | chupvl/PaddleHelix | 6e082f89b8090c3c360593d40a08bffc884165dd | [
"Apache-2.0"
] | 161 | 2020-12-12T06:35:54.000Z | 2022-03-27T11:31:13.000Z | apps/molecular_generation/JT_VAE/src/utils.py | chupvl/PaddleHelix | 6e082f89b8090c3c360593d40a08bffc884165dd | [
"Apache-2.0"
] | 108 | 2020-12-07T09:01:10.000Z | 2022-03-31T14:42:29.000Z | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""general utils"""
import json
def read_file(path):
    """Return the contents of the text file at *path* as a list of lines.

    Line terminators are stripped (via ``str.splitlines``).
    """
    with open(path, 'r') as f:
        data = f.read().splitlines()
    return data
def load_json_config(path):
    """Return the JSON file at *path* parsed into a Python object (typically a dict)."""
    with open(path, 'r') as f:
        data = json.load(f)
    return data
def onek_encoding_unk(x, allowable_set):
    """One-hot encode *x* over *allowable_set*.

    Values not in *allowable_set* are mapped to its last element, which by
    convention acts as the "unknown" slot.
    """
    if x not in allowable_set:
        x = allowable_set[-1]
    return [int(x == s) for s in allowable_set]
4cb01fee0a527d341eb8da1c1ac1ea103d2904d9 | 205 | py | Python | lesson12/mysql_select_test.py | drednout/letspython | 9747442d63873b5f71e2c15ed5528bd98ad5ac31 | [
"BSD-2-Clause"
] | 1 | 2015-11-26T15:53:58.000Z | 2015-11-26T15:53:58.000Z | lesson12/mysql_select_test.py | drednout/letspython | 9747442d63873b5f71e2c15ed5528bd98ad5ac31 | [
"BSD-2-Clause"
] | null | null | null | lesson12/mysql_select_test.py | drednout/letspython | 9747442d63873b5f71e2c15ed5528bd98ad5ac31 | [
"BSD-2-Clause"
] | null | null | null | import MySQLdb
# Connect to the local lesson12 database (demo credentials are hard-coded).
con = MySQLdb.connect('localhost', 'root', 'root', 'lesson12')

# Using the connection as a context manager commits on success and rolls
# back on error.
with con:
    cur = con.cursor()
    cur.execute("SELECT * FROM players")
    for row in cur.fetchall():
        print(row)
| 20.5 | 63 | 0.62439 | 26 | 205 | 4.923077 | 0.730769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012422 | 0.214634 | 205 | 9 | 64 | 22.777778 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0.22439 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0.142857 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4cb9f704b90938663901e82123c3f1fce9122ee6 | 259 | py | Python | python/isogram/isogram.py | danielsunzhongyuan/exercism | 5ec68fe3ede6e037c4c493b32e88265f81528aec | [
"Apache-2.0"
] | null | null | null | python/isogram/isogram.py | danielsunzhongyuan/exercism | 5ec68fe3ede6e037c4c493b32e88265f81528aec | [
"Apache-2.0"
] | null | null | null | python/isogram/isogram.py | danielsunzhongyuan/exercism | 5ec68fe3ede6e037c4c493b32e88265f81528aec | [
"Apache-2.0"
def is_isogram(word):
    """Return True when no ASCII letter occurs more than once in *word*.

    Comparison is case-insensitive; non-letter characters (hyphens, spaces,
    digits) are ignored, so "six-year-old" is an isogram.
    """
    seen = set()
    for ch in word:
        ch = ch.lower()
        if 'a' <= ch <= 'z':
            if ch in seen:
                return False
            seen.add(ch)
    return True
| 25.9 | 50 | 0.46332 | 39 | 259 | 2.948718 | 0.512821 | 0.278261 | 0.156522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018868 | 0.3861 | 259 | 9 | 51 | 28.777778 | 0.704403 | 0 | 0 | 0 | 0 | 0 | 0.007722 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4cbcf41c49a14f9da89fc82283cfc95d4f536979 | 1,604 | py | Python | plugins/jsmin.py | magentix/magentix.github.io | c1096c791acb04ccd857bef30d71839ad8328b5d | [
"BSD-2-Clause"
] | null | null | null | plugins/jsmin.py | magentix/magentix.github.io | c1096c791acb04ccd857bef30d71839ad8328b5d | [
"BSD-2-Clause"
] | null | null | null | plugins/jsmin.py | magentix/magentix.github.io | c1096c791acb04ccd857bef30d71839ad8328b5d | [
"BSD-2-Clause"
] | null | null | null | """
Copyright (c) 2022, Magentix
This code is licensed under simplified BSD license (see LICENSE for details)
StaPy JsMin Plugin - Version 1.0.0
Requirements:
- jsmin
"""
from pathlib import Path
import jsmin
import os
def file_content_opened(content, args: dict) -> str:
    """StaPy hook: minify JS file content in-memory; other files pass through.

    *content* is bytes; JS content is decoded, minified and re-encoded.
    """
    if _get_file_extension(args['path']) != 'js':
        return content
    return jsmin.jsmin(content.decode()).encode()
def file_copy_before(source: str, args: dict) -> str:
    """StaPy hook: before copying, minify JS files into a tmp file.

    Returns the path to copy from — the original *source* for non-JS files,
    or a freshly written minified temp file for JS files.
    """
    if _get_file_extension(source) != 'js':
        return source
    return _get_min_file(source, jsmin.jsmin(_get_file_content(source)))
def _get_min_file(source: str, content: str) -> str:
    """Write minified *content* to the tmp path for *source* and return that path."""
    tmp_path = _get_tmp_file_path(source)
    # Context manager guarantees the handle is closed even if the write fails.
    with open(tmp_path, 'w', encoding='utf-8') as file:
        file.write(content)
    return tmp_path
def _get_tmp_file_path(source: str) -> str:
    """Return the path '<plugin_dir>/../tmp/<basename>.min' for *source*.

    The containing directory is created if needed.
    """
    name = os.path.normpath(_get_current_directory() + '/../tmp/' + os.path.basename(source) + '.min')
    _create_directory(name)
    return name
def _create_directory(path) -> None:
    """Create *path* as a directory tree; if *path* has a file extension,
    create its parent directory instead."""
    if _get_file_extension(path):
        path = os.path.dirname(path)
    Path(os.path.normpath(path)).mkdir(parents=True, exist_ok=True)
def _get_current_directory() -> str:
return os.path.dirname(os.path.realpath(__file__))
def _get_file_content(source: str) -> str:
file = open(os.path.normpath(source), 'r', encoding="utf-8")
content = file.read()
file.close()
return content
def _get_file_extension(file: str) -> str:
name, extension = os.path.splitext(file)
if not extension:
extension = ''
return extension.replace('.', '')
| 26.295082 | 102 | 0.688279 | 224 | 1,604 | 4.683036 | 0.321429 | 0.045758 | 0.06101 | 0.051478 | 0.112488 | 0.055291 | 0.055291 | 0 | 0 | 0 | 0 | 0.006808 | 0.17581 | 1,604 | 60 | 103 | 26.733333 | 0.786687 | 0.101621 | 0 | 0.111111 | 0 | 0 | 0.023029 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.083333 | 0.027778 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
4cc8314facfcab335b1d75fc23696cd6781e2aec | 284 | py | Python | exa/_version.py | herbertludowieg/exa | 62a22eb47f18d468c5e1ae43f96e27bd60ec3e57 | [
"Apache-2.0"
] | 2 | 2019-11-30T09:58:12.000Z | 2020-01-28T13:32:40.000Z | exa/_version.py | herbertludowieg/exa | 62a22eb47f18d468c5e1ae43f96e27bd60ec3e57 | [
"Apache-2.0"
] | 108 | 2016-01-08T18:42:50.000Z | 2021-04-10T16:36:30.000Z | exa/_version.py | herbertludowieg/exa | 62a22eb47f18d468c5e1ae43f96e27bd60ec3e57 | [
"Apache-2.0"
] | 11 | 2016-01-08T17:11:52.000Z | 2020-07-04T12:30:22.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2020, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
import os
# Read the package version from the bundled static/version.txt file,
# resolved relative to this module, and expose it as __version__.
with open(os.path.abspath(os.path.join(os.path.dirname(__file__), "static", "version.txt"))) as f:
    __version__ = f.read().strip()
| 31.555556 | 98 | 0.700704 | 44 | 284 | 4.340909 | 0.818182 | 0.094241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045082 | 0.140845 | 284 | 8 | 99 | 35.5 | 0.737705 | 0.461268 | 0 | 0 | 0 | 0 | 0.114094 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
4cd6a5c091b7021909e9aa730ab662c4644895aa | 569 | py | Python | docs/example/yaml-example/modal.py | connordelacruz/webdriver-test-tools | fe6906839e4423562c6d4d0aa6b10b2ea90bff6b | [
"MIT"
] | 5 | 2018-07-02T13:18:59.000Z | 2019-10-14T04:55:31.000Z | docs/example/yaml-example/modal.py | connordelacruz/webdriver-test-tools | fe6906839e4423562c6d4d0aa6b10b2ea90bff6b | [
"MIT"
] | 1 | 2019-10-16T20:54:25.000Z | 2019-10-16T20:54:25.000Z | docs/example/yaml-example/modal.py | connordelacruz/webdriver-test-tools | fe6906839e4423562c6d4d0aa6b10b2ea90bff6b | [
"MIT"
] | 1 | 2019-09-03T05:29:41.000Z | 2019-09-03T05:29:41.000Z | import os
from selenium.webdriver.common.by import By
from webdriver_test_tools.pageobject import *
from webdriver_test_tools.webdriver import actions, locate
class ExampleModal(prototypes.ModalObject):
    """YAML ModalObject example."""

    # Path to the YAML file representing the object, next to this module.
    YAML_FILE = os.path.join(os.path.dirname(__file__), 'modal.yml')

    # (Optional) Page object of the contents of the modal body. If set to a
    # subclass of BasePage, the get_modal_body() method will return an
    # instance of the page object; None means no body page object is used.
    MODAL_BODY_CLASS = None
| 31.611111 | 79 | 0.753954 | 82 | 569 | 5.073171 | 0.54878 | 0.036058 | 0.081731 | 0.105769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173989 | 569 | 17 | 80 | 33.470588 | 0.885106 | 0.404218 | 0 | 0 | 0 | 0 | 0.027273 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.571429 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
4ce539c5c020f55af3832858c7c28118a24947a5 | 7,156 | py | Python | pysnmp/HM2-TRAFFICMGMT-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/HM2-TRAFFICMGMT-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/HM2-TRAFFICMGMT-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module HM2-TRAFFICMGMT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HM2-TRAFFICMGMT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:19:47 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint")
hm2ConfigurationMibs, HmEnabledStatus = mibBuilder.importSymbols("HM2-TC-MIB", "hm2ConfigurationMibs", "HmEnabledStatus")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
iso, Counter32, IpAddress, TimeTicks, Gauge32, Counter64, ModuleIdentity, Bits, MibIdentifier, Integer32, NotificationType, Unsigned32, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "Counter32", "IpAddress", "TimeTicks", "Gauge32", "Counter64", "ModuleIdentity", "Bits", "MibIdentifier", "Integer32", "NotificationType", "Unsigned32", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
hm2TrafficMgmtMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 248, 11, 31))
hm2TrafficMgmtMib.setRevisions(('2011-03-16 00:00',))
if mibBuilder.loadTexts: hm2TrafficMgmtMib.setLastUpdated('201103160000Z')
if mibBuilder.loadTexts: hm2TrafficMgmtMib.setOrganization('Hirschmann Automation and Control GmbH')
hm2TrafficMgmtMibNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 31, 0))
hm2TrafficMgmtMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 31, 1))
hm2TrafficMgmtIfTable = MibTable((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1), )
if mibBuilder.loadTexts: hm2TrafficMgmtIfTable.setStatus('current')
hm2TrafficMgmtIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: hm2TrafficMgmtIfEntry.setStatus('current')
hm2TrafficMgmtIfFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 1), HmEnabledStatus().clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfFlowControl.setStatus('current')
hm2TrafficMgmtIfEgressShapingRate = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 2), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfEgressShapingRate.setStatus('current')
hm2TrafficMgmtIfEgressShapingRateUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("percent", 1), ("kbps", 2))).clone('percent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: hm2TrafficMgmtIfEgressShapingRateUnit.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlThresholdUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("percent", 1), ("pps", 2))).clone('percent')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlThresholdUnit.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlBcastMode = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 5), HmEnabledStatus().clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlBcastMode.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlBcastThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 14880000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlBcastThreshold.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlMcastMode = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 7), HmEnabledStatus().clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlMcastMode.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlMcastThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 14880000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlMcastThreshold.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlUcastMode = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 9), HmEnabledStatus().clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlUcastMode.setStatus('current')
hm2TrafficMgmtIfIngressStormCtlUcastThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 1, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 14880000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtIfIngressStormCtlUcastThreshold.setStatus('current')
hm2TrafficMgmtFlowControl = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 2), HmEnabledStatus().clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2TrafficMgmtFlowControl.setStatus('current')
hm2TrafficMgmtIngressStormBucketType = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 31, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("single-bucket", 1), ("multi-bucket", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hm2TrafficMgmtIngressStormBucketType.setStatus('current')
mibBuilder.exportSymbols("HM2-TRAFFICMGMT-MIB", hm2TrafficMgmtIfIngressStormCtlBcastThreshold=hm2TrafficMgmtIfIngressStormCtlBcastThreshold, PYSNMP_MODULE_ID=hm2TrafficMgmtMib, hm2TrafficMgmtIngressStormBucketType=hm2TrafficMgmtIngressStormBucketType, hm2TrafficMgmtIfEgressShapingRateUnit=hm2TrafficMgmtIfEgressShapingRateUnit, hm2TrafficMgmtMibNotifications=hm2TrafficMgmtMibNotifications, hm2TrafficMgmtIfIngressStormCtlBcastMode=hm2TrafficMgmtIfIngressStormCtlBcastMode, hm2TrafficMgmtIfIngressStormCtlThresholdUnit=hm2TrafficMgmtIfIngressStormCtlThresholdUnit, hm2TrafficMgmtIfIngressStormCtlMcastThreshold=hm2TrafficMgmtIfIngressStormCtlMcastThreshold, hm2TrafficMgmtIfTable=hm2TrafficMgmtIfTable, hm2TrafficMgmtFlowControl=hm2TrafficMgmtFlowControl, hm2TrafficMgmtIfIngressStormCtlMcastMode=hm2TrafficMgmtIfIngressStormCtlMcastMode, hm2TrafficMgmtIfIngressStormCtlUcastMode=hm2TrafficMgmtIfIngressStormCtlUcastMode, hm2TrafficMgmtIfEgressShapingRate=hm2TrafficMgmtIfEgressShapingRate, hm2TrafficMgmtMib=hm2TrafficMgmtMib, hm2TrafficMgmtIfFlowControl=hm2TrafficMgmtIfFlowControl, hm2TrafficMgmtMibObjects=hm2TrafficMgmtMibObjects, hm2TrafficMgmtIfEntry=hm2TrafficMgmtIfEntry, hm2TrafficMgmtIfIngressStormCtlUcastThreshold=hm2TrafficMgmtIfIngressStormCtlUcastThreshold)
| 140.313725 | 1,274 | 0.807015 | 662 | 7,156 | 8.720544 | 0.226586 | 0.008315 | 0.008834 | 0.011779 | 0.355967 | 0.28564 | 0.28564 | 0.28564 | 0.237831 | 0.235579 | 0 | 0.074013 | 0.0654 | 7,156 | 50 | 1,275 | 143.12 | 0.789175 | 0.046395 | 0 | 0 | 0 | 0 | 0.131328 | 0.006456 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.186047 | 0 | 0.186047 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4ceca8cf16451832e3365ad6362d93a22cf008d1 | 8,078 | py | Python | tests/step_utils/test_run_variables_resolver.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
] | 11 | 2022-01-30T14:36:13.000Z | 2022-03-22T09:40:57.000Z | tests/step_utils/test_run_variables_resolver.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
] | 2 | 2022-03-23T07:56:49.000Z | 2022-03-24T12:01:42.000Z | tests/step_utils/test_run_variables_resolver.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
] | 8 | 2022-01-28T10:32:31.000Z | 2022-03-22T09:40:59.000Z | import unittest
from icolos.core.containers.compound import Conformer, Enumeration, Compound
from icolos.core.step_utils.run_variables_resolver import RunVariablesResolver
from icolos.utils.enums.step_enums import StepBaseEnum
_SBE = StepBaseEnum
class Test_RunVariablesResolver(unittest.TestCase):
    """Unit tests for RunVariablesResolver placeholder substitution at the
    compound, enumeration and conformer levels."""

    @classmethod
    def setUpClass(cls):
        cls.resolver = RunVariablesResolver()

    def setUp(self):
        # comp1 has 2 enumerations, one with 2 and one with 3 conformers
        comp1 = Compound(name="test_molecule", compound_number=0)
        comp1_enum1 = Enumeration(
            smile="abc", molecule=None, enumeration_id=1, compound_object=comp1
        )
        comp1_enum1.add_conformer(
            Conformer(conformer_id=0, enumeration_object=comp1_enum1), auto_update=True
        )
        comp1_enum1.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp1_enum2 = Enumeration(smile="def", molecule=None, enumeration_id=2)
        comp1_enum2.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp1_enum2.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp1_enum2.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp1.add_enumeration(comp1_enum1, auto_update=False)
        comp1.add_enumeration(comp1_enum2, auto_update=False)

        # comp2 has 3 enumerations, one with 1, one with 3 and one with 4 conformers
        comp2 = Compound(name="test_molecule_new", compound_number=0)
        comp2_enum1 = Enumeration(smile="kk", molecule=None, enumeration_id=0)
        comp2_enum1.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum1.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum2 = Enumeration(smile="abc", molecule=None, enumeration_id=1)
        comp2_enum2.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum2.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum2.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum3 = Enumeration(smile="xyz", molecule=None, enumeration_id=2)
        comp2_enum3.add_conformer(
            Conformer(conformer_id=0, enumeration_object=comp2_enum3), auto_update=True
        )
        comp2_enum3.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum3.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2_enum3.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp2.add_enumeration(comp2_enum1, auto_update=False)
        comp2.add_enumeration(comp2_enum2, auto_update=False)
        comp2.add_enumeration(comp2_enum3, auto_update=False)

        # comp3 has 1 enumeration, with 2 conformers (and a different number and name)
        comp3 = Compound(name="test_molecule", compound_number=1)
        comp3_enum1 = Enumeration(smile="abc", molecule=None, enumeration_id=0)
        comp3_enum1.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp3_enum1.add_conformer(Conformer(conformer_id=0), auto_update=True)
        comp3_enum2 = Enumeration(
            smile="def", molecule=None, enumeration_id=1, compound_object=comp3
        )
        comp3_enum2.add_conformer(Conformer(conformer_id=0), auto_update=False)
        comp3_enum2.add_conformer(Conformer(conformer_id=0), auto_update=False)
        comp3_enum2.add_conformer(Conformer(conformer_id=0), auto_update=False)
        comp3.add_enumeration(comp3_enum1, auto_update=False)
        comp3.add_enumeration(comp3_enum2, auto_update=False)
        self.list_compounds = [comp1, comp2, comp3]

    @classmethod
    def tearDownClass(cls):
        pass

    def test_compound_replacements(self):
        inp = "/a/path/to/nowhere/[compound_id]/[compound_id]/compound_id/whatever/[compound_name]"
        self.assertEqual(
            self.resolver.resolve_compound_level(inp, self.list_compounds[0]),
            "/a/path/to/nowhere/0/0/compound_id/whatever/test_molecule",
        )
        self.assertEqual(
            self.resolver.resolve_compound_level(inp, self.list_compounds[1]),
            "/a/path/to/nowhere/0/0/compound_id/whatever/test_molecule_new",
        )
        self.assertEqual(
            self.resolver.resolve_compound_level(inp, self.list_compounds[2]),
            "/a/path/to/nowhere/1/1/compound_id/whatever/test_molecule",
        )

        # test what happens, when no replacement is done
        inp = "/a/string/withouttreplacement"
        self.assertEqual(
            self.resolver.resolve_compound_level(inp, self.list_compounds[0]), inp
        )

    def test_enumeration_replacements(self):
        inp = "/a/path/to/nowhere/[compound_id]/[enumeration_id]/[enumeration_string]/whatever/[enumeration_id]"
        self.assertEqual(
            self.resolver.resolve_enumeration_level(inp, self.list_compounds[0][0]),
            "/a/path/to/nowhere/[compound_id]/1/0:1/whatever/1",
        )
        self.assertEqual(
            self.resolver.resolve_enumeration_level(inp, self.list_compounds[0][1]),
            "/a/path/to/nowhere/[compound_id]/2/:2/whatever/2",
        )
        self.assertEqual(
            self.resolver.resolve_enumeration_level(inp, self.list_compounds[2][1]),
            "/a/path/to/nowhere/[compound_id]/1/1:1/whatever/1",
        )

        # test what happens, when no replacement is done
        inp = "/a/string/withouttreplacement"
        self.assertEqual(
            self.resolver.resolve_enumeration_level(inp, self.list_compounds[0][0]), inp
        )

    def test_conformer_replacements(self):
        inp = "/a/path/[conformer_string]to/nowhere/[compound_id]/[conformer_id]/[enumeration_string]/whatever/[conformer_id]"
        self.assertEqual(
            self.resolver.resolve_conformer_level(inp, self.list_compounds[0][0][0]),
            "/a/path/0:1:0to/nowhere/[compound_id]/0/[enumeration_string]/whatever/0",
        )
        self.assertEqual(
            self.resolver.resolve_conformer_level(inp, self.list_compounds[0][0][1]),
            "/a/path/0:1:1to/nowhere/[compound_id]/1/[enumeration_string]/whatever/1",
        )
        self.assertEqual(
            self.resolver.resolve_conformer_level(inp, self.list_compounds[2][0][1]),
            "/a/path/:0:1to/nowhere/[compound_id]/1/[enumeration_string]/whatever/1",
        )
        self.assertEqual(
            self.resolver.resolve_conformer_level(inp, self.list_compounds[1][2][0]),
            "/a/path/:2:0to/nowhere/[compound_id]/0/[enumeration_string]/whatever/0",
        )

        # test what happens, when no replacement is done
        inp = "/a/string/withouttreplacement"
        self.assertEqual(
            self.resolver.resolve_conformer_level(inp, self.list_compounds[0][0][0]),
            inp,
        )

    def test_resolve(self):
        inp = "/a/path/[conformer_string]to/nowhere/[compound_id]/[conformer_id]/[enumeration_string]/whatever/[compound_name]"
        self.assertEqual(
            self.resolver.resolve(inp, self.list_compounds[0][0][0]),
            "/a/path/0:1:0to/nowhere/0/0/0:1/whatever/test_molecule",
        )
        self.assertEqual(
            self.resolver.resolve(inp, self.list_compounds[0][0]),
            "/a/path/[conformer_string]to/nowhere/0/[conformer_id]/0:1/whatever/test_molecule",
        )
        self.assertEqual(
            self.resolver.resolve(inp, self.list_compounds[0]),
            "/a/path/[conformer_string]to/nowhere/0/[conformer_id]/[enumeration_string]/whatever/test_molecule",
        )

        # fails for cases where the linking conformer -> enumeration -> compound is not established
        try:
            self.resolver.resolve(inp, self.list_compounds[2][0][1])
        except Exception as e:
            self.assertEqual(
                e.__str__(), "'NoneType' object has no attribute 'get_compound_number'"
            )

        # test what happens, when no replacement is done
        inp = "/a/string/withouttreplacement"
        self.assertEqual(self.resolver.resolve(inp, self.list_compounds[0][0][0]), inp)
| 48.957576 | 127 | 0.680862 | 1,013 | 8,078 | 5.200395 | 0.101678 | 0.129841 | 0.045558 | 0.1082 | 0.77202 | 0.737661 | 0.721716 | 0.666667 | 0.589787 | 0.557517 | 0 | 0.032769 | 0.202897 | 8,078 | 164 | 128 | 49.256098 | 0.78537 | 0.060906 | 0 | 0.294964 | 0 | 0.043165 | 0.193876 | 0.180942 | 0 | 0 | 0 | 0 | 0.129496 | 1 | 0.05036 | false | 0.007194 | 0.028777 | 0 | 0.086331 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4cf05b4cb3c593907a2d293d518d5f797aa046d6 | 757 | py | Python | selenible/drivers/chrome.py | wtnb75/selenible | 9a73c1c6b31f281992904cb63a4f7516e49d9e42 | [
"MIT"
] | 2 | 2020-04-24T16:38:09.000Z | 2021-01-28T12:21:37.000Z | selenible/drivers/chrome.py | wtnb75/selenible | 9a73c1c6b31f281992904cb63a4f7516e49d9e42 | [
"MIT"
] | 21 | 2018-05-14T14:31:45.000Z | 2019-07-18T03:44:16.000Z | selenible/drivers/chrome.py | wtnb75/selenible | 9a73c1c6b31f281992904cb63a4f7516e49d9e42 | [
"MIT"
] | 3 | 2018-11-29T07:01:02.000Z | 2019-04-21T17:13:57.000Z | from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from . import Base
class Chrome(Base):
    # Selenium Chrome backend for selenible; Base (imported from the package
    # __init__) supplies the rest of the driver lifecycle.

    def get_options(self):
        # Return a fresh chrome Options instance — presumably filled in by the
        # Base boot sequence before the driver starts (TODO confirm in Base).
        return Options()

    def boot_driver(self):
        # Start chromedriver with whatever keyword arguments were collected
        # into self.browser_args by the framework.
        self.log.debug("chrome: %s", self.browser_args)
        return webdriver.Chrome(**self.browser_args)

    def do_network_conditions(self, params):
        """
        - name: network emulation settings
          network_conditions:
            latency: 4
            download_throughput: 2
            upload_throughput: 1
            offline: false
        """
        # NOTE(review): the docstring above is a YAML usage example; the
        # framework may render module docs from it, so it is left untouched.
        # Log the previous conditions before applying the new ones so the
        # change is traceable.
        prep = self.driver.get_network_conditions()
        self.log.info("network emulation settings: %s -> %s", prep, params)
        self.driver.set_network_conditions(**params)
| 29.115385 | 75 | 0.638045 | 85 | 757 | 5.529412 | 0.447059 | 0.144681 | 0.06383 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005405 | 0.266843 | 757 | 25 | 76 | 30.28 | 0.841441 | 0.187583 | 0 | 0 | 0 | 0 | 0.083485 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.230769 | 0.076923 | 0.692308 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
4cfd0096af4cc500d88b428ca09c8d2ca0e1dec8 | 1,766 | py | Python | create_schemas.py | awesomepack/Steam-Recommender | 04377e8e710e4af1f1c495d15df45bd1527ea8d6 | [
"MIT"
] | null | null | null | create_schemas.py | awesomepack/Steam-Recommender | 04377e8e710e4af1f1c495d15df45bd1527ea8d6 | [
"MIT"
] | 3 | 2021-10-21T04:03:29.000Z | 2021-10-21T04:08:13.000Z | create_schemas.py | awesomepack/Steam-Recommender | 04377e8e710e4af1f1c495d15df45bd1527ea8d6 | [
"MIT"
] | null | null | null | '''
create_schemas.py does the following:
* Creates a database named steam_recommender if it does not exist within a local instance of PostgreSQL
* Creates schemas for our set of csv data in steam_recommender
requirements:
* in lines 24 and 27 you need to specify the file paths to the csv source files
* import your Postgres credentials for connection to Postgres
'''
# Importing dependencies
import psycopg2
import sqlalchemy #version 1.4.7
import pandas as pd
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from sqlalchemy_utils import database_exists , create_database
from sqlalchemy import Table , Column , Integer , String , MetaData , Float , JSON
from credentials import password , user_name
# Connect to PostgreSQL using psycopg2.
# echo=True makes SQLAlchemy log every emitted SQL statement.
# NOTE(review): credentials come from the local credentials.py module, which
# must be kept out of version control.
postgresEngine = create_engine(f'postgresql+psycopg2://{user_name}:{password}@localhost/Steam_Recommender' , echo = True)

# If the database does not exist yet, create it before loading any tables.
if not database_exists(postgresEngine.url):
    create_database(postgresEngine.url)
def make_table(file, key):
    """Load one CSV file into PostgreSQL and set its primary key.

    Args:
        file: path to the CSV source file; the table name is the file's
            base name without its extension.
        key: column name to declare as the table's primary key.

    The table is replaced if it already exists.
    """
    # BUG FIX: the original used file.split(os.path.sep)[-1][:-4], which
    # silently mangles the name for any extension that is not exactly four
    # characters ('.csv') and misses Windows alternate separators.
    # basename + splitext handles both correctly.
    tablename = os.path.splitext(os.path.basename(file))[0]
    pd.read_csv(file).to_sql(tablename, postgresEngine, if_exists = 'replace', index = False)
    with postgresEngine.connect() as con:
        # NOTE(review): identifiers cannot be bound as SQL parameters, so the
        # f-string is unavoidable here; both values originate from local
        # filenames / call sites, not user input.
        con.execute(f'ALTER TABLE "{tablename}" ADD PRIMARY KEY ("{key}");')
datafolder = 'data'
# Load every CSV in the data folder into PostgreSQL.  The metacritic file is
# currently skipped (its make_table call is commented out pending a fix).
for filename in os.listdir(datafolder):
    if not filename.endswith('.csv'):
        continue
    if filename.startswith("metacritic"):
        # make_table(os.path.join(datafolder, filename), 'name')
        continue
    # BUG FIX: the original printed "Making: (unknown) table" (an f-string
    # with no placeholder) for every directory entry, including files that
    # were then skipped.  Print the real filename, and only for tables that
    # are actually created.
    print(f'Making: {filename} table')
    make_table(os.path.join(datafolder, filename), 'appid')
98005489f6cefc093a2aaea615e460bfdd95d217 | 17,774 | py | Python | google/ads/google_ads/v5/proto/common/asset_types_pb2.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | 1 | 2021-04-09T04:28:47.000Z | 2021-04-09T04:28:47.000Z | google/ads/google_ads/v5/proto/common/asset_types_pb2.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v5/proto/common/asset_types_pb2.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v5/proto/common/asset_types.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v5.proto.enums import mime_type_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_mime__type__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v5/proto/common/asset_types.proto',
package='google.ads.googleads.v5.common',
syntax='proto3',
serialized_options=b'\n\"com.google.ads.googleads.v5.commonB\017AssetTypesProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v5/common;common\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V5.Common\312\002\036Google\\Ads\\GoogleAds\\V5\\Common\352\002\"Google::Ads::GoogleAds::V5::Common',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n6google/ads/googleads_v5/proto/common/asset_types.proto\x12\x1egoogle.ads.googleads.v5.common\x1a\x33google/ads/googleads_v5/proto/enums/mime_type.proto\x1a\x1cgoogle/api/annotations.proto\"G\n\x11YoutubeVideoAsset\x12\x1d\n\x10youtube_video_id\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x13\n\x11_youtube_video_id\".\n\x10MediaBundleAsset\x12\x11\n\x04\x64\x61ta\x18\x02 \x01(\x0cH\x00\x88\x01\x01\x42\x07\n\x05_data\"\xda\x01\n\nImageAsset\x12\x11\n\x04\x64\x61ta\x18\x05 \x01(\x0cH\x00\x88\x01\x01\x12\x16\n\tfile_size\x18\x06 \x01(\x03H\x01\x88\x01\x01\x12G\n\tmime_type\x18\x03 \x01(\x0e\x32\x34.google.ads.googleads.v5.enums.MimeTypeEnum.MimeType\x12\x41\n\tfull_size\x18\x04 \x01(\x0b\x32..google.ads.googleads.v5.common.ImageDimensionB\x07\n\x05_dataB\x0c\n\n_file_size\"\x84\x01\n\x0eImageDimension\x12\x1a\n\rheight_pixels\x18\x04 \x01(\x03H\x00\x88\x01\x01\x12\x19\n\x0cwidth_pixels\x18\x05 \x01(\x03H\x01\x88\x01\x01\x12\x10\n\x03url\x18\x06 \x01(\tH\x02\x88\x01\x01\x42\x10\n\x0e_height_pixelsB\x0f\n\r_width_pixelsB\x06\n\x04_url\"\'\n\tTextAsset\x12\x11\n\x04text\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_text\"\x13\n\x11\x42ookOnGoogleAssetB\xea\x01\n\"com.google.ads.googleads.v5.commonB\x0f\x41ssetTypesProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v5/common;common\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V5.Common\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V5\\Common\xea\x02\"Google::Ads::GoogleAds::V5::Commonb\x06proto3'
,
dependencies=[google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_mime__type__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_YOUTUBEVIDEOASSET = _descriptor.Descriptor(
name='YoutubeVideoAsset',
full_name='google.ads.googleads.v5.common.YoutubeVideoAsset',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='youtube_video_id', full_name='google.ads.googleads.v5.common.YoutubeVideoAsset.youtube_video_id', index=0,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_youtube_video_id', full_name='google.ads.googleads.v5.common.YoutubeVideoAsset._youtube_video_id',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=173,
serialized_end=244,
)
_MEDIABUNDLEASSET = _descriptor.Descriptor(
name='MediaBundleAsset',
full_name='google.ads.googleads.v5.common.MediaBundleAsset',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data', full_name='google.ads.googleads.v5.common.MediaBundleAsset.data', index=0,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_data', full_name='google.ads.googleads.v5.common.MediaBundleAsset._data',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=246,
serialized_end=292,
)
_IMAGEASSET = _descriptor.Descriptor(
name='ImageAsset',
full_name='google.ads.googleads.v5.common.ImageAsset',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data', full_name='google.ads.googleads.v5.common.ImageAsset.data', index=0,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='file_size', full_name='google.ads.googleads.v5.common.ImageAsset.file_size', index=1,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mime_type', full_name='google.ads.googleads.v5.common.ImageAsset.mime_type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='full_size', full_name='google.ads.googleads.v5.common.ImageAsset.full_size', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_data', full_name='google.ads.googleads.v5.common.ImageAsset._data',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_file_size', full_name='google.ads.googleads.v5.common.ImageAsset._file_size',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=295,
serialized_end=513,
)
_IMAGEDIMENSION = _descriptor.Descriptor(
name='ImageDimension',
full_name='google.ads.googleads.v5.common.ImageDimension',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='height_pixels', full_name='google.ads.googleads.v5.common.ImageDimension.height_pixels', index=0,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='width_pixels', full_name='google.ads.googleads.v5.common.ImageDimension.width_pixels', index=1,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='url', full_name='google.ads.googleads.v5.common.ImageDimension.url', index=2,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_height_pixels', full_name='google.ads.googleads.v5.common.ImageDimension._height_pixels',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_width_pixels', full_name='google.ads.googleads.v5.common.ImageDimension._width_pixels',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_url', full_name='google.ads.googleads.v5.common.ImageDimension._url',
index=2, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=516,
serialized_end=648,
)
_TEXTASSET = _descriptor.Descriptor(
name='TextAsset',
full_name='google.ads.googleads.v5.common.TextAsset',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='text', full_name='google.ads.googleads.v5.common.TextAsset.text', index=0,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_text', full_name='google.ads.googleads.v5.common.TextAsset._text',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=650,
serialized_end=689,
)
_BOOKONGOOGLEASSET = _descriptor.Descriptor(
name='BookOnGoogleAsset',
full_name='google.ads.googleads.v5.common.BookOnGoogleAsset',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=691,
serialized_end=710,
)
_YOUTUBEVIDEOASSET.oneofs_by_name['_youtube_video_id'].fields.append(
_YOUTUBEVIDEOASSET.fields_by_name['youtube_video_id'])
_YOUTUBEVIDEOASSET.fields_by_name['youtube_video_id'].containing_oneof = _YOUTUBEVIDEOASSET.oneofs_by_name['_youtube_video_id']
_MEDIABUNDLEASSET.oneofs_by_name['_data'].fields.append(
_MEDIABUNDLEASSET.fields_by_name['data'])
_MEDIABUNDLEASSET.fields_by_name['data'].containing_oneof = _MEDIABUNDLEASSET.oneofs_by_name['_data']
_IMAGEASSET.fields_by_name['mime_type'].enum_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_mime__type__pb2._MIMETYPEENUM_MIMETYPE
_IMAGEASSET.fields_by_name['full_size'].message_type = _IMAGEDIMENSION
_IMAGEASSET.oneofs_by_name['_data'].fields.append(
_IMAGEASSET.fields_by_name['data'])
_IMAGEASSET.fields_by_name['data'].containing_oneof = _IMAGEASSET.oneofs_by_name['_data']
_IMAGEASSET.oneofs_by_name['_file_size'].fields.append(
_IMAGEASSET.fields_by_name['file_size'])
_IMAGEASSET.fields_by_name['file_size'].containing_oneof = _IMAGEASSET.oneofs_by_name['_file_size']
_IMAGEDIMENSION.oneofs_by_name['_height_pixels'].fields.append(
_IMAGEDIMENSION.fields_by_name['height_pixels'])
_IMAGEDIMENSION.fields_by_name['height_pixels'].containing_oneof = _IMAGEDIMENSION.oneofs_by_name['_height_pixels']
_IMAGEDIMENSION.oneofs_by_name['_width_pixels'].fields.append(
_IMAGEDIMENSION.fields_by_name['width_pixels'])
_IMAGEDIMENSION.fields_by_name['width_pixels'].containing_oneof = _IMAGEDIMENSION.oneofs_by_name['_width_pixels']
_IMAGEDIMENSION.oneofs_by_name['_url'].fields.append(
_IMAGEDIMENSION.fields_by_name['url'])
_IMAGEDIMENSION.fields_by_name['url'].containing_oneof = _IMAGEDIMENSION.oneofs_by_name['_url']
_TEXTASSET.oneofs_by_name['_text'].fields.append(
_TEXTASSET.fields_by_name['text'])
_TEXTASSET.fields_by_name['text'].containing_oneof = _TEXTASSET.oneofs_by_name['_text']
DESCRIPTOR.message_types_by_name['YoutubeVideoAsset'] = _YOUTUBEVIDEOASSET
DESCRIPTOR.message_types_by_name['MediaBundleAsset'] = _MEDIABUNDLEASSET
DESCRIPTOR.message_types_by_name['ImageAsset'] = _IMAGEASSET
DESCRIPTOR.message_types_by_name['ImageDimension'] = _IMAGEDIMENSION
DESCRIPTOR.message_types_by_name['TextAsset'] = _TEXTASSET
DESCRIPTOR.message_types_by_name['BookOnGoogleAsset'] = _BOOKONGOOGLEASSET
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
YoutubeVideoAsset = _reflection.GeneratedProtocolMessageType('YoutubeVideoAsset', (_message.Message,), {
'DESCRIPTOR' : _YOUTUBEVIDEOASSET,
'__module__' : 'google.ads.googleads_v5.proto.common.asset_types_pb2'
,
'__doc__': """A YouTube asset.
Attributes:
youtube_video_id:
YouTube video id. This is the 11 character string value used
in the YouTube video URL.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.common.YoutubeVideoAsset)
})
_sym_db.RegisterMessage(YoutubeVideoAsset)
MediaBundleAsset = _reflection.GeneratedProtocolMessageType('MediaBundleAsset', (_message.Message,), {
'DESCRIPTOR' : _MEDIABUNDLEASSET,
'__module__' : 'google.ads.googleads_v5.proto.common.asset_types_pb2'
,
'__doc__': """A MediaBundle asset.
Attributes:
data:
Media bundle (ZIP file) asset data. The format of the uploaded
ZIP file depends on the ad field where it will be used. For
more information on the format, see the documentation of the
ad field where you plan on using the MediaBundleAsset. This
field is mutate only.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.common.MediaBundleAsset)
})
_sym_db.RegisterMessage(MediaBundleAsset)
ImageAsset = _reflection.GeneratedProtocolMessageType('ImageAsset', (_message.Message,), {
'DESCRIPTOR' : _IMAGEASSET,
'__module__' : 'google.ads.googleads_v5.proto.common.asset_types_pb2'
,
'__doc__': """An Image asset.
Attributes:
data:
The raw bytes data of an image. This field is mutate only.
file_size:
File size of the image asset in bytes.
mime_type:
MIME type of the image asset.
full_size:
Metadata for this image at its original size.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.common.ImageAsset)
})
_sym_db.RegisterMessage(ImageAsset)
ImageDimension = _reflection.GeneratedProtocolMessageType('ImageDimension', (_message.Message,), {
'DESCRIPTOR' : _IMAGEDIMENSION,
'__module__' : 'google.ads.googleads_v5.proto.common.asset_types_pb2'
,
'__doc__': """Metadata for an image at a certain size, either original or resized.
Attributes:
height_pixels:
Height of the image.
width_pixels:
Width of the image.
url:
A URL that returns the image with this height and width.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.common.ImageDimension)
})
_sym_db.RegisterMessage(ImageDimension)
TextAsset = _reflection.GeneratedProtocolMessageType('TextAsset', (_message.Message,), {
'DESCRIPTOR' : _TEXTASSET,
'__module__' : 'google.ads.googleads_v5.proto.common.asset_types_pb2'
,
'__doc__': """A Text asset.
Attributes:
text:
Text content of the text asset.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.common.TextAsset)
})
_sym_db.RegisterMessage(TextAsset)
BookOnGoogleAsset = _reflection.GeneratedProtocolMessageType('BookOnGoogleAsset', (_message.Message,), {
'DESCRIPTOR' : _BOOKONGOOGLEASSET,
'__module__' : 'google.ads.googleads_v5.proto.common.asset_types_pb2'
,
'__doc__': """A Book on Google asset. Used to redirect user to book through Google.
Book on Google will change the redirect url to book directly through
Google.""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.common.BookOnGoogleAsset)
})
_sym_db.RegisterMessage(BookOnGoogleAsset)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 42.018913 | 1,480 | 0.761618 | 2,268 | 17,774 | 5.622134 | 0.119489 | 0.049173 | 0.059289 | 0.070583 | 0.701121 | 0.651165 | 0.582935 | 0.533213 | 0.502863 | 0.479492 | 0 | 0.032059 | 0.11725 | 17,774 | 422 | 1,481 | 42.118483 | 0.780625 | 0.04034 | 0 | 0.557029 | 1 | 0.03183 | 0.308536 | 0.174831 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.015915 | 0 | 0.015915 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9802164b700b504290e43d054de7c71aa8a8be97 | 33,300 | py | Python | app.py | chamithshirantha/kapila-bakers | 522cf5c97ecda634053e148a4b8523c134af3801 | [
"MIT"
] | null | null | null | app.py | chamithshirantha/kapila-bakers | 522cf5c97ecda634053e148a4b8523c134af3801 | [
"MIT"
] | null | null | null | app.py | chamithshirantha/kapila-bakers | 522cf5c97ecda634053e148a4b8523c134af3801 | [
"MIT"
] | null | null | null | from flask import Flask,render_template,request,session,redirect,url_for,flash,Response
from flask_mysqldb import MySQL
from werkzeug.security import generate_password_hash,check_password_hash
import MySQLdb.cursors
from werkzeug.utils import secure_filename
import os
from datetime import datetime
from flask_mail import Mail,Message
import io
import xlwt
app = Flask(__name__)
# NOTE(review): all secrets below (SECRET_KEY, DB password, mail password) are
# hard-coded in source.  They should be loaded from environment variables or a
# config file kept out of version control.
app.config['SECRET_KEY'] = 'this is secret key'

# MySQL connection settings for the myproject database (flask_mysqldb).
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = ''
app.config['MYSQL_DB'] = 'myproject'

mysql = MySQL(app)

# Outgoing mail via Gmail over SSL (flask_mail).
app.config['MAIL_SERVER'] = 'smtp.gmail.com'
app.config['MAIL_PORT'] = '465'
app.config['MAIL_USERNAME'] = 'kapilabakers123@gmail.com'
app.config['MAIL_PASSWORD'] = 'kapila@123'
app.config['MAIL_USE_SSL'] = True
mail = Mail(app)

# Upload handling: destination folder and the image types accepted.
UPLOAD_FOLDER = 'static/uploads'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])
def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
@app.route('/')
def index():
    """Home page: show every bakery item in the menu section."""
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("select * from items")
    items = cursor.fetchall()
    return render_template("index.html", data=items)
# send message box
@app.route('/contactus',methods=['POST'])
def send_message():
    """Store a "contact us" form submission in the inbox table.

    Reads first/last name, mobile number, email and description from the
    POSTed form and inserts one row into ``inbox``.
    """
    if request.method == 'POST':
        first_name = request.form['firstname']
        last_name = request.form['lastname']
        mobile_number = request.form['mobile']
        email_address = request.form['email']
        description = request.form['description']
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute("INSERT INTO inbox (first_name,last_name,mobile_number,email,description) VALUES (%s,%s,%s,%s,%s)",(first_name,last_name,mobile_number,email_address,description,))
        mysql.connection.commit()
        cursor.close()
    # BUG FIX: the original rendered index.html directly without the `data`
    # context the index view supplies, which breaks the template.  Redirecting
    # lets index() rebuild it, and implements post/redirect/get so a browser
    # refresh cannot re-insert the message.
    return redirect(url_for('index'))
@app.route('/add', methods=['POST'])
def add_product_to_cart():
    """Add an item to the session shopping cart and record a pending order row.

    Requires a logged-in customer (``session_username``); anonymous visitors
    are redirected to /login.  The cart lives in ``session['cart_item']`` as
    {item_name: {item_id, name, quantity, price, file_name, total_price}},
    with running totals in ``all_total_quantity`` / ``all_total_price``.
    """
    if 'session_username' in session:
        if request.method == 'POST':
            _customer_id = request.form['customerid']
            _user_name = request.form['username']
            _code = request.form['code']
            _quantity = int(request.form['quantity'])
            status = "pending"
            # Persist the order line first, independent of the session cart.
            cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
            cursor.execute("INSERT INTO orders (customer_id,username,item_name,quantity,status) VALUES (%s, %s, %s, %s, %s)",(_customer_id,_user_name,_code,_quantity,status,))
            mysql.connection.commit()
            cursor.close()
            if _quantity and _code and request.method == 'POST':
                # Look the item up by name to build the cart entry.
                cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
                cursor.execute("SELECT * FROM items WHERE name = %s", [_code])
                row = cursor.fetchone()
                itemArray = {row['name']: {'item_id': row['item_id'], 'name': row['name'], 'quantity': _quantity,'price': row['price'], 'file_name': row['file_name'],'total_price': _quantity * row['price']}}
                all_total_price = 0
                all_total_quantity = 0
                # Nested session mutations are not auto-detected by Flask.
                session.modified = True
                if 'cart_item' in session:
                    if row['name'] in session['cart_item']:
                        # Item already in cart: bump its quantity and line total.
                        for key, value in session['cart_item'].items():
                            if row['name'] == key:
                                old_quantity = session['cart_item'][key]['quantity']
                                total_quantity = old_quantity + _quantity
                                session['cart_item'][key]['quantity'] = total_quantity
                                session['cart_item'][key]['total_price'] = total_quantity * row['price']
                    else:
                        # New item: merge it into the existing cart dict.
                        session['cart_item'] = array_merge(session['cart_item'], itemArray)
                    # Recompute the cart-wide totals from scratch.
                    for key, value in session['cart_item'].items():
                        individual_quantity = int(session['cart_item'][key]['quantity'])
                        individual_price = float(session['cart_item'][key]['total_price'])
                        all_total_quantity = all_total_quantity + individual_quantity
                        all_total_price = all_total_price + individual_price
                else:
                    # First item ever: the cart is just this entry.
                    session['cart_item'] = itemArray
                    all_total_quantity = all_total_quantity + _quantity
                    all_total_price = all_total_price + _quantity * row['price']
                session['all_total_quantity'] = all_total_quantity
                session['all_total_price'] = all_total_price
        # Back to the menu section of the home page.
        return redirect('/#menu')
    else:
        return redirect("/login")
@app.route('/empty')
def empty_cart():
    """Empty the shopping cart for the current visitor.

    BUG FIX: the original called session.clear(), which wiped the entire
    session -- including the login keys -- so emptying the cart also logged
    the customer out.  Only the cart-related keys are removed now.
    """
    session.pop('cart_item', None)
    session.pop('all_total_quantity', None)
    session.pop('all_total_price', None)
    return render_template("user_cart.html")
@app.route('/cart_delete/<string:code>,<string:us>')
def delete_product(code,us):
    """Remove item *code* from user *us*'s cart and pending orders.

    Deletes the matching row from the orders table, drops the entry from the
    session cart, recomputes the running totals, then returns to the cart page.
    """
    all_total_price = 0
    all_total_quantity = 0
    # Nested session mutations are not auto-detected by Flask.
    session.modified = True
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("DELETE FROM orders WHERE username = %s and item_name = %s",(us,code))
    mysql.connection.commit()
    for item in session['cart_item'].items():
        if item[0] == code:
            session['cart_item'].pop(item[0], None)
            # Re-total whatever remains in the cart.
            if 'cart_item' in session:
                for key, value in session['cart_item'].items():
                    individual_quantity = int(session['cart_item'][key]['quantity'])
                    individual_price = float(session['cart_item'][key]['total_price'])
                    all_total_quantity = all_total_quantity + individual_quantity
                    all_total_price = all_total_price + individual_price
            # Stop immediately: mutating the dict and continuing to iterate
            # would be unsafe.
            break
    if all_total_quantity == 0:
        # NOTE(review): clearing the whole session here also logs the user
        # out when the last item is removed -- confirm this is intended.
        session.clear()
    else:
        session['all_total_quantity'] = all_total_quantity
        session['all_total_price'] = all_total_price
    return redirect('/user_cart')
'''
@app.route('/testitem')
def testitem():
if 'cart_item' in session:
obj = session.items()
return render_template("test.html",obj=obj)
'''
@app.route('/user_cart')
def user_cart():
    """Render the cart page; the template reads cart contents from the session."""
    return render_template("user_cart.html")
def array_merge(first_array, second_array):
    """Merge two collections of the same kind.

    Lists are concatenated, dicts are merged (keys from the second win),
    sets are unioned.  Returns False when the two arguments are not both
    lists, both dicts, or both sets.
    """
    if isinstance(first_array, list) and isinstance(second_array, list):
        return [*first_array, *second_array]
    if isinstance(first_array, dict) and isinstance(second_array, dict):
        merged = dict(first_array)
        merged.update(second_array)
        return merged
    if isinstance(first_array, set) and isinstance(second_array, set):
        return first_array | second_array
    return False
@app.route('/submit_order', methods=['POST'])
def submit_order():
    """Persist the checkout form as a pending row in orders_info."""
    form = request.form
    # Column order must match the INSERT statement below.
    order_row = (
        form['customer_id'],
        form['username'],
        "pending",
        form['totalquantity'],
        form['totalprice'],
        form['fname'],
        form['lname'],
        form['address'],
        form['mobile'],
    )
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("INSERT INTO orders_info(customer_id,username,status,total_quantity,total_price,first_name,last_name,address,contact_number) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)", order_row)
    mysql.connection.commit()
    cursor.close()
    return redirect('/user_account/my_orders')
# test
@app.route('/test', methods=['GET', 'POST'])
def test():
    """Register an admin account with hashed password and secret key."""
    if request.method == 'POST':
        hashed_password = generate_password_hash(request.form['password'])
        hashed_secret = generate_password_hash(request.form['secretkey'])
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute("INSERT INTO admin_info (username, email, password,secret_key) VALUES (%s, %s, %s, %s)", (request.form['username'], request.form['email'], hashed_password, hashed_secret))
        mysql.connection.commit()
        cursor.close()
    return render_template("test.html")
# customer login
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Customer login.

    GET renders the login form.  POST checks the submitted username and
    password hash against register_info; on success the customer's profile
    fields are stored in the session and the browser is redirected to the
    account page, otherwise the form is re-rendered with an error message.
    """
    msg = ''
    if request.method == 'POST':
        _username = request.form['username']
        _password = request.form['password']
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        result = cursor.execute("SELECT * FROM register_info WHERE username = %s", [_username])
        if result > 0:
            data = cursor.fetchone()
            cursor.close()
            # Compare the stored hash with the submitted password.
            if check_password_hash(data['password'], _password):
                # Authenticated: populate the session with the profile fields
                # the account pages read.  (The original re-queried the same
                # row here; the fetched row already has everything needed.)
                session['logged_in'] = True
                session['session_username'] = _username
                session['session_customer_id'] = data['customer_id']
                session['session_first_name'] = data['first_name']
                session['session_last_name'] = data['last_name']
                session['session_email'] = data['email']
                session['session_address'] = data['address']
                session['session_mobile_number'] = data['mobile_number']
                return redirect(url_for('user_account'))
            msg = 'Incorrect username / password !'
            session.clear()
            return render_template('login.html', msg=msg)
        cursor.close()
        msg = 'Invalid login'
        return render_template('login.html', msg=msg)
    # BUG FIX: GET requests previously fell off the end of the function,
    # returning None and making Flask raise a 500 -- so the login page itself
    # never rendered.  Show the form instead.
    return render_template('login.html', msg=msg)
# user forgot password
@app.route('/forgot_password')
def forgot_password():
    """Render the password-recovery page."""
    return render_template("user_forgot_password.html")
# customer user account
@app.route('/user_account')
def user_account():
    """Render the account page for the logged-in customer.

    Anonymous visitors are sent back to the home page.
    """
    if 'session_username' not in session:
        return redirect(url_for('index'))
    username = session['session_username']
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('SELECT * FROM register_info WHERE username = % s', (username,))
    data = cursor.fetchone()
    return render_template('user_account.html', data=data)
# customer user account settings
@app.route('/user_account/user_account_settings')
def user_account_settings():
    """Render the account-settings page for the logged-in customer.

    BUG FIX: when no customer was logged in, the original fell off the end of
    the function and returned None, which makes Flask raise a 500.  It now
    redirects anonymous visitors to the home page, matching user_account().
    """
    if 'session_username' not in session:
        return redirect(url_for('index'))
    username = session['session_username']
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('SELECT * FROM register_info WHERE username = % s', (username,))
    data = cursor.fetchone()
    return render_template('user_account_settings.html', data=data)
# customer user account change settings
@app.route('/user_account/user_account_settings/edit_user_details/<int:customer_id>', methods=['GET', 'POST'])
def edit_user_details(customer_id):
    """Persist edited profile fields for *customer_id*, then return to settings.

    Both GET and POST end with the same redirect; only POST performs the
    UPDATE (the original had the same redirect duplicated on both paths).
    """
    if request.method == 'POST':
        _first_name = request.form['firstname']
        _last_name = request.form['lastname']
        _email_address = request.form['email']
        _address = request.form['address']
        _mobile_number = request.form['mobile_number']
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute(
            'UPDATE register_info SET first_name = %s,last_name = %s,email = %s,address = %s,mobile_number = %s WHERE customer_id = %s',
            (_first_name, _last_name, _email_address, _address, _mobile_number, customer_id))
        mysql.connection.commit()
        cursor.close()
    return redirect('/user_account/user_account_settings')
# change user account password
@app.route('/user_account/user_account_settings/change_password/<int:customer_id>', methods=['GET', 'POST'])
def change_password(customer_id):
    """Change the password of *customer_id* after verifying the old one."""
    if request.method == 'POST':
        old_password = request.form['oldpassword']
        new_password = request.form['newpassword']
        confirm_password = request.form['confirmpassword']
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        found = cursor.execute('SELECT * FROM register_info WHERE customer_id = %s', (customer_id,))
        if found > 0:
            data = cursor.fetchone()
            password = data['password']
            # Compare the submitted old password against the stored hash.
            if check_password_hash(password, old_password):
                if new_password == confirm_password:
                    _hashed_password = generate_password_hash(new_password)
                    cursor.execute('UPDATE register_info SET password = %s WHERE customer_id = %s',
                                   (_hashed_password, customer_id))
                    mysql.connection.commit()
                    msg = "successful"
                else:
                    # Bug fix: the mismatch here is between the new password and
                    # its confirmation, not the old password.
                    msg = 'New Password and Confirm Password Mismatch !!'
            else:
                msg = 'Old Password is Mismatch !!'
            cursor.close()
            return render_template("user_change_password.html", msg=msg)
        cursor.close()
        # Bug fix: unknown customer_id fell through and returned None (HTTP 500);
        # show the empty form instead.
        return render_template("user_change_password.html")
    return render_template("user_change_password.html")
# customer orders display
@app.route('/user_account/my_orders')
def my_orders():
    """List the logged-in customer's pending and done orders.

    Bug fix: reading session['session_username'] unconditionally raised
    KeyError (an HTTP 500) for anonymous visitors; they are now redirected
    to the index page instead, as the other customer views do.
    """
    if 'session_username' not in session:
        return redirect(url_for('index'))
    username = session['session_username']
    status_pending = "pending"
    status_done = "done"
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('SELECT * FROM orders WHERE username = %s and status = %s', (username, status_pending))
    data = cursor.fetchall()
    cursor.execute('SELECT * FROM orders_info WHERE username = %s and status = %s', (username, status_pending))
    info = cursor.fetchall()
    cursor.execute('SELECT * FROM orders WHERE username = %s and status = %s', (username, status_done))
    donedata = cursor.fetchall()
    cursor.execute('SELECT * FROM orders_info WHERE username = %s and status = %s', (username, status_done))
    doneinfo = cursor.fetchall()
    cursor.close()
    return render_template("user_orders.html", data=data, info=info, donedata=donedata, doneinfo=doneinfo)
# customer orders display
@app.route('/user_account/my_orders/delete')
def my_orders_delete():
    """Delete every *pending* order of the logged-in customer.

    Bug fix: reading session['session_username'] unconditionally raised
    KeyError for anonymous visitors; redirect them to the index page.
    """
    if 'session_username' not in session:
        return redirect(url_for('index'))
    status = "pending"
    username = session['session_username']
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('DELETE FROM orders WHERE username = %s and status = %s ', (username, status))
    cursor.execute('DELETE FROM orders_info WHERE username = %s and status = %s ', (username, status))
    # A single commit covers both deletes, keeping the two tables consistent.
    mysql.connection.commit()
    cursor.close()
    return render_template("user_orders.html")
'''
if old_password == '' and old_secret_key == '' and new_password == '' and new_secret_key == '':
return redirect('/admin/home')
else:
cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
data = cursor.execute('SELECT * FROM admin_info WHERE username = % s', (username,))
if data > 0:
data = cursor.fetchone()
_password = data['password']
_secret_key = data['secret_key']
if check_password_hash(_password,old_password) and check_password_hash(_secret_key,old_secret_key):
_hashed_new_password = generate_password_hash(new_password)
_hashed_new_secret_key = generate_password_hash(new_secret_key)
cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
cursor.execute('UPDATE admin_info SET password = %s,secret_key = %s WHERE username = %s',(_hashed_new_password,_hashed_new_secret_key,username))
mysql.connection.commit()
cursor.close()
return redirect('/admin/home')
else:
session.clear()
return redirect(url_for('admin_logout'))
return redirect('/admin/home')
'''
'''
_username = request.form['username']
_first_name = request.form['firstname']
_last_name = request.form['lastname']
_email = request.form['email']
_password = request.form['password']
_address = request.form['address']
_mobile_number = request.form['mobile_number']
#if 'username' in request.form and 'firstname' in request.form and 'lastname' in request.form and 'email' in request.form and 'password' in request.form and 'address' in request.form and 'mobile_number' in request.form:
if _username and _first_name and _last_name and _email and _password and _address and _mobile_number == True:
print(_username)
else:
print(_mobile_number)
'''
# customer user account contact
@app.route('/user_account/contact')
def user_account_contact():
    """Render the contact page shown inside the customer account area."""
    return render_template("user_account_contact.html")
# customer logout
@app.route('/logout')
def logout():
    """End the customer session and return to the landing page.

    session.clear() already removes every key, including 'cart_item', so the
    original follow-up session.pop('cart_item', None) was dead code and has
    been dropped.
    """
    session.clear()
    return redirect(url_for('index'))
# customer register form
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a customer account from the registration form.

    Requires every field to be filled in, stores only a hash of the password,
    and rejects usernames that are already taken.
    """
    msg = ''
    if request.method == 'POST':
        _username = request.form['username']
        _first_name = request.form['firstname']
        _last_name = request.form['lastname']
        _email = request.form['emailaddress']
        _password = request.form['password']
        _address = request.form['address']
        _mobile_number = request.form['mobile_number']
        if _username and _first_name and _last_name and _email and _password and _address and _mobile_number:
            # Never store the plain-text password.
            _hashed_password = generate_password_hash(_password)
            cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
            cursor.execute("SELECT * FROM register_info WHERE username = %s", (_username,))
            user = cursor.fetchone()
            if user is None:
                cursor.execute(
                    "INSERT INTO register_info (username,first_name,last_name,email,password,address,mobile_number) VALUES (%s,%s,%s,%s,%s,%s,%s)",
                    (_username, _first_name, _last_name, _email, _hashed_password, _address, _mobile_number))
                mysql.connection.commit()
                msg = 'Account Created Successfully !'
            else:
                msg = 'This username already use another customer'
            # Bug fix: the original leaked the cursor on the duplicate-username path.
            cursor.close()
        else:
            msg = 'Please fill out the form !'
    return render_template("register.html", msg=msg)
# ------------------------- admin panel ---------------------
# admin login page
@app.route('/admin', methods=['GET', 'POST'])
def admin_login():
    """Authenticate an administrator by password plus secret key."""
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        secret_key = request.form['secret_key']
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        found = cursor.execute("SELECT * FROM admin_info WHERE username = %s", [username])
        if found > 0:
            data = cursor.fetchone()
            _password = data['password']
            _secret_key = data['secret_key']
            cursor.close()  # bug fix: the original never closed the cursor
            # Both the password and the secret key must match their stored hashes.
            if check_password_hash(_password, password):
                if check_password_hash(_secret_key, secret_key):
                    session['session_admin_username'] = username
                    return redirect('/admin/home')
                else:
                    # NOTE(review): kept from the original — this message is never
                    # displayed, because a redirect cannot carry template context.
                    msg = 'Incorrect username / password !'
                    return redirect('/admin')
        else:
            cursor.close()
    return render_template("admin_login.html")
'''
cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
cursor.execute('SELECT * FROM admin_info WHERE username = % s AND password = % s', (username, password,))
account = cursor.fetchone()
if account:
session['username'] = account['username']
session['password'] = account['password']
return redirect('/admin/home')
else:
msg3 = 'Incorrect username / password !'
return render_template("admin_login.html")
'''
# admin logout
@app.route('/admin_logout')
def admin_logout():
    """Drop the admin session key and go back to the admin login page."""
    session.pop('session_admin_username', None)
    return redirect(url_for('admin_login'))
# admin home page.after longin successful and load this page
@app.route('/admin/home')
def admin_home():
    """Render the admin dashboard; fall back to the login page when not logged in."""
    if 'session_admin_username' in session:
        username = session['session_admin_username']
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        # Placeholder normalized from '% s' to '%s' for consistency.
        cursor.execute('SELECT * FROM admin_info WHERE username = %s', (username,))
        addata = cursor.fetchone()
        cursor.close()  # bug fix: the original leaked the cursor
        return render_template('admin_home.html', addata=addata)
    return render_template("admin_login.html")
# admin home page,change admin password
@app.route('/admin/home/change/<string:username>', methods=['GET', 'POST'])
def admin_home_change(username):
    """Change the admin password and secret key after verifying the old ones.

    An all-empty form is treated as "no change".  A failed verification
    clears the session and forces a re-login.
    """
    if request.method == 'POST':
        old_password = request.form['oldpassword']
        old_secret_key = request.form['oldsecretkey']
        new_password = request.form['newpassword']
        new_secret_key = request.form['newsecretkey']
        if old_password == '' and old_secret_key == '' and new_password == '' and new_secret_key == '':
            return redirect('/admin/home')
        cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
        found = cursor.execute('SELECT * FROM admin_info WHERE username = %s', (username,))
        if found > 0:
            data = cursor.fetchone()
            _password = data['password']
            _secret_key = data['secret_key']
            if check_password_hash(_password, old_password) and check_password_hash(_secret_key, old_secret_key):
                _hashed_new_password = generate_password_hash(new_password)
                _hashed_new_secret_key = generate_password_hash(new_secret_key)
                # Bug fix: the original opened a second cursor here, leaking the
                # first one; reuse the existing cursor instead.
                cursor.execute('UPDATE admin_info SET password = %s,secret_key = %s WHERE username = %s',
                               (_hashed_new_password, _hashed_new_secret_key, username))
                mysql.connection.commit()
                cursor.close()
                return redirect('/admin/home')
            else:
                session.clear()
                return redirect(url_for('admin_logout'))
    return redirect('/admin/home')
# admin generate report
@app.route('/download/report/excel')
def download_report():
    """Export all orders_info rows as a downloadable .xls report."""
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("SELECT customer_id, username, total_quantity, total_price, first_name, last_name, address, contact_number FROM orders_info")
    result = cursor.fetchall()
    cursor.close()  # bug fix: the original leaked the cursor
    # The workbook is written into memory, not to disk.
    output = io.BytesIO()
    workbook = xlwt.Workbook()
    sheet = workbook.add_sheet('Orders Report')
    # Column keys (DictCursor row keys) and the matching header captions, in order.
    columns = ('customer_id', 'username', 'total_quantity', 'total_price',
               'first_name', 'last_name', 'address', 'contact_number')
    headers = ('customer id', 'user name', 'total quantity', 'total price',
               'first name', 'last name', 'address', 'contact number')
    for col, caption in enumerate(headers):
        sheet.write(0, col, caption)
    # enumerate replaces the original hand-maintained idx counter.
    for row_idx, row in enumerate(result, start=1):
        for col, key in enumerate(columns):
            value = row[key]
            # customer_id was stringified in the original export; keep that.
            sheet.write(row_idx, col, str(value) if key == 'customer_id' else value)
    workbook.save(output)
    output.seek(0)
    return Response(output, mimetype="application/ms-excel",
                    headers={"Content-Disposition": "attachment;filename=orders_report.xls"})
# admin orders
@app.route('/admin/orders')
def admin_orders():
    """List every pending order (plus its order info) for the admin panel."""
    pending = "pending"
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('SELECT * FROM orders WHERE status = %s', (pending,))
    orders = cursor.fetchall()
    cursor.execute('SELECT * FROM orders_info WHERE status = %s', (pending,))
    orders_info = cursor.fetchall()
    return render_template("admin_orders.html", data=orders, info=orders_info)
@app.route('/admin/orders/done/<string:id>')
def admin_orders_done(id):
    """Mark every order of customer *id* as done, then reload the orders page.

    Dead code removed: the original also assigned current_status = "pending"
    but never used it.
    """
    new_status = "done"
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('UPDATE orders_info SET status = %s WHERE customer_id = %s', (new_status, id))
    cursor.execute('UPDATE orders SET status = %s WHERE customer_id = %s', (new_status, id))
    mysql.connection.commit()
    cursor.close()
    return redirect('/admin/orders')
'''
cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
cursor.execute('SELECT * FROM orders WHERE status = %s', (status,))
data = cursor.fetchall()
cursor.execute('SELECT * FROM orders_info WHERE status = %s', (status,))
info = cursor.fetchall()
'''
# admin panel.account settings
@app.route('/admin/account_settings')
def admin_account_settings():
    """Show every registered customer account to a logged-in admin."""
    # Guard clause: anonymous visitors go straight back to the admin login.
    if 'session_admin_username' not in session:
        return redirect('/admin')
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("select * from register_info")
    accounts = cursor.fetchall()
    return render_template("admin_account_settings.html", data=accounts)
# admin panel,account settings/account delete
@app.route('/admin/account_settings/delete/<int:customer_id>', methods=['GET', 'POST'])
def admin_account_settings_delete(customer_id):
    """Delete one customer account and flash a confirmation message."""
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("DELETE FROM register_info WHERE customer_id = %s", (customer_id,))
    mysql.connection.commit()
    flash("Record Has Been Deleted Successfully")
    return redirect(url_for('admin_account_settings'))
'''
if request.method == 'POST':
_customer_id = request.form['customer_id']
cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DELETE FROM register_info WHERE customer_id = %s", (_customer_id,))
cursor.fetchall()
return redirect('/admin/account_settings')
'''
# admin panel.show items list
@app.route('/admin/items')
def admin_items():
    """Show the item catalogue to a logged-in admin."""
    if 'session_admin_username' not in session:
        return redirect('/admin')
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("select * from items")
    items = cursor.fetchall()
    return render_template("admin_items.html", data=items)
# admin panel.Manage Item List
@app.route('/admin/manage_items_list')
def admin_manage_items_list():
    """Show the editable item list to a logged-in admin."""
    if 'session_admin_username' not in session:
        return redirect('/admin')
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("select * from items")
    items = cursor.fetchall()
    return render_template("admin_manage_items_list.html", data=items)
# admin panel.item delete
@app.route('/manage_items/delete/<string:item_id>/<string:file_name>', methods=['POST', 'GET'])
def delete(item_id, file_name):
    """Delete an item row and its uploaded image file.

    Bug fix: if the image file was already missing, os.remove raised before
    commit, so the DELETE was never persisted and the row became impossible
    to remove.  A missing file is now tolerated.
    """
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute('DELETE FROM items WHERE item_id = %s and file_name = %s ', (item_id, file_name))
    try:
        os.remove(os.path.join(app.config['UPLOAD_FOLDER'], file_name))
    except FileNotFoundError:
        pass  # row cleanup still proceeds
    mysql.connection.commit()
    cursor.close()
    return redirect('/admin/manage_items_list')
@app.route('/manage_items/update/<int:item_id>', methods=["POST", "GET"])
def update_item(item_id):
    """Edit an item's name, price and "include" description fields."""
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    if request.method == 'POST':
        name = request.form['name']
        price = request.form['price']
        include = request.form['include']
        cursor.execute('UPDATE items SET name = %s,price = %s,include = %s WHERE item_id = %s',
                       (name, price, include, item_id))
        mysql.connection.commit()
        # Bug fix: re-fetch AFTER the update so the page shows the new values;
        # the original rendered the stale pre-update row next to the
        # "Update Successful" message.
        cursor.execute('SELECT * FROM items WHERE item_id = %s', (item_id,))
        data = cursor.fetchone()
        cursor.close()
        return render_template("admin_update_item.html", data=data, msg="Update Successful")
    cursor.execute('SELECT * FROM items WHERE item_id = %s', (item_id,))
    data = cursor.fetchone()
    cursor.close()
    return render_template("admin_update_item.html", data=data)
# admin panel.Add item
@app.route("/upload", methods=["POST", "GET"])
def upload():
    """Add one or more items, each with an uploaded image file.

    Bug fix: the original created a plain cursor and immediately replaced it
    with a DictCursor, leaking the first one; it also left a debug
    print(file) in the loop.
    """
    cur = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    now = datetime.now()
    if request.method == 'POST':
        files = request.files.getlist('files[]')
        for file in files:
            if file and allowed_file(file.filename):
                name = request.form['name']
                price = request.form['price']
                includes = request.form['includes']
                # secure_filename strips path components from the client-supplied name.
                filename = secure_filename(file.filename)
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                cur.execute("INSERT INTO items (name,price,include,file_name,uploaded_on) VALUES (%s,%s,%s,%s,%s)",
                            [name, price, includes, filename, now])
                mysql.connection.commit()
    cur.close()
    return redirect('/admin/manage_items_list')
@app.route('/admin/inbox')
def inbox():
    """Show every customer message in the admin inbox."""
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("select * from inbox")
    messages = cursor.fetchall()
    return render_template("admin_inbox.html", data=messages)
@app.route('/admin/inbox/delete/<int:id>')
def inbox_delete(id):
    """Remove one inbox message by its id, then reload the inbox."""
    cursor = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("DELETE FROM inbox WHERE inbox_id = %s", (id,))
    mysql.connection.commit()
    return redirect('/admin/inbox')
@app.route('/admin/inbox/reply', methods=["POST"])
def inbox_reply():
    """E-mail a reply to a customer message from the admin inbox."""
    if request.method == "POST":
        _email = request.form['email']
        _subject = request.form['subject']
        _message = request.form['textbox']
        # The reply goes out from the shop's address to the customer's address.
        reply = Message(_subject, sender="kapilabakers123@gmail.com", recipients=[_email])
        reply.body = _message
        mail.send(reply)
        return redirect('/admin/inbox')
    return render_template("admin_inbox.html")
if __name__ == "__main__":
app.run(debug=True) | 37.165179 | 284 | 0.622673 | 3,759 | 33,300 | 5.3195 | 0.07901 | 0.044009 | 0.042008 | 0.054611 | 0.655281 | 0.590418 | 0.537157 | 0.50205 | 0.453341 | 0.40168 | 0 | 0.003341 | 0.253904 | 33,300 | 896 | 285 | 37.165179 | 0.801489 | 0.035676 | 0 | 0.41527 | 1 | 0.014898 | 0.241594 | 0.061576 | 0 | 0 | 0 | 0 | 0 | 1 | 0.070764 | false | 0.089385 | 0.018622 | 0.007449 | 0.206704 | 0.003724 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
e21856412d884864c86c6233f822774b99cf079b | 280 | py | Python | core/component/extractor.py | HyokaChen/DailyNewsSpider | ea70c69fb4cf10130a45e00a148246525571c013 | [
"MIT"
] | 10 | 2020-07-30T14:46:43.000Z | 2021-11-16T12:04:01.000Z | core/component/extractor.py | HyokaChen/DailyNewsSpider | ea70c69fb4cf10130a45e00a148246525571c013 | [
"MIT"
] | null | null | null | core/component/extractor.py | HyokaChen/DailyNewsSpider | ea70c69fb4cf10130a45e00a148246525571c013 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@File : extractor.py
@Time : 2020/4/19 14:55
@Author : Empty Chan
@Contact : chen19941018@gmail.com
@Description: 解析器,解析爬虫模板的内容,并做处理
@License : (C) Copyright 2016-2020, iFuture Corporation Limited.
""" | 25.454545 | 68 | 0.621429 | 35 | 280 | 4.971429 | 0.971429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.127854 | 0.217857 | 280 | 11 | 69 | 25.454545 | 0.666667 | 0.935714 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e21ae5a75e215110f308b014b2ec805a19e97d62 | 232 | py | Python | tdd_wallet/views/get_wallet_balance/request_response_mocks.py | kapeed2091/tdd_practice | df8def8f91e528232d1a86b363523b99f6d2a818 | [
"Apache-2.0"
] | null | null | null | tdd_wallet/views/get_wallet_balance/request_response_mocks.py | kapeed2091/tdd_practice | df8def8f91e528232d1a86b363523b99f6d2a818 | [
"Apache-2.0"
] | null | null | null | tdd_wallet/views/get_wallet_balance/request_response_mocks.py | kapeed2091/tdd_practice | df8def8f91e528232d1a86b363523b99f6d2a818 | [
"Apache-2.0"
] | null | null | null |
REQUEST_BODY_JSON = """
{
"customer_ids": [
"string"
]
}
"""
RESPONSE_200_JSON = """
{
"customers_balance": [
{
"balance": 1.1,
"customer_id": "string"
}
]
}
"""
| 10.086957 | 35 | 0.409483 | 17 | 232 | 5.176471 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036496 | 0.409483 | 232 | 22 | 36 | 10.545455 | 0.605839 | 0 | 0 | 0.117647 | 0 | 0 | 0.755459 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e22cfb005598daffbb8357d2bbb482be04a79192 | 1,496 | py | Python | dump-pkg/src/dumpshmamp/collectors/files.py | sha1n/macos-devenv-dump-poc | be439ad4a0c0ac265fe62d44bded73eab1a0c31d | [
"MIT"
] | null | null | null | dump-pkg/src/dumpshmamp/collectors/files.py | sha1n/macos-devenv-dump-poc | be439ad4a0c0ac265fe62d44bded73eab1a0c31d | [
"MIT"
] | null | null | null | dump-pkg/src/dumpshmamp/collectors/files.py | sha1n/macos-devenv-dump-poc | be439ad4a0c0ac265fe62d44bded73eab1a0c31d | [
"MIT"
] | null | null | null | import os
import time
from shutil import copytree, copyfile
from shminspector.util.error_handling import raised_to_none_wrapper
from shminspector.util.logger import NOOP_LOGGER
def mkdir(path):
    """Create directory *path* (the parent directory must already exist)."""
    os.mkdir(path)
def path_exists(path):
    """Return True when *path* exists on the filesystem."""
    return os.path.exists(path)
def file_path(path, *paths):
    """Join one or more path components using the platform separator."""
    return os.path.join(path, *paths)
def copytree_if(source_dir, target_dir, modified_in_the_past_sec):
    """Copy *source_dir* to *target_dir*, keeping only recently-modified files.

    Entries whose mtime is older than ``modified_in_the_past_sec`` seconds are
    skipped via the shutil.copytree ``ignore`` callback.
    """
    copytree(
        src=source_dir,
        dst=target_dir,
        ignore=_ignore_files_mtime_gt(modified_in_the_past_sec)
    )
def try_copyfile(source_file, target_file, logger=NOOP_LOGGER):
    """Best-effort copy of one file.

    NOTE(review): raised_to_none_wrapper presumably swallows exceptions and
    returns None on failure -- confirm against its implementation.
    """
    logger.progress("Trying to copy '{}' to '{}'".format(source_file, target_file))
    raised_to_none_wrapper(copyfile, logger)(source_file, target_file)
def try_copytree(source_dir, target_dir, logger=NOOP_LOGGER):
    """Best-effort recursive copy of a directory tree.

    NOTE(review): raised_to_none_wrapper presumably swallows exceptions and
    returns None on failure -- confirm against its implementation.
    """
    logger.progress("Trying to copy '{}' to '{}'".format(source_dir, target_dir))
    raised_to_none_wrapper(copytree, logger)(source_dir, target_dir)
def try_copytree_if(source_dir, target_dir, modified_in_the_past_sec, logger=NOOP_LOGGER):
    """Best-effort variant of copytree_if: exceptions go through the wrapper."""
    raised_to_none_wrapper(copytree_if, logger)(source_dir, target_dir, modified_in_the_past_sec)
def file_name_from(path):
    """Return the final path component (the file name) of *path*.

    os.path.basename is the stdlib equivalent of splitting the path and
    taking the last segment, which the original did by hand.
    """
    return os.path.basename(path)
def _ignore_files_mtime_gt(interval_sec):
    """Build a ``shutil.copytree`` ignore callback.

    The returned callable reports (i.e. ignores) entries whose mtime is older
    than ``now - interval_sec`` -- so only files modified within the last
    ``interval_sec`` seconds are copied.
    """
    def ignore(path, names):
        # A name is ignored when it was NOT modified within the interval.
        return (name for name in names if os.path.getmtime("{}/{}".format(path, name)) < time.time() - interval_sec)
    return ignore
| 28.226415 | 116 | 0.752674 | 223 | 1,496 | 4.70852 | 0.242152 | 0.06 | 0.085714 | 0.102857 | 0.339048 | 0.259048 | 0.234286 | 0.234286 | 0.234286 | 0.198095 | 0 | 0.000779 | 0.14238 | 1,496 | 52 | 117 | 28.769231 | 0.817615 | 0 | 0 | 0 | 0 | 0 | 0.039439 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3125 | false | 0 | 0.15625 | 0.09375 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e236236aafb8294a5432d56d42fa0c9adb895ff5 | 314 | py | Python | src/autonlp/tasks.py | goncaloperes/autonlp | 6ce277027ca72ab01664d372ea106a2fd59d7a96 | [
"Apache-2.0"
] | 667 | 2021-03-05T14:00:05.000Z | 2022-03-24T06:43:41.000Z | src/autonlp/tasks.py | goncaloperes/autonlp | 6ce277027ca72ab01664d372ea106a2fd59d7a96 | [
"Apache-2.0"
] | 54 | 2021-03-05T18:31:39.000Z | 2022-01-24T13:17:33.000Z | src/autonlp/tasks.py | goncaloperes/autonlp | 6ce277027ca72ab01664d372ea106a2fd59d7a96 | [
"Apache-2.0"
] | 45 | 2021-03-07T13:02:18.000Z | 2021-12-11T08:09:53.000Z | TASKS = {
"binary_classification": 1,
"multi_class_classification": 2,
"entity_extraction": 4,
"extractive_question_answering": 5,
"summarization": 8,
"single_column_regression": 10,
"speech_recognition": 11,
}
DATASETS_TASKS = ["text-classification", "question-answering-extractive"]
| 26.166667 | 73 | 0.703822 | 31 | 314 | 6.806452 | 0.806452 | 0.161137 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034221 | 0.16242 | 314 | 11 | 74 | 28.545455 | 0.768061 | 0 | 0 | 0 | 0 | 0 | 0.624204 | 0.410828 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e24599cb1e59c5ae1a3f7e6bceabe1a12d5fe1cc | 601 | py | Python | backend/reqlock/models/organisation.py | ChaikaBogdan/reqlock | 6b38f341da4c7ea4102ce598dfe1d8e7957a9f69 | [
"MIT"
] | 1 | 2021-08-14T08:53:36.000Z | 2021-08-14T08:53:36.000Z | backend/reqlock/models/organisation.py | ChaikaBogdan/reqlock | 6b38f341da4c7ea4102ce598dfe1d8e7957a9f69 | [
"MIT"
] | null | null | null | backend/reqlock/models/organisation.py | ChaikaBogdan/reqlock | 6b38f341da4c7ea4102ce598dfe1d8e7957a9f69 | [
"MIT"
] | null | null | null | from django.contrib.contenttypes.fields import GenericRelation
from django.conf import settings
from django.db import models
from .custom_field import CustomField
from .model_mixins import SoftDeleteModel
class Organisation(SoftDeleteModel):
    """An organisation with one owner, many members and generic custom fields."""
    name = models.CharField(max_length=255)
    # CASCADE: deleting the owning user deletes the organisation row as well.
    owner = models.ForeignKey(settings.AUTH_USER_MODEL,
                              on_delete=models.CASCADE)
    # related_name='+' disables the reverse accessor on the user model.
    members = models.ManyToManyField(
        settings.AUTH_USER_MODEL, blank=True, related_name='+')
    custom_fields = GenericRelation(CustomField)

    def __str__(self):
        return self.name
| 31.631579 | 63 | 0.74376 | 68 | 601 | 6.367647 | 0.588235 | 0.069284 | 0.073903 | 0.096998 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006135 | 0.186356 | 601 | 18 | 64 | 33.388889 | 0.879346 | 0 | 0 | 0 | 0 | 0 | 0.001664 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.357143 | 0.071429 | 0.857143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
e249890e88cfbab8c769d8c2fcc921cea8f7822f | 3,194 | py | Python | python_magnetrun/HMagnet.py | Trophime/python_magnetrun | 7e8ae95a1059b98d5101e0eb3f307346a4821633 | [
"MIT"
] | null | null | null | python_magnetrun/HMagnet.py | Trophime/python_magnetrun | 7e8ae95a1059b98d5101e0eb3f307346a4821633 | [
"MIT"
] | 8 | 2020-11-12T08:46:50.000Z | 2021-03-15T03:16:04.000Z | python_magnetrun/HMagnet.py | Trophime/python_magnetrun | 7e8ae95a1059b98d5101e0eb3f307346a4821633 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#-*- coding:utf-8 -*-
"""HMagnet Object"""
import json
class HMagnet:
    """A magnet record.

    Attributes:
        name: magnet identifier
        cadref: CAD reference
        MAGfile: MAGfile configuration file name(s)
        status: "Dead"/"Alive"
        index: numeric identifier used by the remote service
    """

    def __init__(self, name: str, cadref: str, MAGfile: list, status: str, index: int):
        """Default constructor."""
        self.name = name
        self.cadref = cadref
        self.MAGfile = MAGfile
        self.status = status
        self.index = index

    def __repr__(self):
        """Debug representation listing every field."""
        return "%s(name=%r, cadref=%r, MAGfile=%r, status=%r, index=%r)" % \
            (self.__class__.__name__,
             self.name,
             self.cadref,
             self.MAGfile,
             self.status,
             self.index
             )

    def setIndex(self, index):
        """set index"""
        self.index = index

    def getIndex(self):
        """get index"""
        return self.index

    def setCadref(self, cadref):
        """set cadref"""
        self.cadref = cadref

    def getCadref(self):
        """get cadref"""
        return self.cadref

    def setStatus(self, status):
        """set status"""
        self.status = status

    def getStatus(self):
        """get status"""
        return self.status

    def setRecords(self, records):
        """set records list (aka txt files) -- not implemented yet"""

    def addRecord(self, record):
        """add a record to records list (aka txt files) -- not implemented yet"""

    def getRecords(self):
        """get records list (aka txt files) -- not implemented yet"""

    def setRapidRecords(self, records):
        """set records list (aka tdms files) -- not implemented yet"""

    def addRapidRecord(self, record):
        """add a rapid record to records list (aka tdms files) -- not implemented yet"""

    def getRapidRecords(self):
        """get rapid records list (aka tdms files) -- not implemented yet"""

    def setGObjects(self, GObjects):
        """set list of GObjects composing Magnets -- not implemented yet"""

    def addGObject(self, GObject):
        """add GObject to list of GObjects composing Magnets -- not implemented yet"""

    def getGObjects(self, category=None):
        """get list of GObjects composing Magnets, optionally filtered by category.

        Bug fix: the original defined getGObjects three times (no-arg twice and
        once taking a category), so only the last definition survived.  The
        overloads are merged into this single method; callers using either
        signature keep working.  Not implemented yet.
        """

    def setMAGfile(self, MAGfile):
        """set MAGfile configuration file(s)"""
        self.MAGfile = MAGfile

    def getMAGfile(self):
        """get MAGfile configuration file(s)"""
        return self.MAGfile

    # def download(self):
    #     """Download MAGfile"""
    #     # need test-request.py
    #
    #     payload = {
    #         'email': email_address,
    #         'password': password
    #     }
    #     session = createSession(url_logging, payload)
    #
    #     url = base_url + "/" + "downloadM.php"
    #     for f in self.MAGfile:
    #         params = ( ('ID', self.index), ('NAME', f), )
    #         download(session, , params, f, save=True, debug=False)
    #

    def to_json(self):
        """Serialize this object to a JSON string via the deserialize helper."""
        import deserialize
        return json.dumps(self, default=deserialize.serialize_instance, sort_keys=True, indent=4)
| 24.75969 | 97 | 0.550094 | 342 | 3,194 | 5.073099 | 0.312866 | 0.032277 | 0.048415 | 0.066282 | 0.239769 | 0.230548 | 0.088184 | 0.088184 | 0.062248 | 0.062248 | 0 | 0.001384 | 0.32154 | 3,194 | 128 | 98 | 24.953125 | 0.799262 | 0.360989 | 0 | 0.212766 | 0 | 0.021277 | 0.03032 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.468085 | false | 0 | 0.042553 | 0 | 0.659574 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e24dc77fca13dc31d7fa3bbe8bdd9f35316dce1f | 1,386 | py | Python | avae/types.py | mkuiper/deepmind-research | 1642ae3499c8d1135ec6fe620a68911091dd25ef | [
"Apache-2.0"
] | 2 | 2022-03-14T18:36:23.000Z | 2022-03-14T22:35:20.000Z | avae/types.py | sunjinhao123/deepmind-research | 1642ae3499c8d1135ec6fe620a68911091dd25ef | [
"Apache-2.0"
] | null | null | null | avae/types.py | sunjinhao123/deepmind-research | 1642ae3499c8d1135ec6fe620a68911091dd25ef | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 DeepMind Technologies Limited.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Useful dataclasses types used across the code."""
from typing import Optional
import dataclasses
import jax.numpy as jnp
import numpy as np
@dataclasses.dataclass(frozen=True)
class ELBOOutputs:
    """Container for the three terms reported by an ELBO computation."""
    elbo: jnp.ndarray
    data_fidelity: jnp.ndarray
    kl: jnp.ndarray
@dataclasses.dataclass(frozen=True)
class LabelledData:
    """A batch of labelled examples.

    Attributes:
      data: Array of shape (batch_size, ...).
      label: Array of shape (batch_size, ...), or None for unlabelled batches.
    """
    data: np.ndarray
    label: Optional[np.ndarray]
@dataclasses.dataclass(frozen=True)
class NormalParams:
    """Parameters of a normal distribution.

    Attributes:
      mean: Array of shape (batch_size, latent_dim).
      variance: Array of shape (batch_size, latent_dim); this field holds the
        variance, not the standard deviation.
    """
    mean: jnp.ndarray
    variance: jnp.ndarray
| 25.666667 | 74 | 0.743867 | 194 | 1,386 | 5.278351 | 0.541237 | 0.058594 | 0.046875 | 0.066406 | 0.21582 | 0.140625 | 0.058594 | 0 | 0 | 0 | 0 | 0.006932 | 0.167388 | 1,386 | 53 | 75 | 26.150943 | 0.880416 | 0.643579 | 0 | 0.176471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.235294 | 0 | 0.823529 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e25c9b76108c2ff31fa8e40ab53eb09cb8788655 | 399 | py | Python | Learn_Python_the_Hard_Way/ex20.py | XC-Li/old_python_programs | 8f9b10120c94cf06978aec3f47be4517f463ca76 | [
"MIT"
] | null | null | null | Learn_Python_the_Hard_Way/ex20.py | XC-Li/old_python_programs | 8f9b10120c94cf06978aec3f47be4517f463ca76 | [
"MIT"
] | null | null | null | Learn_Python_the_Hard_Way/ex20.py | XC-Li/old_python_programs | 8f9b10120c94cf06978aec3f47be4517f463ca76 | [
"MIT"
] | null | null | null | #ex20
from sys import argv
script, input_file = argv
def print_all(f):
    # Print the entire remaining contents of file object f (Python 2 print).
    print f.read()
def rewind(f):
    """Seek back to the beginning of the file object."""
    f.seek(0)
def print_a_line(line_count, f):
    # Read one line and print its number, text, and length (Python 2 print).
    line = f.readline().strip()
    print line_count, line, len(line)
# Print the whole file, rewind to the start, then print the first three
# lines one by one.
current_file = open(input_file)
print_all(current_file)
rewind(current_file)
for i in [1, 2, 3]:
    print_a_line(i,current_file)
| 15.346154 | 38 | 0.62406 | 63 | 399 | 3.730159 | 0.47619 | 0.187234 | 0.085106 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020408 | 0.263158 | 399 | 26 | 39 | 15.346154 | 0.778912 | 0.010025 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.071429 | null | null | 0.428571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
e25cf80ff9e3da302005072e50ab2a48f982e28b | 212 | py | Python | ATX/ATX/startTest.py | cxMax/Python-practise-sample | 23fa9b2bec785b19555cb2ffef359d03a8765af4 | [
"MIT"
] | null | null | null | ATX/ATX/startTest.py | cxMax/Python-practise-sample | 23fa9b2bec785b19555cb2ffef359d03a8765af4 | [
"MIT"
] | null | null | null | ATX/ATX/startTest.py | cxMax/Python-practise-sample | 23fa9b2bec785b19555cb2ffef359d03a8765af4 | [
"MIT"
] | null | null | null | import time
from GameCenterTestMethod import *

# Package under test: the Meizu Flyme game-center app.
package_name = 'com.meizu.flyme.gamecenter'
driver = initializeAtx(package_name)
navigateToMainActivity(driver)
# BUGFIX: time.sleep() takes SECONDS, not milliseconds — sleep(2000)
# stalled the test for ~33 minutes; a 2-second settle delay was intended.
time.sleep(2)
navigateToWelfareFragment(driver)
| 21.2 | 43 | 0.839623 | 22 | 212 | 8 | 0.727273 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020513 | 0.080189 | 212 | 9 | 44 | 23.555556 | 0.882051 | 0 | 0 | 0 | 0 | 0 | 0.122642 | 0.122642 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e2a378ce029843a17862b6dc41fe904b57f1f99a | 528 | py | Python | tests_integration/helpers.py | daltonmatos/BarterDude | 9f7eb049711d688d61061036e886c33d855e563a | [
"Apache-2.0"
] | 12 | 2020-02-14T20:30:38.000Z | 2022-03-08T17:53:55.000Z | tests_integration/helpers.py | daltonmatos/BarterDude | 9f7eb049711d688d61061036e886c33d855e563a | [
"Apache-2.0"
] | 11 | 2020-02-29T15:06:25.000Z | 2021-05-03T15:23:12.000Z | tests_integration/helpers.py | daltonmatos/BarterDude | 9f7eb049711d688d61061036e886c33d855e563a | [
"Apache-2.0"
] | 3 | 2020-02-28T20:43:11.000Z | 2022-02-07T21:56:34.000Z | from barterdude.hooks import BaseHook
from asyncworker.rabbitmq.message import RabbitMQMessage
class ErrorHook(BaseHook):
    """Test-helper hook: every callback raises NotImplementedError,
    so consumers can exercise their hook-failure handling paths."""

    async def before_consume(self, message: RabbitMQMessage):
        """Always fails before a message is handed to the consumer."""
        raise NotImplementedError

    async def on_success(self, message: RabbitMQMessage):
        """Always fails after a message was consumed successfully."""
        raise NotImplementedError

    async def on_fail(self, message: RabbitMQMessage, error: Exception):
        """Always fails when message consumption itself raised."""
        raise NotImplementedError

    async def on_connection_fail(self, error: Exception, retries: int):
        """Always fails when the broker connection drops."""
        raise NotImplementedError
| 31.058824 | 72 | 0.763258 | 55 | 528 | 7.236364 | 0.472727 | 0.080402 | 0.075377 | 0.241206 | 0.301508 | 0.301508 | 0.301508 | 0.301508 | 0 | 0 | 0 | 0 | 0.179924 | 528 | 16 | 73 | 33 | 0.919169 | 0 | 0 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.181818 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2c3d4e471a29f3845df84547b5d44c07f04130e7 | 5,470 | py | Python | tests/test_env_loader.py | alexbigkid/ingredients_for_cooking | 38fda905a213544764044923fdfac77c6cc17dba | [
"MIT"
] | null | null | null | tests/test_env_loader.py | alexbigkid/ingredients_for_cooking | 38fda905a213544764044923fdfac77c6cc17dba | [
"MIT"
] | null | null | null | tests/test_env_loader.py | alexbigkid/ingredients_for_cooking | 38fda905a213544764044923fdfac77c6cc17dba | [
"MIT"
] | null | null | null | """Unit tests for env_loader.py"""
# Standard library imports
import os
# from unittest import TestCase, mock
import unittest
from unittest.mock import patch, mock_open
# Local application imports
from context import EnvLoader
class TestEnvLoader(unittest.TestCase):
    """Unit tests for EnvLoader.

    NOTE(review): the production method name
    'set_environment_varaibales_from_file' is misspelled in env_loader.py;
    the test names below mirror that spelling on purpose.
    """

    def setUp(self):
        """Create a fresh EnvLoader before every test."""
        self.__env_loader = EnvLoader()

    # -------------------------------------------------------------------------
    # Tests for get_environment_variable_value
    # -------------------------------------------------------------------------
    @patch.dict(os.environ, {'ABK_TEST_ENV_VAR': '[fake_api_key]'})
    def test_get_environment_variable_value_returns_valid_value(self):
        """
        get_environment_variable_value returns a value from the set environment variable
        """
        actual_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
        self.assertEqual(actual_value, '[fake_api_key]')

    # BUGFIX: the original relied on ABK_TEST_ENV_VAR being absent from the
    # real process environment; clear=True guarantees it is undefined here
    # regardless of the host machine or test-execution order.
    @patch.dict(os.environ, {}, clear=True)
    def test_get_environment_variable_value_should_return_empty_given_env_var_undefined(self):
        """
        get_environment_variable_value returns empty string
        given environment variable is not set
        """
        actual_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
        self.assertEqual(actual_value, '')

    @patch.dict(os.environ, {'ABK_TEST_ENV_VAR': ''})
    def test_get_environment_variable_value_should_return_empty_given_env_var_value_is_empty(self):
        """
        get_environment_variable_value returns empty string
        given environment variable is set to empty string
        """
        actual_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
        self.assertEqual(actual_value, '')

    # -------------------------------------------------------------------------
    # Tests for set_environment_varaibales_from_file
    # -------------------------------------------------------------------------
    # BUGFIX: both tests below first assert the variables are unset and then
    # write them into the REAL os.environ; clear=True both makes that
    # precondition reliable and rolls the pollution back when the test exits.
    @patch.dict(os.environ, {}, clear=True)
    def test_set_environment_varaibales_from_file_sets_one_env_variable(self):
        """
        set_environment_varaibales_from_file sets environment variable read from file
        """
        env_var_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
        self.assertEqual(env_var_value, '')
        with patch("builtins.open", mock_open(read_data='ABK_TEST_ENV_VAR=[fake_api_key]')) as mock_file:
            self.__env_loader.set_environment_varaibales_from_file('does_not_matter')
            env_var_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
            self.assertEqual(env_var_value, '[fake_api_key]')
            mock_file.assert_called_with('does_not_matter', 'r')

    @patch.dict(os.environ, {}, clear=True)
    def test_set_environment_varaibales_from_file_sets_several_env_variables(self):
        """
        set_environment_varaibales_from_file sets several environment variables
        after reading from file
        """
        env_var_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR1')
        self.assertEqual(env_var_value, '')
        env_var_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR2')
        self.assertEqual(env_var_value, '')
        data_to_read = 'ABK_TEST_ENV_VAR1=[que_chimba]\nABK_TEST_ENV_VAR2=[no_dar_papaya]'
        with patch("builtins.open", mock_open(read_data=data_to_read)) as mock_file:
            self.__env_loader.set_environment_varaibales_from_file('does_not_matter')
            env_var_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR1')
            self.assertEqual(env_var_value, '[que_chimba]')
            env_var_value = self.__env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR2')
            self.assertEqual(env_var_value, '[no_dar_papaya]')
            mock_file.assert_called_with('does_not_matter', 'r')

    # TODO coverage gaps kept from the original review notes:
    # 1. malformed line 'VAR_NAME = VAR_VALUE' (spaces around '=') is not handled
    # def test_set_environment_varaibales_from_file_sets_env_variable_given_line_is_with_white_spaces(self):
    #     env_var_value = self.env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
    #     self.assertEqual(env_var_value, '')
    #     with patch("builtins.open", mock_open(read_data='ABK_TEST_ENV_VAR = [it_is_very_late]')) as mock_file:
    #         self.env_loader.set_environment_varaibales_from_file('does_not_matter')
    #         env_var_value = self.env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
    #         self.assertEqual(env_var_value, '[it_is_very_late]')
    #         mock_file.assert_called_with('does_not_matter', 'r')
    # 2. a line with no '=' at all is not handled
    # def test_set_environment_varaibales_from_file_does_not_set_env_variable_given_no_value_in_file(self):
    #     env_var_value = self.env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
    #     self.assertEqual(env_var_value, '')
    #     with patch("builtins.open", mock_open(read_data='ABK_TEST_ENV_VAR')) as mock_file:
    #         self.env_loader.set_environment_varaibales_from_file('does_not_matter')
    #         env_var_value = self.env_loader.get_environment_variable_value('ABK_TEST_ENV_VAR')
    #         self.assertEqual(env_var_value, '')
    #         mock_file.assert_called_with('does_not_matter', 'r')
    # 3. file does not exist
# Allow running this test module directly: `python test_env_loader.py`.
if __name__ == '__main__':
    unittest.main()
| 45.583333 | 112 | 0.686837 | 697 | 5,470 | 4.842181 | 0.144907 | 0.064 | 0.068444 | 0.16 | 0.77363 | 0.762074 | 0.740741 | 0.693333 | 0.606222 | 0.561185 | 0 | 0.002002 | 0.178245 | 5,470 | 119 | 113 | 45.966387 | 0.748832 | 0.430165 | 0 | 0.439024 | 0 | 0 | 0.15139 | 0.032956 | 0 | 0 | 0 | 0 | 0.268293 | 1 | 0.146341 | false | 0 | 0.097561 | 0 | 0.268293 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2c4c15f3c64f8308c7eef034f7a3d169c6ea6ba2 | 304 | py | Python | Element/Main.py | Chinacolt/My-Freaky-Things | 366ee1e77237fdc4fe1eb70e4c8ea7fb68fba65b | [
"MIT"
] | null | null | null | Element/Main.py | Chinacolt/My-Freaky-Things | 366ee1e77237fdc4fe1eb70e4c8ea7fb68fba65b | [
"MIT"
] | null | null | null | Element/Main.py | Chinacolt/My-Freaky-Things | 366ee1e77237fdc4fe1eb70e4c8ea7fb68fba65b | [
"MIT"
] | null | null | null |
import copy
from Element import Element
# Minimal HTML tag types: each subclass of Element presumably renders as the
# tag named after the class — TODO confirm against the Element base class.
class div(Element): pass
class img(Element): pass
class a(Element): pass
# Build a card-like snippet. NOTE(review): `_class`/`_href`/`_src` appear to
# set the matching HTML attribute, and the `<` operator appears to nest the
# right-hand element inside the left-hand one; neither behavior is visible
# from this file — verify against Element's operator overloads.
kkk = div()._class("box box-solid")
kkk < a()._href("emre.cintay.com") < div()._class("image thumbnail soft") < img()._src("emre.cintay.com/images/profile.png")
print(kkk.render())
| 16 | 124 | 0.694079 | 46 | 304 | 4.5 | 0.543478 | 0.15942 | 0.154589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128289 | 304 | 18 | 125 | 16.888889 | 0.781132 | 0 | 0 | 0 | 0 | 0 | 0.273333 | 0.113333 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.375 | 0.25 | 0 | 0.625 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
2c54f15269357fb0d513e56c93fb610e235e1da6 | 850 | py | Python | app/models.py | jimmybutton/newsblog | 17c899d86a8b7a838acf4227bf8e77b809bee375 | [
"MIT"
] | 1 | 2021-10-17T23:38:16.000Z | 2021-10-17T23:38:16.000Z | app/models.py | jimmybutton/newsblog | 17c899d86a8b7a838acf4227bf8e77b809bee375 | [
"MIT"
] | null | null | null | app/models.py | jimmybutton/newsblog | 17c899d86a8b7a838acf4227bf8e77b809bee375 | [
"MIT"
] | null | null | null | from app import db
from datetime import datetime
class Article(db.Model):
    """A news article; belongs to at most one Category via category_id."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(128))
    content = db.Column(db.String(2048))
    category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
    image = db.Column(db.String(128))  # filename of feature image
    created = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    # BUGFIX: `updated` only had a creation-time default, so it never changed
    # when a row was edited; `onupdate` refreshes it on every UPDATE.
    updated = db.Column(db.DateTime, index=True, default=datetime.utcnow,
                        onupdate=datetime.utcnow)

    def __repr__(self):
        return '<Article {}>'.format(self.title)
class Category(db.Model):
    """A named grouping of articles (one-to-many: Article.category_id)."""
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Category names are unique and indexed for lookup by name.
    name = db.Column(db.String(64), index=True, unique=True)
    # Reverse side exposed on Article as `.category`; lazy='dynamic' makes
    # `category.articles` a query object rather than a loaded list.
    articles = db.relationship('Article', backref='category', lazy='dynamic')

    def __repr__(self):
        return '<Category {}>'.format(self.name)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.