hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0347ce2c699c5e1ec571bccc902e6b648616eb2c
| 53
|
py
|
Python
|
project/test/Backend/pages/api/__init__.py
|
fael07/DRF-Project
|
f65b4177e56e7209d2369ba9d6d81bfe00321052
|
[
"MIT"
] | null | null | null |
project/test/Backend/pages/api/__init__.py
|
fael07/DRF-Project
|
f65b4177e56e7209d2369ba9d6d81bfe00321052
|
[
"MIT"
] | null | null | null |
project/test/Backend/pages/api/__init__.py
|
fael07/DRF-Project
|
f65b4177e56e7209d2369ba9d6d81bfe00321052
|
[
"MIT"
] | null | null | null |
from ...class_models.list import TestModelDataListApi
| 53
| 53
| 0.867925
| 6
| 53
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 53
| 1
| 53
| 53
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0356e139e2b81cb6158b675d2913aeabc45c3748
| 131
|
py
|
Python
|
week6/function_list.py
|
melphick/pybasics
|
68508d10b7509943b629b3c627252de60b6a5744
|
[
"Apache-2.0"
] | null | null | null |
week6/function_list.py
|
melphick/pybasics
|
68508d10b7509943b629b3c627252de60b6a5744
|
[
"Apache-2.0"
] | null | null | null |
week6/function_list.py
|
melphick/pybasics
|
68508d10b7509943b629b3c627252de60b6a5744
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
def f1(a_list):
a_list = []
a_list.append("bar")
print a_list
a = range(10)
print a
f1(a)
print a
| 10.076923
| 24
| 0.59542
| 25
| 131
| 2.96
| 0.48
| 0.27027
| 0.243243
| 0.27027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040404
| 0.244275
| 131
| 12
| 25
| 10.916667
| 0.707071
| 0.122137
| 0
| 0.25
| 0
| 0
| 0.026316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.375
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ceeaa77147b15728a1249aab97187c113b4c9d37
| 59
|
py
|
Python
|
node_manager_fkie/src/node_manager_fkie/editor/__init__.py
|
ahoarau/multimaster_fkie
|
82bf341423bd3c2a15005c85eca9de5747cb8069
|
[
"BSD-3-Clause"
] | 1
|
2020-03-10T06:32:51.000Z
|
2020-03-10T06:32:51.000Z
|
node_manager_fkie/src/node_manager_fkie/editor/__init__.py
|
ahoarau/multimaster_fkie
|
82bf341423bd3c2a15005c85eca9de5747cb8069
|
[
"BSD-3-Clause"
] | 1
|
2018-04-20T13:03:34.000Z
|
2018-04-20T13:03:34.000Z
|
node_manager_fkie/src/node_manager_fkie/editor/__init__.py
|
ahoarau/multimaster_fkie
|
82bf341423bd3c2a15005c85eca9de5747cb8069
|
[
"BSD-3-Clause"
] | 1
|
2018-11-07T03:37:23.000Z
|
2018-11-07T03:37:23.000Z
|
from .editor import Editor
from .text_edit import TextEdit
| 19.666667
| 31
| 0.830508
| 9
| 59
| 5.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 2
| 32
| 29.5
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3018be304e4c88570da6295a8e7a6a579831ac98
| 325
|
py
|
Python
|
myexceptions.py
|
carloshernangarrido/whatsapp_sender
|
1489bc6cf12e1557e6e85a5ed2f15e4ba3b86a19
|
[
"MIT"
] | null | null | null |
myexceptions.py
|
carloshernangarrido/whatsapp_sender
|
1489bc6cf12e1557e6e85a5ed2f15e4ba3b86a19
|
[
"MIT"
] | null | null | null |
myexceptions.py
|
carloshernangarrido/whatsapp_sender
|
1489bc6cf12e1557e6e85a5ed2f15e4ba3b86a19
|
[
"MIT"
] | null | null | null |
### Exceptions ###
E1 = "Hay un problema con este número"
E2 = """
****************************************************************************
*** No encontré el archivo. Recordá que debe estar dentro de mi carpeta. ***
****************************************************************************
"""
| 40.625
| 81
| 0.292308
| 21
| 325
| 4.52381
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00738
| 0.166154
| 325
| 8
| 82
| 40.625
| 0.343173
| 0.030769
| 0
| 0.333333
| 0
| 0
| 0.92691
| 0.504983
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3019b61980e68a75e379d6f84dc8be166146691b
| 141
|
py
|
Python
|
1. Dive into Python/1.21. Exercise Grading in CENG314.py
|
ahmetutkuozkan/my_ceng240_exercises_solutions
|
167bb9938515870ec1f01853933edc3b55937bff
|
[
"MIT"
] | null | null | null |
1. Dive into Python/1.21. Exercise Grading in CENG314.py
|
ahmetutkuozkan/my_ceng240_exercises_solutions
|
167bb9938515870ec1f01853933edc3b55937bff
|
[
"MIT"
] | null | null | null |
1. Dive into Python/1.21. Exercise Grading in CENG314.py
|
ahmetutkuozkan/my_ceng240_exercises_solutions
|
167bb9938515870ec1f01853933edc3b55937bff
|
[
"MIT"
] | null | null | null |
g1 = int(input()); g2 = int(input()); g3 = int(input()); g4 = int(input()); g5 = int(input());
print((g1+g2+g3+g4+g5-min(g1,g2,g3,g4,g5))/8)
| 70.5
| 95
| 0.560284
| 28
| 141
| 2.821429
| 0.357143
| 0.506329
| 0.151899
| 0.202532
| 0.253165
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128
| 0.113475
| 141
| 2
| 96
| 70.5
| 0.504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
3045f8cef687c8b569284017ff047114def1211a
| 234
|
py
|
Python
|
api/invitations/exceptions.py
|
liobrdev/simplekanban
|
ececbe15cd34aa53e7d37564879a8c14827e0ebb
|
[
"MIT"
] | null | null | null |
api/invitations/exceptions.py
|
liobrdev/simplekanban
|
ececbe15cd34aa53e7d37564879a8c14827e0ebb
|
[
"MIT"
] | null | null | null |
api/invitations/exceptions.py
|
liobrdev/simplekanban
|
ececbe15cd34aa53e7d37564879a8c14827e0ebb
|
[
"MIT"
] | null | null | null |
from django.db import IntegrityError
class InvitedAlreadyMember(IntegrityError):
def __init__(self, email):
self.email = email
def __str__(self):
return f'<{self.email}> is already a member of this project.'
| 26
| 69
| 0.700855
| 29
| 234
| 5.37931
| 0.724138
| 0.173077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209402
| 234
| 9
| 69
| 26
| 0.843243
| 0
| 0
| 0
| 0
| 0
| 0.217021
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
305bac82d1c20fb14ff96c24e4759b9c235a7aaa
| 42
|
py
|
Python
|
watersnake/__init__.py
|
bjhockley/watersnake
|
a06d2eb8dec0207ee329dc6ab574ba1a347e9522
|
[
"MIT"
] | null | null | null |
watersnake/__init__.py
|
bjhockley/watersnake
|
a06d2eb8dec0207ee329dc6ab574ba1a347e9522
|
[
"MIT"
] | null | null | null |
watersnake/__init__.py
|
bjhockley/watersnake
|
a06d2eb8dec0207ee329dc6ab574ba1a347e9522
|
[
"MIT"
] | null | null | null |
"""module __init__ file for watersnake"""
| 21
| 41
| 0.738095
| 5
| 42
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 1
| 42
| 42
| 0.72973
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
307847be6d58b8759f76299b89b86382c867cbb0
| 265
|
py
|
Python
|
learn_uwsgi/dev.py
|
Carsten-Leue/learn-uwsgi
|
0363b81da20d4faed29a80126cc5c9a1e2035cfc
|
[
"MIT"
] | null | null | null |
learn_uwsgi/dev.py
|
Carsten-Leue/learn-uwsgi
|
0363b81da20d4faed29a80126cc5c9a1e2035cfc
|
[
"MIT"
] | null | null | null |
learn_uwsgi/dev.py
|
Carsten-Leue/learn-uwsgi
|
0363b81da20d4faed29a80126cc5c9a1e2035cfc
|
[
"MIT"
] | null | null | null |
from learn_uwsgi.iplink.mock import createIpLinkMock
from learn_uwsgi.iplink.shell import createIpLinkShell
from logging import Logger
from learn_uwsgi.routes import create_routes
def create_dev_routes(logger: Logger):
create_routes(createIpLinkMock(logger))
| 29.444444
| 54
| 0.85283
| 35
| 265
| 6.257143
| 0.428571
| 0.123288
| 0.191781
| 0.182648
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098113
| 265
| 8
| 55
| 33.125
| 0.916318
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.666667
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
061f99144deb6063d2449993e477f2e34c061f5f
| 17,905
|
py
|
Python
|
lib/datasets/cityscapes.py
|
scenarios/VAE-2
|
3dfbaeea9fa29f88805e728cd26ce35cb5586b1c
|
[
"MIT"
] | 5
|
2021-01-28T15:43:06.000Z
|
2021-12-31T02:55:11.000Z
|
lib/datasets/cityscapes.py
|
scenarios/VAE-2
|
3dfbaeea9fa29f88805e728cd26ce35cb5586b1c
|
[
"MIT"
] | null | null | null |
lib/datasets/cityscapes.py
|
scenarios/VAE-2
|
3dfbaeea9fa29f88805e728cd26ce35cb5586b1c
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Ke Sun (sunk@mail.ustc.edu.cn)
# ------------------------------------------------------------------------------
import os
import logging
import cv2
import numpy as np
from PIL import Image
import torch
from torch.nn import functional as F
from .base_dataset import BaseDataset
from zipfile import ZipFile
class Cityscapes(BaseDataset):
def __init__(self,
root,
list_path,
num_samples=None,
num_classes=19,
multi_scale=True,
flip=True,
ignore_label=-1,
base_size=2048,
crop_size=(512, 1024),
center_crop_test=False,
downsample_rate=1,
scale_factor=16,
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]):
super(Cityscapes, self).__init__(ignore_label, base_size,
crop_size, downsample_rate, scale_factor, mean, std,)
self.root = root
self.list_path = list_path
self.num_classes = num_classes
self.class_weights = torch.FloatTensor([0.8373, 0.918, 0.866, 1.0345,
1.0166, 0.9969, 0.9754, 1.0489,
0.8786, 1.0023, 0.9539, 0.9843,
1.1116, 0.9037, 1.0865, 1.0955,
1.0865, 1.1529, 1.0507]).cuda()
self.multi_scale = multi_scale
self.flip = flip
self.center_crop_test = center_crop_test
self.img_list = [line.strip().split() for line in open(root+list_path)]
self.files = self.read_files()
if num_samples:
self.files = self.files[:num_samples]
self.label_mapping = {-1: ignore_label, 0: ignore_label,
1: ignore_label, 2: ignore_label,
3: ignore_label, 4: ignore_label,
5: ignore_label, 6: ignore_label,
7: 0, 8: 1, 9: ignore_label,
10: ignore_label, 11: 2, 12: 3,
13: 4, 14: ignore_label, 15: ignore_label,
16: ignore_label, 17: 5, 18: ignore_label,
19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11,
25: 12, 26: 13, 27: 14, 28: 15,
29: ignore_label, 30: ignore_label,
31: 16, 32: 17, 33: 18}
def read_files(self):
files = []
if 'test' in self.list_path:
for item in self.img_list:
image_path = item
name = os.path.splitext(os.path.basename(image_path[0]))[0]
files.append({
"img": image_path[0],
"name": name,
})
else:
for item in self.img_list:
image_path, label_path = item
name = os.path.splitext(os.path.basename(label_path))[0]
files.append({
"img": image_path,
"label": label_path,
"name": name,
"weight": 1
})
return files
def convert_label(self, label, inverse=False):
temp = label.copy()
if inverse:
for v, k in self.label_mapping.items():
label[temp == k] = v
else:
for k, v in self.label_mapping.items():
label[temp == k] = v
return label
def __getitem__(self, index):
item = self.files[index]
name = item["name"]
image = cv2.imread(os.path.join(self.root,'cityscapes',item["img"]),
cv2.IMREAD_COLOR)
size = image.shape
if 'test' in self.list_path:
image = self.input_transform(image)
image = image.transpose((2, 0, 1))
return image.copy(), np.array(size), name
label = cv2.imread(os.path.join(self.root,'cityscapes',item["label"]),
cv2.IMREAD_GRAYSCALE)
label = self.convert_label(label)
image, label = self.gen_sample(image, label,
self.multi_scale, self.flip,
self.center_crop_test)
return image.copy(), label.copy(), np.array(size), name
def multi_scale_inference(self, model, image, scales=[1], flip=False):
batch, _, ori_height, ori_width = image.size()
assert batch == 1, "only supporting batchsize 1."
image = image.numpy()[0].transpose((1,2,0)).copy()
stride_h = np.int(self.crop_size[0] * 1.0)
stride_w = np.int(self.crop_size[1] * 1.0)
final_pred = torch.zeros([1, self.num_classes,
ori_height,ori_width]).cuda()
for scale in scales:
new_img = self.multi_scale_aug(image=image,
rand_scale=scale,
rand_crop=False)
height, width = new_img.shape[:-1]
if scale <= 1.0:
new_img = new_img.transpose((2, 0, 1))
new_img = np.expand_dims(new_img, axis=0)
new_img = torch.from_numpy(new_img)
preds = self.inference(model, new_img, flip)
preds = preds[:, :, 0:height, 0:width]
else:
new_h, new_w = new_img.shape[:-1]
rows = np.int(np.ceil(1.0 * (new_h -
self.crop_size[0]) / stride_h)) + 1
cols = np.int(np.ceil(1.0 * (new_w -
self.crop_size[1]) / stride_w)) + 1
preds = torch.zeros([1, self.num_classes,
new_h,new_w]).cuda()
count = torch.zeros([1,1, new_h, new_w]).cuda()
for r in range(rows):
for c in range(cols):
h0 = r * stride_h
w0 = c * stride_w
h1 = min(h0 + self.crop_size[0], new_h)
w1 = min(w0 + self.crop_size[1], new_w)
h0 = max(int(h1 - self.crop_size[0]), 0)
w0 = max(int(w1 - self.crop_size[1]), 0)
crop_img = new_img[h0:h1, w0:w1, :]
crop_img = crop_img.transpose((2, 0, 1))
crop_img = np.expand_dims(crop_img, axis=0)
crop_img = torch.from_numpy(crop_img)
pred = self.inference(model, crop_img, flip)
preds[:,:,h0:h1,w0:w1] += pred[:,:, 0:h1-h0, 0:w1-w0]
count[:,:,h0:h1,w0:w1] += 1
preds = preds / count
preds = preds[:,:,:height,:width]
preds = F.upsample(preds, (ori_height, ori_width),
mode='bilinear')
final_pred += preds
return final_pred
def get_palette(self, n):
palette = [0] * (n * 3)
for j in range(0, n):
lab = j
palette[j * 3 + 0] = 0
palette[j * 3 + 1] = 0
palette[j * 3 + 2] = 0
i = 0
while lab:
palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i))
palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i))
palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i))
i += 1
lab >>= 3
return palette
def save_pred(self, preds, sv_path, name):
palette = self.get_palette(256)
preds = preds.cpu().numpy().copy()
preds = np.asarray(np.argmax(preds, axis=1), dtype=np.uint8)
for i in range(preds.shape[0]):
pred = self.convert_label(preds[i], inverse=True)
save_img = Image.fromarray(pred)
save_img.putpalette(palette)
save_img.save(os.path.join(sv_path, name[i]+'.png'))
class CityscapesSequence(BaseDataset):
def __init__(self,
root,
list_path,
num_samples=None,
num_classes=19,
multi_scale=True,
flip=True,
ignore_label=-1,
base_size=2048,
crop_size=(512, 1024),
center_crop_test=False,
downsample_rate=1,
scale_factor=16,
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225],
clip_length = 3,
clip_num = 3,
random_pos = True,
image_tmpl = '{:06d}_leftImg8bit.png',
fixed_length = None,
is_baseline = None):
super(CityscapesSequence, self).__init__(ignore_label, base_size,
crop_size, downsample_rate, scale_factor, mean, std, )
self.root = root
self.list_path = list_path
self.num_classes = num_classes
self.class_weights = torch.FloatTensor([0.8373, 0.918, 0.866, 1.0345,
1.0166, 0.9969, 0.9754, 1.0489,
0.8786, 1.0023, 0.9539, 0.9843,
1.1116, 0.9037, 1.0865, 1.0955,
1.0865, 1.1529, 1.0507]).cuda()
self.clip_length = clip_length
self.clip_num = clip_num
self.multi_scale = multi_scale
self.flip = flip
self.center_crop_test = center_crop_test
self.random_pos = random_pos
self.image_tmpl = image_tmpl
self.sequence_list = [line.strip() for line in open(list_path)]
self.files = self.read_files()
if num_samples:
self.files = self.files[:num_samples]
self.label_mapping = {-1: ignore_label, 0: ignore_label,
1: ignore_label, 2: ignore_label,
3: ignore_label, 4: ignore_label,
5: ignore_label, 6: ignore_label,
7: 0, 8: 1, 9: ignore_label,
10: ignore_label, 11: 2, 12: 3,
13: 4, 14: ignore_label, 15: ignore_label,
16: ignore_label, 17: 5, 18: ignore_label,
19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11,
25: 12, 26: 13, 27: 14, 28: 15,
29: ignore_label, 30: ignore_label,
31: 16, 32: 17, 33: 18}
def read_files(self):
files = []
for item in self.sequence_list:
sequence_path = item
name = os.path.splitext(os.path.basename(sequence_path))[0]
files.append({
"seq": sequence_path,
"name": name,
})
return files
def convert_label(self, label, inverse=False):
temp = label.copy()
if inverse:
for v, k in self.label_mapping.items():
label[temp == k] = v
else:
for k, v in self.label_mapping.items():
label[temp == k] = v
return label
def _load_image(self, idx, zip_f):
try:
im = Image.open(zip_f.open(self.image_tmpl.format(idx))).convert('RGB')
except Exception as e:
new_idx = idx - 1 if idx > 0 else idx + 1
logging.error('Failed to open {}, open {} instead'.format(self.image_tmpl.format(idx), self.image_tmpl.format(new_idx)))
im = Image.open(zip_f.open(self.image_tmpl.format(new_idx))).convert('RGB')
return im
def get(self, path):
images = list()
with ZipFile(os.path.join(self.root, path), mode='r') as zip_f:
sample_pos = np.random.randint(0, max(1, 30 - self.clip_length * self.clip_num + 1)) if self.random_pos \
else max(0, 30 - self.clip_length * self.clip_num - 1)
for p in range(sample_pos, sample_pos + self.clip_length * self.clip_num):
seg_imgs = np.asarray(self._load_image(p, zip_f).resize((self.crop_size[1], self.crop_size[0])), dtype=np.float32)
images.append(seg_imgs)
return images
def input_transform(self, sequence):
sequence = np.concatenate(sequence, axis=-1)
sequence = sequence / 255.0
sequence -= self.mean * self.clip_length * self.clip_num
sequence /= self.std * self.clip_length * self.clip_num
return sequence
def __getitem__(self, index):
item = self.files[index]
name = item["name"]
sequence = self.get(item['seq'])
sequence = np.transpose(self.input_transform(sequence), (2, 0, 1))
sequences = [sequence[i * (self.clip_length * 3) : (i+1) * (self.clip_length * 3)].copy() for i in range(0, self.clip_num)]
return sequences, name
def multi_scale_inference(self, model, image, scales=[1], flip=False):
batch, _, ori_height, ori_width = image.size()
assert batch == 1, "only supporting batchsize 1."
image = image.numpy()[0].transpose((1, 2, 0)).copy()
stride_h = np.int(self.crop_size[0] * 1.0)
stride_w = np.int(self.crop_size[1] * 1.0)
final_pred = torch.zeros([1, self.num_classes,
ori_height, ori_width]).cuda()
for scale in scales:
new_img = self.multi_scale_aug(image=image,
rand_scale=scale,
rand_crop=False)
height, width = new_img.shape[:-1]
if scale <= 1.0:
new_img = new_img.transpose((2, 0, 1))
new_img = np.expand_dims(new_img, axis=0)
new_img = torch.from_numpy(new_img)
preds = self.inference(model, new_img, flip)
preds = preds[:, :, 0:height, 0:width]
else:
new_h, new_w = new_img.shape[:-1]
rows = np.int(np.ceil(1.0 * (new_h -
self.crop_size[0]) / stride_h)) + 1
cols = np.int(np.ceil(1.0 * (new_w -
self.crop_size[1]) / stride_w)) + 1
preds = torch.zeros([1, self.num_classes,
new_h, new_w]).cuda()
count = torch.zeros([1, 1, new_h, new_w]).cuda()
for r in range(rows):
for c in range(cols):
h0 = r * stride_h
w0 = c * stride_w
h1 = min(h0 + self.crop_size[0], new_h)
w1 = min(w0 + self.crop_size[1], new_w)
h0 = max(int(h1 - self.crop_size[0]), 0)
w0 = max(int(w1 - self.crop_size[1]), 0)
crop_img = new_img[h0:h1, w0:w1, :]
crop_img = crop_img.transpose((2, 0, 1))
crop_img = np.expand_dims(crop_img, axis=0)
crop_img = torch.from_numpy(crop_img)
pred = self.inference(model, crop_img, flip)
preds[:, :, h0:h1, w0:w1] += pred[:, :, 0:h1 - h0, 0:w1 - w0]
count[:, :, h0:h1, w0:w1] += 1
preds = preds / count
preds = preds[:, :, :height, :width]
preds = F.upsample(preds, (ori_height, ori_width),
mode='bilinear')
final_pred += preds
return final_pred
def get_palette(self, n):
palette = [0] * (n * 3)
for j in range(0, n):
lab = j
palette[j * 3 + 0] = 0
palette[j * 3 + 1] = 0
palette[j * 3 + 2] = 0
i = 0
while lab:
palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i))
palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i))
palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i))
i += 1
lab >>= 3
return palette
def save_pred(self, preds, sv_path, name):
palette = self.get_palette(256)
preds = preds.cpu().numpy().copy()
preds = np.asarray(np.argmax(preds, axis=1), dtype=np.uint8)
for i in range(preds.shape[0]):
pred = self.convert_label(preds[i], inverse=True)
save_img = Image.fromarray(pred)
save_img.putpalette(palette)
save_img.save(os.path.join(sv_path, name[i] + '.png'))
if __name__ == '__main__':
import sys
sys.path.insert(0, "/home/yzzhou/workspace/code/video-prediction/lib/datasets/")
from base_dataset import BaseDataset
train_dataset = CityscapesSequence(
root='/data/yizhou/cityscape/leftImg8bit_sequence_resized_zip/',
list_path='/data/yizhou/cityscape/test_list.text',
num_samples=None,
num_classes=3,
multi_scale=False,
flip=False,
base_size=512,
crop_size=(256, 512))
trainloader = torch.utils.data.DataLoader(
train_dataset,
batch_size=1,
shuffle=False,
num_workers=1,
pin_memory=True,
drop_last=True)
for i_iter, s in enumerate(trainloader):
st, s2t, s3t, name = s
im = Image.fromarray(np.transpose(np.uint8(st[0][0:3]), (1, 2, 0)))
im.save('test.png')
| 41.16092
| 132
| 0.473108
| 2,167
| 17,905
| 3.732349
| 0.136133
| 0.048961
| 0.026706
| 0.014466
| 0.753833
| 0.728116
| 0.70413
| 0.70413
| 0.690529
| 0.665554
| 0
| 0.068303
| 0.401452
| 17,905
| 434
| 133
| 41.25576
| 0.686386
| 0.014242
| 0
| 0.728
| 0
| 0
| 0.022048
| 0.009806
| 0
| 0
| 0
| 0
| 0.005333
| 1
| 0.045333
| false
| 0
| 0.029333
| 0
| 0.117333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0651363901f8a71dd2c42c685e0a44470afbd9cd
| 103
|
py
|
Python
|
search-insert-position/Solution.6587756.py
|
rahul-ramadas/leetcode
|
6c84c2333a613729361c5cdb63dc3fc80203b340
|
[
"MIT"
] | null | null | null |
search-insert-position/Solution.6587756.py
|
rahul-ramadas/leetcode
|
6c84c2333a613729361c5cdb63dc3fc80203b340
|
[
"MIT"
] | 1
|
2016-09-11T22:26:17.000Z
|
2016-09-13T01:49:48.000Z
|
search-insert-position/Solution.6587756.py
|
rahul-ramadas/leetcode
|
6c84c2333a613729361c5cdb63dc3fc80203b340
|
[
"MIT"
] | null | null | null |
class Solution:
def searchInsert(self, A, target):
return bisect.bisect_left(A, target)
| 25.75
| 45
| 0.669903
| 13
| 103
| 5.230769
| 0.769231
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.23301
| 103
| 3
| 46
| 34.333333
| 0.860759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
06737af4719e3bc932b559060aaa9f99f62c96d2
| 132
|
py
|
Python
|
pinry/pins/admin.py
|
Jenso/ProjectY
|
267a349ce45d3399ecac71d81b09db4d6943a329
|
[
"BSD-2-Clause",
"Unlicense"
] | null | null | null |
pinry/pins/admin.py
|
Jenso/ProjectY
|
267a349ce45d3399ecac71d81b09db4d6943a329
|
[
"BSD-2-Clause",
"Unlicense"
] | null | null | null |
pinry/pins/admin.py
|
Jenso/ProjectY
|
267a349ce45d3399ecac71d81b09db4d6943a329
|
[
"BSD-2-Clause",
"Unlicense"
] | null | null | null |
from django.contrib import admin
from pinry.pins.models import Category, Pin
admin.site.register(Category)
admin.site.register(Pin)
| 26.4
| 43
| 0.825758
| 20
| 132
| 5.45
| 0.6
| 0.165138
| 0.311927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 132
| 4
| 44
| 33
| 0.900826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ebf46c046d0a6084a9884b8af2a208157180556b
| 91
|
py
|
Python
|
comments/admin.py
|
etovrodeya/hotel_project2
|
0f81856b48baeae1b63f994f796cedd68ad10b03
|
[
"MIT"
] | 1
|
2020-07-29T20:16:17.000Z
|
2020-07-29T20:16:17.000Z
|
comments/admin.py
|
etovrodeya/hotel_project2
|
0f81856b48baeae1b63f994f796cedd68ad10b03
|
[
"MIT"
] | null | null | null |
comments/admin.py
|
etovrodeya/hotel_project2
|
0f81856b48baeae1b63f994f796cedd68ad10b03
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Comment
admin.site.register(Comment)
| 18.2
| 32
| 0.824176
| 13
| 91
| 5.769231
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10989
| 91
| 4
| 33
| 22.75
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
231a7fbf7a63544baf0b98f6a3f17bc0547c4ae5
| 712
|
py
|
Python
|
music-app/music/models.py
|
izzywrubel/COMP333-project-2
|
00a7f2b2bd3df24f7509d10327568cc6a812ece5
|
[
"MIT"
] | null | null | null |
music-app/music/models.py
|
izzywrubel/COMP333-project-2
|
00a7f2b2bd3df24f7509d10327568cc6a812ece5
|
[
"MIT"
] | null | null | null |
music-app/music/models.py
|
izzywrubel/COMP333-project-2
|
00a7f2b2bd3df24f7509d10327568cc6a812ece5
|
[
"MIT"
] | null | null | null |
from django.db import models
class artists(models.Model):
song = models.CharField(primary_key = True, max_length=200)
artist = models.CharField(max_length=200)
class ratings(models.Model):
id = models.IntegerField(primary_key = True)
username = models.CharField(max_length=200)
song = models.CharField(max_length=200)
rating = models.IntegerField(default=0)
class users(models.Model):
username = models.CharField(primary_key = True, max_length=200)
password = models.CharField(max_length=200)
class genres(models.Model):
song = models.CharField(max_length=200)
artist = models.CharField(primary_key = True, max_length=200)
genre = models.CharField(max_length=200)
| 33.904762
| 67
| 0.744382
| 95
| 712
| 5.442105
| 0.294737
| 0.261122
| 0.208897
| 0.27853
| 0.659574
| 0.504836
| 0.237911
| 0.237911
| 0
| 0
| 0
| 0.046053
| 0.146067
| 712
| 20
| 68
| 35.6
| 0.804276
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.0625
| 0.0625
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
2354ef5b72940e31c6fa59d92fcdd07418771edb
| 244
|
py
|
Python
|
Chapter5_module_package_program/Section5.3_module_and_import/report.py
|
skatsuta/introducing-python
|
945fc84ba58aaa2602e454890c8c6f26e403660e
|
[
"MIT"
] | null | null | null |
Chapter5_module_package_program/Section5.3_module_and_import/report.py
|
skatsuta/introducing-python
|
945fc84ba58aaa2602e454890c8c6f26e403660e
|
[
"MIT"
] | null | null | null |
Chapter5_module_package_program/Section5.3_module_and_import/report.py
|
skatsuta/introducing-python
|
945fc84ba58aaa2602e454890c8c6f26e403660e
|
[
"MIT"
] | null | null | null |
"""This module provides the weather report."""
def get_description():
    """Return one weather description chosen uniformly at random."""
    import random

    outcomes = ['rain', 'snow', 'sleet', 'fog', 'sun', 'who knows']
    return random.choice(outcomes)
| 27.111111
| 72
| 0.655738
| 28
| 244
| 5.678571
| 0.857143
| 0.238994
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184426
| 244
| 8
| 73
| 30.5
| 0.798995
| 0.270492
| 0
| 0
| 0
| 0
| 0.167665
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
236dfca3b37cc5c1191e5631c0d4f408c064e660
| 1,450
|
py
|
Python
|
sudoku/four_pyramid.py
|
billsioros/sudoku-generator-n-solver
|
0a6488cdd6541b98898cdd43cb4266b289e98a83
|
[
"MIT"
] | null | null | null |
sudoku/four_pyramid.py
|
billsioros/sudoku-generator-n-solver
|
0a6488cdd6541b98898cdd43cb4266b289e98a83
|
[
"MIT"
] | null | null | null |
sudoku/four_pyramid.py
|
billsioros/sudoku-generator-n-solver
|
0a6488cdd6541b98898cdd43cb4266b289e98a83
|
[
"MIT"
] | null | null | null |
from pulp import *
from sudoku import classic
class FourPyramidSudokuLP(classic.SudokuLP):
def __init__(self, matrix):
super().__init__(matrix)
for k in range(1, self.n + 1):
self += lpSum([
lpSum([
self.x[r - 1][c - 1][k - 1]
for c in range(self.m + r, self.n - r + 1)
]) for r in range(1, self.m + 1)
]) == 1, f"in pyramid 1 only one {k + 1}"
for k in range(1, self.n + 1):
self += lpSum([
lpSum([
self.x[r - 1][c - 1][k - 1]
for r in range(1 + c, self.n - self.m + 1 - c + 1)
]) for c in range(1, self.m + 1)
]) == 1, f"in pyramid 2 only one {k + 1}"
for k in range(1, self.n + 1):
self += lpSum([
lpSum([
self.x[r - 1][c - 1][k - 1]
for c in range(self.n + self.m - 1 - r, r - self.m + 1)
]) for r in range(self.n - self.m + 1, self.n + 1)
]) == 1, f"in pyramid 3 only one {k + 1}"
for k in range(1, self.n + 1):
self += lpSum([
lpSum([
self.x[r - 1][c - 1][k - 1]
for r in range(self.n + self.m + 1 - c, c - 1 + 1)
]) for c in range(self.n - self.m + 1, self.n + 1)
]) == 1, f"in pyramid 4 only one {k + 1}"
| 33.72093
| 75
| 0.393103
| 221
| 1,450
| 2.542986
| 0.140271
| 0.149466
| 0.085409
| 0.128114
| 0.752669
| 0.743772
| 0.709964
| 0.709964
| 0.709964
| 0.709964
| 0
| 0.062262
| 0.457241
| 1,450
| 42
| 76
| 34.52381
| 0.651842
| 0
| 0
| 0.484848
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.060606
| 0
| 0.121212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
88bf88705521642d5ab41822892d7ec98f7eef3a
| 40
|
py
|
Python
|
python/basic-python/guided_exercises/ch1_variables/hello_world.py
|
codingandcommunity/rise_high
|
042d07cee1119b46f723a9c763b8ee3d0fc4ac2c
|
[
"MIT"
] | 2
|
2019-08-12T23:19:48.000Z
|
2019-08-15T00:24:01.000Z
|
python/basic-python/guided_exercises/ch1_variables/hello_world.py
|
codingandcommunity/rise_high
|
042d07cee1119b46f723a9c763b8ee3d0fc4ac2c
|
[
"MIT"
] | null | null | null |
python/basic-python/guided_exercises/ch1_variables/hello_world.py
|
codingandcommunity/rise_high
|
042d07cee1119b46f723a9c763b8ee3d0fc4ac2c
|
[
"MIT"
] | null | null | null |
# print "Hello, World!" to the terminal
| 20
| 39
| 0.7
| 6
| 40
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 40
| 1
| 40
| 40
| 0.848485
| 0.925
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
88ca41dafcab03a6dfbb0899b17daf1859a479f9
| 65
|
py
|
Python
|
mrc_insar_common/model/__init__.py
|
UAMRC-3vG/mrc_insar_common
|
89171e6387cccc07d08ef802b0f0f807eca09b1b
|
[
"Apache-2.0"
] | 1
|
2022-02-16T03:55:34.000Z
|
2022-02-16T03:55:34.000Z
|
mrc_insar_common/model/__init__.py
|
UAMRC-3vG/MRC-InSAR-Common
|
89171e6387cccc07d08ef802b0f0f807eca09b1b
|
[
"Apache-2.0"
] | null | null | null |
mrc_insar_common/model/__init__.py
|
UAMRC-3vG/MRC-InSAR-Common
|
89171e6387cccc07d08ef802b0f0f807eca09b1b
|
[
"Apache-2.0"
] | null | null | null |
from .dncnn.dncnn import DnCNN
from .unet.unet_model import UNet
| 21.666667
| 33
| 0.815385
| 11
| 65
| 4.727273
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 65
| 2
| 34
| 32.5
| 0.912281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
88db744aa5201da6ac36f1c55f23dcb67aa30c05
| 106
|
py
|
Python
|
fibonacci.py
|
simatei/CS-problems
|
b9b3aac98745680a89f7097a1147367cc934fbca
|
[
"MIT"
] | null | null | null |
fibonacci.py
|
simatei/CS-problems
|
b9b3aac98745680a89f7097a1147367cc934fbca
|
[
"MIT"
] | null | null | null |
fibonacci.py
|
simatei/CS-problems
|
b9b3aac98745680a89f7097a1147367cc934fbca
|
[
"MIT"
] | null | null | null |
def fib1(n: int) -> int:
    """Return the n-th Fibonacci number (fib1(0) == 0, fib1(1) == 1).

    The original naive double recursion ran in exponential time; this
    iterative version runs in O(n) time and O(1) extra space while
    returning identical values for every input (for n < 2, including
    negative n, it returns n unchanged, matching the original).
    """
    # base case
    if n < 2:
        return n
    prev, curr = 0, 1
    for _ in range(n - 1):
        prev, curr = curr, prev + curr
    return curr
| 15.142857
| 32
| 0.481132
| 19
| 106
| 2.684211
| 0.526316
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 0.367925
| 106
| 6
| 33
| 17.666667
| 0.671642
| 0.084906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
88f1a171328ab52d5df3b53f9d432ec88316ed9e
| 109
|
py
|
Python
|
coding_intereview/1323. Maximum 69 Number.py
|
purusharthmalik/Python-Bootcamp
|
2ed1cf886d1081de200b0fdd4cb4e28008c7e3d1
|
[
"MIT"
] | 2
|
2020-10-03T16:38:10.000Z
|
2021-06-03T11:01:59.000Z
|
coding_intereview/1323. Maximum 69 Number.py
|
purusharthmalik/Python-Bootcamp
|
2ed1cf886d1081de200b0fdd4cb4e28008c7e3d1
|
[
"MIT"
] | null | null | null |
coding_intereview/1323. Maximum 69 Number.py
|
purusharthmalik/Python-Bootcamp
|
2ed1cf886d1081de200b0fdd4cb4e28008c7e3d1
|
[
"MIT"
] | 1
|
2020-10-03T16:38:02.000Z
|
2020-10-03T16:38:02.000Z
|
class Solution:
    """LeetCode 1323: Maximum 69 Number."""

    def maximum69Number(self, num: int) -> int:
        """Return the largest number obtainable by changing at most one
        digit of *num* from 6 to 9.

        Changing the most significant 6 (the first '6' in the decimal
        string) yields the largest gain, so one left-most replacement
        suffices; str.replace with count=1 does exactly that.
        """
        # Bug fix: the original returned the str produced by .replace();
        # the annotated return type (and the problem) require an int.
        return int(str(num).replace('6', '9', 1))
| 27.25
| 47
| 0.605505
| 15
| 109
| 4.4
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.220183
| 109
| 3
| 48
| 36.333333
| 0.717647
| 0
| 0
| 0
| 0
| 0
| 0.018349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
88fd73d60abfea79f73ed106a42ce76d3603cae3
| 103
|
py
|
Python
|
src/metric/__init__.py
|
markkua/ImpliCity
|
2bf80ae1a05a530e0d405ce2057ab5b9c57ea21a
|
[
"MIT"
] | 17
|
2022-02-21T12:25:05.000Z
|
2022-03-23T20:37:37.000Z
|
src/metric/__init__.py
|
markkua/ImpliCity
|
2bf80ae1a05a530e0d405ce2057ab5b9c57ea21a
|
[
"MIT"
] | null | null | null |
src/metric/__init__.py
|
markkua/ImpliCity
|
2bf80ae1a05a530e0d405ce2057ab5b9c57ea21a
|
[
"MIT"
] | 2
|
2022-02-21T21:58:35.000Z
|
2022-03-15T18:26:32.000Z
|
# encoding: utf-8
# Author: Bingxin Ke
# Created: 2021/10/8
from .metrics import *
from .iou import *
| 14.714286
| 22
| 0.68932
| 16
| 103
| 4.4375
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0.184466
| 103
| 6
| 23
| 17.166667
| 0.75
| 0.514563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0022b85501c961d030f6f096a7a20b04e251c53c
| 227
|
py
|
Python
|
components/collector/src/source_collectors/testng/test_cases.py
|
kargaranamir/quality-time
|
1c427c61bee9d31c3526f0a01be2218a7e167c23
|
[
"Apache-2.0"
] | 33
|
2016-01-20T07:35:48.000Z
|
2022-03-14T09:20:51.000Z
|
components/collector/src/source_collectors/testng/test_cases.py
|
kargaranamir/quality-time
|
1c427c61bee9d31c3526f0a01be2218a7e167c23
|
[
"Apache-2.0"
] | 2,410
|
2016-01-22T18:13:01.000Z
|
2022-03-31T16:57:34.000Z
|
components/collector/src/source_collectors/testng/test_cases.py
|
kargaranamir/quality-time
|
1c427c61bee9d31c3526f0a01be2218a7e167c23
|
[
"Apache-2.0"
] | 21
|
2016-01-16T11:49:23.000Z
|
2022-01-14T21:53:22.000Z
|
"""TestNG test cases collector."""
from .tests import TestNGTests # pylint: disable=no-name-in-module
# Thin subclass: all collection behaviour is inherited from TestNGTests.
# NOTE(review): presumably the collector framework selects this class by
# name/metric — confirm against the registration mechanism.
class TestNGTestCases(TestNGTests):  # pylint: disable=too-few-public-methods
    """Collector for TestNG test cases."""
| 28.375
| 77
| 0.735683
| 28
| 227
| 5.964286
| 0.75
| 0.11976
| 0.179641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132159
| 227
| 7
| 78
| 32.428571
| 0.847716
| 0.594714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cc51453db04d31f3ae0c71202f2fbab8b7c97640
| 206
|
py
|
Python
|
SbisFiles/__init__.py
|
PesyCorm/AutomationFiles
|
3afe7cd28e6b472bd822c0974386591408f0d62d
|
[
"MIT"
] | null | null | null |
SbisFiles/__init__.py
|
PesyCorm/AutomationFiles
|
3afe7cd28e6b472bd822c0974386591408f0d62d
|
[
"MIT"
] | null | null | null |
SbisFiles/__init__.py
|
PesyCorm/AutomationFiles
|
3afe7cd28e6b472bd822c0974386591408f0d62d
|
[
"MIT"
] | null | null | null |
from .DemoAuth import AuthPage
from .SbisAccord import AccordSectionSelector
from .TasksSection import TasksSectionSelector
from .TasksOnMe import TasksPageManagement
from .ExecPanel import ControlExecPanel
| 41.2
| 46
| 0.883495
| 20
| 206
| 9.1
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092233
| 206
| 5
| 47
| 41.2
| 0.973262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aebd5a9230f0c5c3cb2a10502bb77eedd3dbf9e7
| 1,460
|
py
|
Python
|
trident/models/__init__.py
|
cronin4392/trident
|
1c1eb01bcde861496ce83e265ff071fc9bcb9db2
|
[
"MIT"
] | 68
|
2020-11-13T06:40:52.000Z
|
2022-03-28T12:40:59.000Z
|
trident/models/__init__.py
|
cronin4392/trident
|
1c1eb01bcde861496ce83e265ff071fc9bcb9db2
|
[
"MIT"
] | 1
|
2021-08-15T17:06:35.000Z
|
2021-11-10T04:42:52.000Z
|
trident/models/__init__.py
|
cronin4392/trident
|
1c1eb01bcde861496ce83e265ff071fc9bcb9db2
|
[
"MIT"
] | 11
|
2020-11-24T13:14:16.000Z
|
2021-12-26T07:41:29.000Z
|
"""trident models"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from trident.backend.common import get_backend
# Select the model implementations matching the active compute backend.
# Each backend-specific module is re-exported under a backend-neutral
# alias (vgg, resnet, ...) so callers can write `models.resnet`
# regardless of backend. If get_backend() returns anything else, no
# model modules are bound at all — importers would then hit
# AttributeError on first use.
if get_backend()=='pytorch':
    from . import pytorch_vgg as vgg
    from . import pytorch_resnet as resnet
    from . import pytorch_senet as senet
    from . import pytorch_densenet as densenet
    from . import pytorch_efficientnet as efficientnet
    #from . import pytorch_efficientnetv2 as efficientnet_v2
    from . import pytorch_mobilenet as mobilenet
    from . import pytorch_deeplab as deeplab
    from . import pytorch_arcfacenet as arcfacenet
    from . import pytorch_mtcnn as mtcnn
    from . import pytorch_rfbnet as rfbnet
    from . import pytorch_ssd as ssd
    from . import pytorch_yolo as yolo
    from . import pytorch_embedded as embedded
    from . import pytorch_inception as inception
    from . import pytorch_visual_transformer as visual_transformer
elif get_backend()=='tensorflow':
    # NOTE(review): the tensorflow branch exposes only a subset of the
    # pytorch aliases (no senet/arcfacenet/ssd/yolo/...), so backend-
    # neutral callers must not assume the full set exists.
    from . import tensorflow_vgg as vgg
    from . import tensorflow_resnet as resnet
    from . import tensorflow_efficientnet as efficientnet
    from . import tensorflow_densenet as densenet
    from . import tensorflow_mobilenet as mobilenet
    from . import tensorflow_deeplab as deeplab
    from . import tensorflow_mtcnn as mtcnn
#__all__ = ['vgg','resnet','densenet','efficientnet','mobilenet','gan','deeplab','arcfacenet','mtcnn','rfbnet','ssd','yolo']
| 39.459459
| 124
| 0.763699
| 185
| 1,460
| 5.772973
| 0.2
| 0.215356
| 0.254682
| 0.022472
| 0.303371
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001667
| 0.178082
| 1,460
| 36
| 125
| 40.555556
| 0.888333
| 0.132192
| 0
| 0
| 0
| 0
| 0.013514
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.928571
| 0
| 0.928571
| 0.035714
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aec23a8b5726f31bcd0b689ab48393d866536eaa
| 20
|
py
|
Python
|
testsuite/modulegraph-dir/multi_level_star_import.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 9
|
2020-03-22T14:48:01.000Z
|
2021-05-30T12:18:12.000Z
|
testsuite/modulegraph-dir/multi_level_star_import.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 15
|
2020-01-06T10:02:32.000Z
|
2021-05-28T12:22:44.000Z
|
testsuite/modulegraph-dir/multi_level_star_import.py
|
ronaldoussoren/modulegraph2
|
b6ab1766b0098651b51083235ff8a18a5639128b
|
[
"MIT"
] | 4
|
2020-05-10T18:51:41.000Z
|
2021-04-07T14:03:12.000Z
|
from pkg_a import *
| 10
| 19
| 0.75
| 4
| 20
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
aec4d234f6b9708dafb8b78051f5c57fd2e28e52
| 21,526
|
py
|
Python
|
models/build_model_3d.py
|
AdityaNG/LEAStereo
|
2a17c0f7501b8f8e75709e66fd5504900b758c9f
|
[
"MIT"
] | 186
|
2020-11-30T06:52:26.000Z
|
2022-03-31T12:41:37.000Z
|
models/build_model_3d.py
|
AdityaNG/LEAStereo
|
2a17c0f7501b8f8e75709e66fd5504900b758c9f
|
[
"MIT"
] | 31
|
2020-12-01T07:10:28.000Z
|
2022-02-23T12:28:19.000Z
|
models/build_model_3d.py
|
bhlarson/LEAStereo
|
e3e474703e5ba4009832908dac8af02188e63b03
|
[
"MIT"
] | 40
|
2020-11-30T11:22:47.000Z
|
2022-03-10T01:37:37.000Z
|
import torch.nn as nn
import torch.nn.functional as F
import models.cell_level_search_3d as cell_level_search
from models.genotypes_3d import PRIMITIVES
from models.operations_3d import *
from models.decoding_formulas import Decoder
import pdb
class AutoMatching(nn.Module):
    """Differentiable architecture-search (DARTS-style) matching network.

    Builds a grid of searchable cells over ``num_layers`` layers and four
    resolution levels (named level_3 / level_6 / level_12 / level_24 by
    their apparent downsampling factor). Candidate operations inside each
    cell are mixed by softmax-normalised ``alphas``; data flow between
    resolution levels is mixed by ``betas``. Layers 0-3 progressively
    open up the lower-resolution levels; from layer 4 on all four levels
    are active.
    """

    def __init__(self, num_layers, filter_multiplier=8, block_multiplier=2, step=3, cell=cell_level_search.Cell):
        """Construct the searchable cell grid.

        num_layers        -- number of grid layers (depth of the search space)
        filter_multiplier -- base channel multiplier per resolution level
        block_multiplier  -- channel multiplier applied per cell block
        step              -- number of intermediate nodes per cell
        cell              -- cell class to instantiate (searchable by default)
        """
        super(AutoMatching, self).__init__()
        self.cells = nn.ModuleList()
        self._num_layers = num_layers
        self._step = step
        self._block_multiplier = block_multiplier
        self._filter_multiplier = filter_multiplier
        # Registers 'alphas' and 'betas' as architecture parameters.
        self._initialize_alphas_betas()
        f_initial = int(self._filter_multiplier)
        self._num_end = f_initial * self._block_multiplier
        print('Matching Net block_multiplier:{0}'.format(block_multiplier))
        print('Matching Net filter_multiplier:{0}'.format(filter_multiplier))
        print('Matching Net f_initial:{0}'.format(f_initial))
        # Stem halves the channel count of the concatenated input volume.
        self.stem0 = ConvBR(self._num_end*2, self._num_end, 3, stride=1, padding=1)
        # Cell wiring: each cell(step, block_mult, prev_prev_channels,
        # down_in, same_in, up_in, out_multiplier); -1 / None mark inputs
        # that do not exist yet at this layer.
        for i in range(self._num_layers):
            if i == 0:
                cell1 = cell(self._step, self._block_multiplier, -1,
                             None, f_initial, None,
                             self._filter_multiplier)
                cell2 = cell(self._step, self._block_multiplier, -1,
                             f_initial, None, None,
                             self._filter_multiplier * 2)
                self.cells += [cell1]
                self.cells += [cell2]
            elif i == 1:
                cell1 = cell(self._step, self._block_multiplier, f_initial,
                             None, self._filter_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier)
                cell2 = cell(self._step, self._block_multiplier, -1,
                             self._filter_multiplier, self._filter_multiplier * 2, None,
                             self._filter_multiplier * 2)
                cell3 = cell(self._step, self._block_multiplier, -1,
                             self._filter_multiplier * 2, None, None,
                             self._filter_multiplier * 4)
                self.cells += [cell1]
                self.cells += [cell2]
                self.cells += [cell3]
            elif i == 2:
                cell1 = cell(self._step, self._block_multiplier, self._filter_multiplier,
                             None, self._filter_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier)
                cell2 = cell(self._step, self._block_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier, self._filter_multiplier * 2, self._filter_multiplier * 4,
                             self._filter_multiplier * 2)
                cell3 = cell(self._step, self._block_multiplier, -1,
                             self._filter_multiplier * 2, self._filter_multiplier * 4, None,
                             self._filter_multiplier * 4)
                cell4 = cell(self._step, self._block_multiplier, -1,
                             self._filter_multiplier * 4, None, None,
                             self._filter_multiplier * 8)
                self.cells += [cell1]
                self.cells += [cell2]
                self.cells += [cell3]
                self.cells += [cell4]
            elif i == 3:
                cell1 = cell(self._step, self._block_multiplier, self._filter_multiplier,
                             None, self._filter_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier)
                cell2 = cell(self._step, self._block_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier, self._filter_multiplier * 2, self._filter_multiplier * 4,
                             self._filter_multiplier * 2)
                cell3 = cell(self._step, self._block_multiplier, self._filter_multiplier * 4,
                             self._filter_multiplier * 2, self._filter_multiplier * 4, self._filter_multiplier * 8,
                             self._filter_multiplier * 4)
                cell4 = cell(self._step, self._block_multiplier, -1,
                             self._filter_multiplier * 4, self._filter_multiplier * 8, None,
                             self._filter_multiplier * 8)
                self.cells += [cell1]
                self.cells += [cell2]
                self.cells += [cell3]
                self.cells += [cell4]
            else:
                cell1 = cell(self._step, self._block_multiplier, self._filter_multiplier,
                             None, self._filter_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier)
                cell2 = cell(self._step, self._block_multiplier, self._filter_multiplier * 2,
                             self._filter_multiplier, self._filter_multiplier * 2, self._filter_multiplier * 4,
                             self._filter_multiplier * 2)
                cell3 = cell(self._step, self._block_multiplier, self._filter_multiplier * 4,
                             self._filter_multiplier * 2, self._filter_multiplier * 4, self._filter_multiplier * 8,
                             self._filter_multiplier * 4)
                cell4 = cell(self._step, self._block_multiplier, self._filter_multiplier * 8,
                             self._filter_multiplier * 4, self._filter_multiplier * 8, None,
                             self._filter_multiplier * 8)
                self.cells += [cell1]
                self.cells += [cell2]
                self.cells += [cell3]
                self.cells += [cell4]
        # Output heads: last_3 produces the single-channel matching map;
        # last_6/12/24 step lower-resolution features back up the channel
        # ladder before reusing the higher-resolution heads.
        self.last_3 = ConvBR(self._num_end, 1, 3, 1, 1, bn=False, relu=False)
        self.last_6 = ConvBR(self._num_end*2 , self._num_end, 1, 1, 0)
        self.last_12 = ConvBR(self._num_end*4 , self._num_end*2, 1, 1, 0)
        self.last_24 = ConvBR(self._num_end*8 , self._num_end*4, 1, 1, 0)

    def forward(self, x):
        """Run one weighted forward pass over the searchable cell grid.

        Returns the sum of the matching maps decoded from all four
        resolution levels, each upsampled back to the stem resolution.
        NOTE: always calls .cuda() on the betas buffer, so this forward
        requires a CUDA device.
        """
        # Per-level feature histories; each keeps the outputs produced so
        # far so cells can consume [-1] (previous layer) and [-2].
        self.level_3 = []
        self.level_6 = []
        self.level_12 = []
        self.level_24 = []
        stem = self.stem0(x)
        self.level_3.append(stem)
        count = 0
        # Allocated with randn but every used entry is overwritten below
        # before being read.
        normalized_betas = torch.randn(self._num_layers, 4, 3).cuda()
        # Softmax on alphas and betas
        if torch.cuda.device_count() > 1:
            #print('1')
            img_device = torch.device('cuda', x.get_device())
            normalized_alphas = F.softmax(self.alphas.to(device=img_device), dim=-1)
            # normalized_betas[layer][ith node][0 : ➚, 1: ➙, 2 : ➘]
            # Edge levels only have two outgoing directions, hence the
            # partial slices and the 2/3 rescaling.
            for layer in range(len(self.betas)):
                if layer == 0:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:].to(device=img_device), dim=-1) * (2/3)
                elif layer == 1:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:].to(device=img_device), dim=-1) * (2/3)
                    normalized_betas[layer][1] = F.softmax(self.betas[layer][1].to(device=img_device), dim=-1)
                elif layer == 2:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:].to(device=img_device), dim=-1) * (2/3)
                    normalized_betas[layer][1] = F.softmax(self.betas[layer][1].to(device=img_device), dim=-1)
                    normalized_betas[layer][2] = F.softmax(self.betas[layer][2].to(device=img_device), dim=-1)
                else:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:].to(device=img_device), dim=-1) * (2/3)
                    normalized_betas[layer][1] = F.softmax(self.betas[layer][1].to(device=img_device), dim=-1)
                    normalized_betas[layer][2] = F.softmax(self.betas[layer][2].to(device=img_device), dim=-1)
                    # NOTE(review): slices betas[layer][3][:1] here but
                    # [:2] in the single-GPU branch below — likely a typo
                    # (softmax over one element is constant 1); confirm
                    # which is intended.
                    normalized_betas[layer][3][:2] = F.softmax(self.betas[layer][3][:1].to(device=img_device), dim=-1) * (2/3)
        else:
            normalized_alphas = F.softmax(self.alphas, dim=-1)
            for layer in range(len(self.betas)):
                if layer == 0:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:], dim=-1) * (2/3)
                elif layer == 1:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:], dim=-1) * (2/3)
                    normalized_betas[layer][1] = F.softmax(self.betas[layer][1], dim=-1)
                elif layer == 2:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:], dim=-1) * (2/3)
                    normalized_betas[layer][1] = F.softmax(self.betas[layer][1], dim=-1)
                    normalized_betas[layer][2] = F.softmax(self.betas[layer][2], dim=-1)
                else:
                    normalized_betas[layer][0][1:] = F.softmax(self.betas[layer][0][1:], dim=-1) * (2/3)
                    normalized_betas[layer][1] = F.softmax(self.betas[layer][1], dim=-1)
                    normalized_betas[layer][2] = F.softmax(self.betas[layer][2], dim=-1)
                    normalized_betas[layer][3][:2] = F.softmax(self.betas[layer][3][:2], dim=-1) * (2/3)
        # Main grid traversal: 'count' walks self.cells in the same order
        # __init__ appended them; each cell output is blended with the
        # beta weight of the path that produced it.
        for layer in range(self._num_layers):
            if layer == 0:
                level3_new, = self.cells[count](None, None, self.level_3[-1], None, normalized_alphas)
                count += 1
                level6_new, = self.cells[count](None, self.level_3[-1], None, None, normalized_alphas)
                count += 1
                level3_new = normalized_betas[layer][0][1] * level3_new
                level6_new = normalized_betas[layer][0][2] * level6_new
                self.level_3.append(level3_new)
                self.level_6.append(level6_new)
            elif layer == 1:
                level3_new_1, level3_new_2 = self.cells[count](self.level_3[-2],
                                                               None,
                                                               self.level_3[-1],
                                                               self.level_6[-1],
                                                               normalized_alphas)
                count += 1
                level3_new = normalized_betas[layer][0][1] * level3_new_1 + normalized_betas[layer][1][0] * level3_new_2
                level6_new_1, level6_new_2 = self.cells[count](None,
                                                               self.level_3[-1],
                                                               self.level_6[-1],
                                                               None,
                                                               normalized_alphas)
                count += 1
                level6_new = normalized_betas[layer][0][2] * level6_new_1 + normalized_betas[layer][1][2] * level6_new_2
                level12_new, = self.cells[count](None,
                                                 self.level_6[-1],
                                                 None,
                                                 None,
                                                 normalized_alphas)
                level12_new = normalized_betas[layer][1][2] * level12_new
                count += 1
                self.level_3.append(level3_new)
                self.level_6.append(level6_new)
                self.level_12.append(level12_new)
            elif layer == 2:
                level3_new_1, level3_new_2 = self.cells[count](self.level_3[-2],
                                                               None,
                                                               self.level_3[-1],
                                                               self.level_6[-1],
                                                               normalized_alphas)
                count += 1
                level3_new = normalized_betas[layer][0][1] * level3_new_1 + normalized_betas[layer][1][0] * level3_new_2
                level6_new_1, level6_new_2, level6_new_3 = self.cells[count](self.level_6[-2],
                                                                             self.level_3[-1],
                                                                             self.level_6[-1],
                                                                             self.level_12[-1],
                                                                             normalized_alphas)
                count += 1
                level6_new = normalized_betas[layer][0][2] * level6_new_1 + normalized_betas[layer][1][1] * level6_new_2 + normalized_betas[layer][2][
                    0] * level6_new_3
                level12_new_1, level12_new_2 = self.cells[count](None,
                                                                 self.level_6[-1],
                                                                 self.level_12[-1],
                                                                 None,
                                                                 normalized_alphas)
                count += 1
                level12_new = normalized_betas[layer][1][2] * level12_new_1 + normalized_betas[layer][2][1] * level12_new_2
                level24_new, = self.cells[count](None,
                                                 self.level_12[-1],
                                                 None,
                                                 None,
                                                 normalized_alphas)
                level24_new = normalized_betas[layer][2][2] * level24_new
                count += 1
                self.level_3.append(level3_new)
                self.level_6.append(level6_new)
                self.level_12.append(level12_new)
                self.level_24.append(level24_new)
            elif layer == 3:
                level3_new_1, level3_new_2 = self.cells[count](self.level_3[-2],
                                                               None,
                                                               self.level_3[-1],
                                                               self.level_6[-1],
                                                               normalized_alphas)
                count += 1
                level3_new = normalized_betas[layer][0][1] * level3_new_1 + normalized_betas[layer][1][0] * level3_new_2
                level6_new_1, level6_new_2, level6_new_3 = self.cells[count](self.level_6[-2],
                                                                             self.level_3[-1],
                                                                             self.level_6[-1],
                                                                             self.level_12[-1],
                                                                             normalized_alphas)
                count += 1
                level6_new = normalized_betas[layer][0][2] * level6_new_1 + normalized_betas[layer][1][1] * level6_new_2 + normalized_betas[layer][2][
                    0] * level6_new_3
                level12_new_1, level12_new_2, level12_new_3 = self.cells[count](self.level_12[-2],
                                                                                self.level_6[-1],
                                                                                self.level_12[-1],
                                                                                self.level_24[-1],
                                                                                normalized_alphas)
                count += 1
                level12_new = normalized_betas[layer][1][2] * level12_new_1 + normalized_betas[layer][2][1] * level12_new_2 + normalized_betas[layer][3][
                    0] * level12_new_3
                level24_new_1, level24_new_2 = self.cells[count](None,
                                                                 self.level_12[-1],
                                                                 self.level_24[-1],
                                                                 None,
                                                                 normalized_alphas)
                count += 1
                level24_new = normalized_betas[layer][2][2] * level24_new_1 + normalized_betas[layer][3][1] * level24_new_2
                self.level_3.append(level3_new)
                self.level_6.append(level6_new)
                self.level_12.append(level12_new)
                self.level_24.append(level24_new)
            else:
                # Full grid: identical to layer 3 except level_24 now also
                # receives its own [-2] history as prev-prev input.
                level3_new_1, level3_new_2 = self.cells[count](self.level_3[-2],
                                                               None,
                                                               self.level_3[-1],
                                                               self.level_6[-1],
                                                               normalized_alphas)
                count += 1
                level3_new = normalized_betas[layer][0][1] * level3_new_1 + normalized_betas[layer][1][0] * level3_new_2
                level6_new_1, level6_new_2, level6_new_3 = self.cells[count](self.level_6[-2],
                                                                             self.level_3[-1],
                                                                             self.level_6[-1],
                                                                             self.level_12[-1],
                                                                             normalized_alphas)
                count += 1
                level6_new = normalized_betas[layer][0][2] * level6_new_1 + normalized_betas[layer][1][1] * level6_new_2 + normalized_betas[layer][2][
                    0] * level6_new_3
                level12_new_1, level12_new_2, level12_new_3 = self.cells[count](self.level_12[-2],
                                                                                self.level_6[-1],
                                                                                self.level_12[-1],
                                                                                self.level_24[-1],
                                                                                normalized_alphas)
                count += 1
                level12_new = normalized_betas[layer][1][2] * level12_new_1 + normalized_betas[layer][2][1] * level12_new_2 + normalized_betas[layer][3][
                    0] * level12_new_3
                level24_new_1, level24_new_2 = self.cells[count](self.level_24[-2],
                                                                 self.level_12[-1],
                                                                 self.level_24[-1],
                                                                 None,
                                                                 normalized_alphas)
                count += 1
                level24_new = normalized_betas[layer][2][2] * level24_new_1 + normalized_betas[layer][3][1] * level24_new_2
                self.level_3.append(level3_new)
                self.level_6.append(level6_new)
                self.level_12.append(level12_new)
                self.level_24.append(level24_new)
            # Keep only the last two entries per level to bound memory;
            # cells above never look further back than [-2].
            self.level_3 = self.level_3[-2:]
            self.level_6 = self.level_6[-2:]
            self.level_12 = self.level_12[-2:]
            self.level_24 = self.level_24[-2:]
        #define upsampling
        d, h, w = stem.size()[2], stem.size()[3], stem.size()[4]
        upsample_6 = nn.Upsample(size=stem.size()[2:], mode='trilinear', align_corners=True)
        upsample_12 = nn.Upsample(size=[d//2, h//2, w//2], mode='trilinear', align_corners=True)
        upsample_24 = nn.Upsample(size=[d//4, h//4, w//4], mode='trilinear', align_corners=True)
        # Decode every level to a matching map at stem resolution by
        # chaining the channel-reduction heads and upsamplers.
        result_3 = self.last_3(self.level_3[-1])
        result_6 = self.last_3(upsample_6(self.last_6(self.level_6[-1])))
        result_12 = self.last_3(upsample_6(self.last_6(upsample_12(self.last_12(self.level_12[-1])))))
        result_24 = self.last_3(upsample_6(self.last_6(upsample_12(self.last_12(self.last_24(self.level_24[-1]))))))
        sum_matching_map =result_3 + result_6 + result_12 + result_24
        return sum_matching_map

    def _initialize_alphas_betas(self):
        """Create and register the 'alphas'/'betas' architecture parameters.

        k counts the mixed edges per cell (sum over steps of the fan-in);
        both tensors start as small (1e-3-scaled) gaussian noise.
        """
        k = sum(1 for i in range(self._step) for n in range(2 + i))
        num_ops = len(PRIMITIVES)
        alphas = (1e-3 * torch.randn(k, num_ops)).clone().detach().requires_grad_(True)
        betas = (1e-3 * torch.randn(self._num_layers, 4, 3)).clone().detach().requires_grad_(True)
        self._arch_parameters = [
            alphas,
            betas,
        ]
        self._arch_param_names = [
            'alphas',
            'betas',
        ]
        # Register each as an nn.Parameter so optimizers and .to() see them.
        [self.register_parameter(name, torch.nn.Parameter(param)) for name, param in zip(self._arch_param_names, self._arch_parameters)]

    def arch_parameters(self):
        """Return only the architecture parameters (alphas, betas)."""
        return [param for name, param in self.named_parameters() if name in self._arch_param_names]

    def weight_parameters(self):
        """Return all network weights except the architecture parameters."""
        return [param for name, param in self.named_parameters() if name not in self._arch_param_names]

    def genotype(self):
        """Decode the searched architecture into a genotype.

        NOTE(review): references self.alphas_cell, which is never defined
        in this class (parameters are registered as 'alphas'/'betas') —
        this looks like it would raise AttributeError; confirm whether
        'alphas' was intended.
        """
        decoder = Decoder(self.alphas_cell, self._block_multiplier, self._step)
        return decoder.genotype_decode()
| 54.358586
| 153
| 0.46618
| 2,252
| 21,526
| 4.166963
| 0.061279
| 0.075767
| 0.130009
| 0.049233
| 0.820332
| 0.77387
| 0.742967
| 0.717924
| 0.68702
| 0.658142
| 0
| 0.062142
| 0.43185
| 21,526
| 395
| 154
| 54.496203
| 0.704906
| 0.005017
| 0
| 0.666667
| 0
| 0
| 0.006305
| 0.000981
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.022013
| 0.006289
| 0.056604
| 0.009434
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
aee884a148f55eccdc9f5baa38829f7715f42086
| 103
|
wsgi
|
Python
|
app.wsgi
|
taps1197/Traahi
|
52765e26b844169349de7c5a13da8edcbd6e7d47
|
[
"MIT"
] | 1
|
2019-03-29T11:38:03.000Z
|
2019-03-29T11:38:03.000Z
|
app.wsgi
|
taps1197/Traahi
|
52765e26b844169349de7c5a13da8edcbd6e7d47
|
[
"MIT"
] | null | null | null |
app.wsgi
|
taps1197/Traahi
|
52765e26b844169349de7c5a13da8edcbd6e7d47
|
[
"MIT"
] | null | null | null |
# WSGI entry point: make the application package importable from its
# deployment path, then expose the app object under the name "application"
# that mod_wsgi looks for.
# NOTE(review): presumably `app` is a Flask/WSGI application object
# defined in the blockMakers package — confirm there.
import sys
sys.path.insert(0,'/var/www/html/blockMakers')
from blockMakers import app as application
| 17.166667
| 46
| 0.786408
| 16
| 103
| 5.0625
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.106796
| 103
| 5
| 47
| 20.6
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0.245098
| 0.245098
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aef53bad93830fadb7eb0b24fff1c96d464311a2
| 25
|
py
|
Python
|
eip_bridge/src/eip_bridge/__init__.py
|
marip8/eip_bridge
|
b1ec48d2a16ed7a861f9d2e6473a3a339b82a1c8
|
[
"Apache-2.0"
] | null | null | null |
eip_bridge/src/eip_bridge/__init__.py
|
marip8/eip_bridge
|
b1ec48d2a16ed7a861f9d2e6473a3a339b82a1c8
|
[
"Apache-2.0"
] | null | null | null |
eip_bridge/src/eip_bridge/__init__.py
|
marip8/eip_bridge
|
b1ec48d2a16ed7a861f9d2e6473a3a339b82a1c8
|
[
"Apache-2.0"
] | null | null | null |
from eip_bridge import *
| 12.5
| 24
| 0.8
| 4
| 25
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
aefd1e0b4ec54d36e45961a35d78abc9542dbbfb
| 17,267
|
py
|
Python
|
src/fingerflow/matcher/VerifyNet/verify_net_train_experimental.py
|
jakubarendac/fingerflow
|
a0a53259ec575704d19ae0ae770335536e567583
|
[
"MIT"
] | null | null | null |
src/fingerflow/matcher/VerifyNet/verify_net_train_experimental.py
|
jakubarendac/fingerflow
|
a0a53259ec575704d19ae0ae770335536e567583
|
[
"MIT"
] | null | null | null |
src/fingerflow/matcher/VerifyNet/verify_net_train_experimental.py
|
jakubarendac/fingerflow
|
a0a53259ec575704d19ae0ae770335536e567583
|
[
"MIT"
] | null | null | null |
# pylint: skip-file
import tensorflow as tf
from . import constants, utils
def get_verify_net_model(precision, verify_net_path=None):
    """Build (and optionally load weights for) the Siamese verify network.

    precision       -- minutiae-count variant (15 or 20, per
                       get_embeddings_model's switcher)
    verify_net_path -- optional path to pre-trained weights; when given,
                       weights are loaded after compilation.

    Two inputs share one embedding tower; their embeddings are compared
    with a Euclidean-distance Lambda layer, batch-normalised, and fed to
    a single sigmoid unit (match probability). Compiled with the custom
    contrastive-style loss from utils and Adam(lr=0.001).
    """
    embedding_network = get_embeddings_model(precision)
    input_1 = tf.keras.Input(utils.get_input_shape(precision))
    # x1 = tf.keras.layers.ZeroPadding2D((0, 7))(input_1)
    # x1 = tf.keras.layers.ZeroPadding2D((22, 22))(x1)
    # x1 = tf.keras.layers.Conv2D(3, (3, 3), padding='same')(x1)
    input_2 = tf.keras.Input(utils.get_input_shape(precision))
    # NOTE(review): the dead x2 block below pads input_1, not input_2 —
    # a leftover typo; relevant only if this experiment is revived.
    # x2 = tf.keras.layers.ZeroPadding2D((0, 7))(input_1)
    # x2 = tf.keras.layers.ZeroPadding2D((22, 22))(x2)
    # x2 = tf.keras.layers.Conv2D(3, (3, 3), padding='same')(x2)
    # Weight sharing: the same embedding_network instance processes both
    # inputs, making the two towers Siamese.
    tower_1 = embedding_network(input_1)
    tower_2 = embedding_network(input_2)
    merge_layer = tf.keras.layers.Lambda(utils.euclidean_distance)([tower_1, tower_2])
    normal_layer = tf.keras.layers.BatchNormalization()(merge_layer)
    output_layer = tf.keras.layers.Dense(1, activation="sigmoid")(normal_layer)
    siamese_network = tf.keras.Model(inputs=[input_1, input_2], outputs=output_layer)
    siamese_network.compile(
        loss=utils.verify_net_loss(constants.MARGIN),
        optimizer=tf.keras.optimizers.Adam(0.001),
        metrics=["accuracy"])
    if verify_net_path:
        siamese_network.load_weights(verify_net_path)
        print(f'Verify net weights loaded from {verify_net_path}')
    return siamese_network
def get_embeddings_model(precision):
    """Build the embedding sub-network for a given minutiae precision.

    Args:
        precision: number of minutiae per input; must have a registered
            architecture (currently 15 or 20).

    Returns:
        A ``tf.keras.Model`` mapping one minutiae input to its embedding.

    Raises:
        ValueError: if ``precision`` has no registered architecture.
    """
    builders = {
        15: build_15_minutiae_model,
        20: build_20_minutiae_model
    }

    build_trunk = builders.get(precision)
    if build_trunk is None:
        # Fail fast with a clear message instead of the former
        # "'NoneType' object is not callable" from calling the dict miss.
        raise ValueError(
            f"Unsupported precision {precision}; expected one of {sorted(builders)}")

    inputs = tf.keras.Input(shape=(utils.get_input_shape(precision)))
    x = tf.keras.layers.BatchNormalization()(inputs)
    outputs = build_trunk(x)

    embedding_network = tf.keras.Model(inputs, outputs)
    embedding_network.summary()

    return embedding_network
def KerasResNet50():
    """Build a from-scratch ResNet50 backbone with a pooled 256-way softmax head."""
    backbone = tf.keras.applications.ResNet50(
        weights=None, include_top=False, input_shape=(64, 64, 3),
        classifier_activation="softmax")

    # Head: global pooling -> dropout -> dense projection.
    head = tf.keras.layers.GlobalAveragePooling2D()(backbone.output)
    head = tf.keras.layers.Dropout(0.2)(head)
    predictions = tf.keras.layers.Dense(256, activation='softmax')(head)

    model = tf.keras.Model(inputs=backbone.input, outputs=predictions)
    model.summary()
    return model
# def MergeModel(input_shape):
# X_input = tf.keras.layers.Input(input_shape)
# # X = tf.keras.layers.ZeroPadding2D((3, 3))(X_input)
# # Zero-Padding
# X = tf.keras.layers.ZeroPadding2D((0, 7))(X_input)
# X = tf.keras.layers.ZeroPadding2D((25, 25))(X)
# X = tf.keras.layers.Conv2D(3, (3, 3), padding='same')(X)
# preprocessing_model = tf.keras.Model(inputs=X_input, outputs=X)
# keras_model = KerasModel()
# concatenated = tf.keras.layers.merge.concatenate([model1_out, model2_out])
def build_15_minutiae_model(x):
    """Embedding trunk for 15-minutiae inputs.

    Two identical stages of 1-D convolution, max pooling and dropout.
    """
    for filter_count in (64, 64):
        x = tf.keras.layers.Conv1D(filter_count, 3, activation="relu")(x)
        x = tf.keras.layers.MaxPooling1D(2)(x)
        x = tf.keras.layers.Dropout(0.2)(x)
    return x
def build_20_minutiae_model(x):
    """Embedding trunk for 20-minutiae inputs.

    Pads the minutiae map, applies two strided 1-D convolutions with a
    2-D max pooling, two L2-regularized dense layers under heavy dropout,
    then flattens and projects to a 10-unit sigmoid embedding.

    NOTE(review): the mix of 1-D convolutions with 2-D padding/pooling is
    preserved from the original experiment — confirm the intended input
    rank before reusing this architecture.
    """
    # Asymmetric padding of the second spatial axis (6 before, 5 after).
    x = tf.keras.layers.ZeroPadding2D((0, (6, 5)))(x)

    x = tf.keras.layers.Conv1D(64, 3, 2, activation="relu")(x)
    x = tf.keras.layers.Conv1D(64, 3, 2, activation="relu")(x)
    x = tf.keras.layers.MaxPooling2D(2)(x)

    x = tf.keras.layers.Dense(256,
                              kernel_regularizer=tf.keras.regularizers.l2(l2=0.0001),
                              activation='relu')(x)
    x = tf.keras.layers.Dropout(0.6)(x)
    x = tf.keras.layers.Dense(128,
                              kernel_regularizer=tf.keras.regularizers.l2(l2=0.0001),
                              activation='relu')(x)
    x = tf.keras.layers.Dropout(0.6)(x)

    x = tf.keras.layers.Flatten()(x)
    x = tf.keras.layers.Dense(10, activation='sigmoid')(x)
    return x
def identity_block(X, f, filters, stage, block):
    """Residual identity block (shortcut passes through unprojected).

    Runs a 1x1 -> fxf -> 1x1 bottleneck on the main path, adds the raw
    input back, and applies a final ReLU.

    Arguments:
        X -- input tensor of shape (m, n_H_prev, n_W_prev, n_C_prev)
        f -- window size of the middle convolution
        filters -- list of three filter counts for the bottleneck convs
        stage -- integer used in layer names (position in the network)
        block -- string/character used in layer names

    Returns:
        X -- output tensor of shape (n_H, n_W, n_C)
    """
    conv_base = 'res' + str(stage) + block + '_branch'
    bn_base = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters
    shortcut = X

    def conv_bn(tensor, n_filters, kernel, pad, suffix):
        # Conv + batch-norm pair sharing the block-wide naming scheme.
        tensor = tf.keras.layers.Conv2D(
            filters=n_filters, kernel_size=kernel, strides=(1, 1),
            padding=pad, name=conv_base + suffix)(tensor)
        return tf.keras.layers.BatchNormalization(name=bn_base + suffix)(tensor)

    # Bottleneck main path.
    X = conv_bn(X, F1, (1, 1), 'valid', '2a')
    X = tf.keras.layers.Activation('relu')(X)
    X = conv_bn(X, F2, (f, f), 'same', '2b')
    X = tf.keras.layers.Activation('relu')(X)
    X = conv_bn(X, F3, (1, 1), 'valid', '2c')

    # Re-inject the input, then the final non-linearity.
    X = tf.keras.layers.Add()([X, shortcut])
    X = tf.keras.layers.Activation('relu')(X)
    return X
def convolutional_block(X, f, filters, stage, block, s=2):
    """Residual block with a projected (strided) shortcut.

    Same bottleneck as ``identity_block`` but the first convolution and
    the shortcut projection use stride ``s``, so spatial size and channel
    count may change.

    Arguments:
        X -- input tensor of shape (m, n_H_prev, n_W_prev, n_C_prev)
        f -- window size of the middle convolution
        filters -- list of three filter counts for the bottleneck convs
        stage -- integer used in layer names (position in the network)
        block -- string/character used in layer names
        s -- stride applied by the first conv and the shortcut projection

    Returns:
        X -- output tensor of shape (n_H, n_W, n_C)
    """
    conv_base = 'res' + str(stage) + block + '_branch'
    bn_base = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters
    shortcut = X

    # ----- main path ------------------------------------------------
    X = tf.keras.layers.Conv2D(F1, (1, 1), strides=(s, s), name=conv_base + '2a')(X)
    X = tf.keras.layers.BatchNormalization(name=bn_base + '2a')(X)
    X = tf.keras.layers.Activation('relu')(X)

    X = tf.keras.layers.Conv2D(
        filters=F2, kernel_size=(f, f), strides=(1, 1),
        padding='same', name=conv_base + '2b')(X)
    X = tf.keras.layers.BatchNormalization(name=bn_base + '2b')(X)
    X = tf.keras.layers.Activation('relu')(X)

    X = tf.keras.layers.Conv2D(
        filters=F3, kernel_size=(1, 1), strides=(1, 1),
        padding='valid', name=conv_base + '2c')(X)
    X = tf.keras.layers.BatchNormalization(name=bn_base + '2c')(X)

    # ----- shortcut path: 1x1 projection to matching shape ----------
    shortcut = tf.keras.layers.Conv2D(
        filters=F3, kernel_size=(1, 1), strides=(s, s),
        padding='valid', name=conv_base + '1')(shortcut)
    shortcut = tf.keras.layers.BatchNormalization(name=bn_base + '1')(shortcut)

    # Merge paths and apply the final non-linearity.
    X = tf.keras.layers.Add()([X, shortcut])
    X = tf.keras.layers.Activation('relu')(X)
    return X
# https://github.com/priya-dwivedi/Deep-Learning/blob/master/resnet_keras/Residual_Network_Keras.ipynb
# https://towardsdatascience.com/understanding-and-coding-a-resnet-in-keras-446d7ff84d33
def ResNet50(input_shape=(20, 6, 1)):
    """Truncated ResNet50-style feature extractor (experimental).

    Only the padded stem is active: zero-padding, a 3x3 conv to lift the
    input to 3 channels, stage-1 conv/BN/ReLU/max-pool, one extra 3x3
    conv, then a flatten and a 16-unit dense head. The residual stages of
    the reference ResNet50 are disabled in this experimental version.

    Arguments:
        input_shape -- shape of one input sample

    Returns:
        model -- an uncompiled ``tf.keras.Model`` named 'ResNet50'
    """
    X_input = tf.keras.layers.Input(input_shape)

    # Zero-padding grows the small minutiae map toward an image-like size;
    # the 3x3 conv lifts it to 3 channels.
    X = tf.keras.layers.ZeroPadding2D((0, 7))(X_input)
    X = tf.keras.layers.ZeroPadding2D((25, 25))(X)
    X = tf.keras.layers.Conv2D(3, (3, 3), padding='same')(X)

    # Stage 1
    X = tf.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), name='conv1',
                               kernel_initializer=tf.keras.initializers.glorot_uniform(seed=0))(X)
    X = tf.keras.layers.Dropout(0.1)(X)
    X = tf.keras.layers.BatchNormalization(axis=3, name='bn_conv1')(X)
    X = tf.keras.layers.Activation('relu')(X)
    X = tf.keras.layers.MaxPooling2D((3, 3), strides=(2, 2))(X)
    X = tf.keras.layers.Conv2D(64, (3, 3), strides=(1, 1), name='conv2',
                               kernel_initializer=tf.keras.initializers.glorot_uniform(seed=0))(X)

    # Output head
    X = tf.keras.layers.Flatten()(X)
    X = tf.keras.layers.Dense(
        16, activation='relu',
        name='fc' + str(16),
        kernel_initializer=tf.keras.initializers.glorot_uniform(seed=0))(X)

    model = tf.keras.Model(inputs=X_input, outputs=X, name='ResNet50')
    # model.summary() prints the table itself and returns None; the old
    # print("summary => ", model.summary()) additionally emitted
    # "summary =>  None".
    model.summary()
    return model
| 37.949451
| 106
| 0.618579
| 2,556
| 17,267
| 4.103678
| 0.103678
| 0.122795
| 0.197064
| 0.184193
| 0.774907
| 0.742206
| 0.708456
| 0.691391
| 0.64601
| 0.60921
| 0
| 0.053855
| 0.214977
| 17,267
| 454
| 107
| 38.03304
| 0.719144
| 0.541843
| 0
| 0.437956
| 0
| 0
| 0.036564
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058394
| false
| 0
| 0.014599
| 0
| 0.131387
| 0.014599
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9dba1f1f826948da80bc258fa359068d19d1c196
| 970
|
py
|
Python
|
_site/Forritun/Forritun 1/testaroni tima.py
|
EinarK2/einark2.github.io
|
ec121871d381fe62e29573e67b57baf80f31e90d
|
[
"CC-BY-4.0"
] | null | null | null |
_site/Forritun/Forritun 1/testaroni tima.py
|
EinarK2/einark2.github.io
|
ec121871d381fe62e29573e67b57baf80f31e90d
|
[
"CC-BY-4.0"
] | null | null | null |
_site/Forritun/Forritun 1/testaroni tima.py
|
EinarK2/einark2.github.io
|
ec121871d381fe62e29573e67b57baf80f31e90d
|
[
"CC-BY-4.0"
] | 1
|
2018-09-12T15:12:34.000Z
|
2018-09-12T15:12:34.000Z
|
# testaroni: print the sum of a fixed list of sample values.
tala=[401, 406, 408, 410, 410, 411, 413, 414, 414, 416, 422, 423, 423, 425, 425, 425, 427, 427, 429, 430, 431, 440, 442, 444, 445, 446, 448, 452, 454, 454, 454, 458, 460, 461, 461, 461, 463, 464, 464, 465, 466, 467, 478, 482, 483, 484, 485, 487, 487, 489, 490, 491, 491, 491, 492, 496, 496, 498, 498, 498, 500, 500, 502, 504, 507, 508, 508, 511, 514, 517, 518, 519, 519, 523, 525, 526, 534, 535, 536, 543, 544, 545, 546, 547, 552, 554, 554, 555, 556, 556, 557, 560, 562, 563, 563, 564, 565, 567, 569, 569, 569, 571, 572, 573, 575, 575, 577, 578, 578, 580, 580, 580, 580, 581, 582, 586, 587, 587, 588, 589, 591, 591, 593, 594, 596, 597, 601, 601, 601, 601, 601, 605, 605, 605, 607, 607, 608, 610, 611, 611, 613, 614, 617, 617, 621, 621, 624, 625, 628, 628, 629, 629, 631, 632, 636, 637, 638, 639, 641, 644, 644, 647, 649, 652, 652, 654, 654, 655, 656, 659, 660, 660, 662, 665, 666, 669, 673, 682, 683, 687, 690, 690, 692, 697, 697, 699, 699]
print(sum(tala))
| 194
| 940
| 0.604124
| 192
| 970
| 3.052083
| 0.729167
| 0.040956
| 0.046075
| 0.040956
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.719231
| 0.195876
| 970
| 4
| 941
| 242.5
| 0.032051
| 0.009278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
3b031973d209945661d54d63d398e421573bf003
| 20
|
py
|
Python
|
src/__init__.py
|
mrdlp/Ontology-Population-with-Web-Scraping
|
1d3bc24dcb74be0fe11e7c7509aa5f9932233dd0
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
mrdlp/Ontology-Population-with-Web-Scraping
|
1d3bc24dcb74be0fe11e7c7509aa5f9932233dd0
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
mrdlp/Ontology-Population-with-Web-Scraping
|
1d3bc24dcb74be0fe11e7c7509aa5f9932233dd0
|
[
"MIT"
] | null | null | null |
# This is a first try.
| 20
| 20
| 0.75
| 5
| 20
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.9375
| 0.95
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d177e320f045b46e9a86a4c092aa56852188fe67
| 59
|
py
|
Python
|
tools/Polygraphy/polygraphy/backend/common/__init__.py
|
martellz/TensorRT
|
f182e83b30b5d45aaa3f9a041ff8b3ce83e366f4
|
[
"Apache-2.0"
] | 4
|
2021-04-16T13:49:38.000Z
|
2022-01-16T08:58:07.000Z
|
tools/Polygraphy/polygraphy/backend/common/__init__.py
|
martellz/TensorRT
|
f182e83b30b5d45aaa3f9a041ff8b3ce83e366f4
|
[
"Apache-2.0"
] | null | null | null |
tools/Polygraphy/polygraphy/backend/common/__init__.py
|
martellz/TensorRT
|
f182e83b30b5d45aaa3f9a041ff8b3ce83e366f4
|
[
"Apache-2.0"
] | 2
|
2021-02-04T14:46:10.000Z
|
2021-02-04T14:56:08.000Z
|
from polygraphy.backend.common.loader import BytesFromPath
| 29.5
| 58
| 0.881356
| 7
| 59
| 7.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 1
| 59
| 59
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d1830178d78d1efb4f5dea41fd8038f2403c0bef
| 112
|
py
|
Python
|
packages/jsii-rosetta/test/translations/comments/interleave_single_line_comments_with_function_call.py
|
NGL321/jsii
|
a31ebf5ef676391d97f2286edc21e5859c38c96c
|
[
"Apache-2.0"
] | 1,639
|
2019-07-05T07:21:00.000Z
|
2022-03-31T09:55:01.000Z
|
packages/jsii-rosetta/test/translations/comments/interleave_single_line_comments_with_function_call.py
|
NGL321/jsii
|
a31ebf5ef676391d97f2286edc21e5859c38c96c
|
[
"Apache-2.0"
] | 2,704
|
2019-07-01T23:10:28.000Z
|
2022-03-31T23:40:12.000Z
|
packages/jsii-rosetta/test/translations/comments/interleave_single_line_comments_with_function_call.py
|
NGL321/jsii
|
a31ebf5ef676391d97f2286edc21e5859c38c96c
|
[
"Apache-2.0"
] | 146
|
2019-07-02T14:36:25.000Z
|
2022-03-26T00:21:27.000Z
|
# NOTE(review): jsii-rosetta translation-test fixture — exercises line
# comments interleaved with a call's arguments; `some_function` and `arg1`
# are intentionally undefined here.
some_function(arg1,
# A comment before arg2
arg2="string",
# A comment before arg3
arg3="boo"
)
| 16
| 27
| 0.625
| 15
| 112
| 4.6
| 0.666667
| 0.231884
| 0.405797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 0.267857
| 112
| 7
| 28
| 16
| 0.780488
| 0.383929
| 0
| 0
| 0
| 0
| 0.134328
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d184470930fb9ef30eb1dfb0cf90d6541d4c6a11
| 43
|
py
|
Python
|
vernon/summers2005/__main__.py
|
pkgw/vernon
|
9dd52d813722d0932195723cf8c37a5dd2fd0d25
|
[
"MIT"
] | null | null | null |
vernon/summers2005/__main__.py
|
pkgw/vernon
|
9dd52d813722d0932195723cf8c37a5dd2fd0d25
|
[
"MIT"
] | null | null | null |
vernon/summers2005/__main__.py
|
pkgw/vernon
|
9dd52d813722d0932195723cf8c37a5dd2fd0d25
|
[
"MIT"
] | 1
|
2020-12-05T06:05:40.000Z
|
2020-12-05T06:05:40.000Z
|
from . import summarize
# Build the summary object and display it (package entry point).
summarize().show()
| 14.333333
| 23
| 0.744186
| 5
| 43
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 43
| 2
| 24
| 21.5
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d19f53e7c6b215bf96e844a57c88e556dc065849
| 73
|
py
|
Python
|
captain_hook/services/base/events/__init__.py
|
brantje/captain_hook
|
dde076a96afffa2235b7d8d01d47c4e61099c6b6
|
[
"Apache-2.0"
] | 1
|
2017-01-07T16:22:05.000Z
|
2017-01-07T16:22:05.000Z
|
captain_hook/services/base/events/__init__.py
|
brantje/captain_hook
|
dde076a96afffa2235b7d8d01d47c4e61099c6b6
|
[
"Apache-2.0"
] | 3
|
2017-02-27T00:34:19.000Z
|
2017-02-27T14:25:44.000Z
|
captain_hook/services/base/events/__init__.py
|
brantje/telegram-github-bot
|
dde076a96afffa2235b7d8d01d47c4e61099c6b6
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
from .base_event import BaseEvent
| 24.333333
| 38
| 0.876712
| 10
| 73
| 5.8
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 73
| 2
| 39
| 36.5
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ae134f522864342441d0466e5437cf4a8df35157
| 705
|
py
|
Python
|
diskpy/ICgen/__init__.py
|
langfzac/diskpy
|
3b0f4fdc7f1fea21efdd3ab55bbf362181c7a3c4
|
[
"MIT"
] | 4
|
2016-03-25T18:09:39.000Z
|
2020-03-10T09:27:41.000Z
|
diskpy/ICgen/__init__.py
|
langfzac/diskpy
|
3b0f4fdc7f1fea21efdd3ab55bbf362181c7a3c4
|
[
"MIT"
] | 21
|
2015-07-20T21:56:45.000Z
|
2017-09-16T23:01:15.000Z
|
diskpy/ICgen/__init__.py
|
langfzac/diskpy
|
3b0f4fdc7f1fea21efdd3ab55bbf362181c7a3c4
|
[
"MIT"
] | 4
|
2015-08-07T22:03:12.000Z
|
2021-02-19T16:30:17.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 7 10:11:48 2015
@author: ibackus
"""
from ICgen import IC, load
import AddBinary, binary, binaryUtils, calc_temp, calc_velocity, \
ICgen_settings, ICgen_utils, make_sigma, \
make_snapshotBinary, make_snapshot, make_snapshotSType, pos_class, \
sigma_profile, vertical_solver
from rhosolver import rhosolver, loadrho
__all__ = ['IC', 'load', 'AddBinary', 'binary',
'binaryUtils', 'calc_temp', 'calc_velocity', 'ICgen_settings',
'ICgen_utils', 'make_sigma',
'make_snapshotBinary', 'make_snapshot', 'make_snapshotSType',
'pos_class', 'sigma_profile', 'vertical_solver', 'rhosolver',
'loadrho']
| 29.375
| 74
| 0.680851
| 80
| 705
| 5.675
| 0.5
| 0.026432
| 0.114537
| 0.132159
| 0.700441
| 0.700441
| 0.700441
| 0.700441
| 0.700441
| 0.700441
| 0
| 0.020942
| 0.187234
| 705
| 23
| 75
| 30.652174
| 0.771379
| 0.107801
| 0
| 0
| 0
| 0
| 0.309179
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ae5702cbf7d34170815f9d815c74f6fb052ca673
| 73
|
py
|
Python
|
pyha/cores/packet/__init__.py
|
gasparka/pyha
|
60d9bbfd6075e7548d670d05317d64bc2a1a19ee
|
[
"Apache-2.0"
] | 6
|
2017-05-18T18:57:07.000Z
|
2020-08-06T11:23:34.000Z
|
pyha/cores/packet/__init__.py
|
gasparka/pyha
|
60d9bbfd6075e7548d670d05317d64bc2a1a19ee
|
[
"Apache-2.0"
] | 607
|
2017-05-10T12:51:54.000Z
|
2022-03-31T18:08:15.000Z
|
pyha/cores/packet/__init__.py
|
gasparka/pyha
|
60d9bbfd6075e7548d670d05317d64bc2a1a19ee
|
[
"Apache-2.0"
] | 1
|
2019-03-20T13:57:46.000Z
|
2019-03-20T13:57:46.000Z
|
from .crc16 import CRC16
from .header_correlator import HeaderCorrelator
| 24.333333
| 47
| 0.863014
| 9
| 73
| 6.888889
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0.109589
| 73
| 2
| 48
| 36.5
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ae9bf5c79674fa3f110bb10b95216faff91f1343
| 76
|
py
|
Python
|
sacrerouge/datasets/duc_tac/duc2004/__init__.py
|
danieldeutsch/decomposed-rouge
|
0d723be8e3359f0bdcc9c7940336800895e46dbb
|
[
"Apache-2.0"
] | 81
|
2020-07-10T15:45:08.000Z
|
2022-03-30T12:19:11.000Z
|
sacrerouge/datasets/duc_tac/duc2004/__init__.py
|
danieldeutsch/decomposed-rouge
|
0d723be8e3359f0bdcc9c7940336800895e46dbb
|
[
"Apache-2.0"
] | 29
|
2020-08-03T21:50:45.000Z
|
2022-02-23T14:34:16.000Z
|
sacrerouge/datasets/duc_tac/duc2004/__init__.py
|
danieldeutsch/decomposed-rouge
|
0d723be8e3359f0bdcc9c7940336800895e46dbb
|
[
"Apache-2.0"
] | 7
|
2020-08-14T09:54:08.000Z
|
2022-03-30T12:19:25.000Z
|
from sacrerouge.datasets.duc_tac.duc2004.subcommand import DUC2004Subcommand
| 76
| 76
| 0.907895
| 9
| 76
| 7.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 0.039474
| 76
| 1
| 76
| 76
| 0.821918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ae9e90cb0cda8da5d0da0d14eedc75eb0c359ea9
| 203
|
py
|
Python
|
output/models/ms_data/datatypes/facets/unsigned_short/unsigned_short_min_inclusive005_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/datatypes/facets/unsigned_short/unsigned_short_min_inclusive005_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/datatypes/facets/unsigned_short/unsigned_short_min_inclusive005_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ms_data.datatypes.facets.unsigned_short.unsigned_short_min_inclusive005_xsd.unsigned_short_min_inclusive005 import (
FooType,
Test,
)
__all__ = [
"FooType",
"Test",
]
| 20.3
| 135
| 0.753695
| 24
| 203
| 5.833333
| 0.666667
| 0.278571
| 0.228571
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034682
| 0.147783
| 203
| 9
| 136
| 22.555556
| 0.774566
| 0
| 0
| 0
| 0
| 0
| 0.054187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
881d037b3fd5697bbac007352270a46871233414
| 80
|
py
|
Python
|
mytest.py
|
iPhone2018/bkapp
|
21e30122b8fdaecba2f1d6bbc349e2a67d866c22
|
[
"Apache-2.0"
] | null | null | null |
mytest.py
|
iPhone2018/bkapp
|
21e30122b8fdaecba2f1d6bbc349e2a67d866c22
|
[
"Apache-2.0"
] | 5
|
2019-11-07T07:03:55.000Z
|
2021-06-10T22:09:28.000Z
|
mytest.py
|
iPhone2018/bkapp
|
21e30122b8fdaecba2f1d6bbc349e2a67d866c22
|
[
"Apache-2.0"
] | null | null | null |
# Extract and print the second whitespace-separated field of a
# load-average-style record.
s = "2.12 1.86 1.81 3/982 17267\n"
s = s.replace("\n", "").split(" ")[1]
# print(s) is valid on both Python 2 and Python 3; the bare
# `print s` statement only worked on Python 2.
print(s)
| 26.666667
| 37
| 0.525
| 19
| 80
| 2.210526
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.283582
| 0.1625
| 80
| 3
| 38
| 26.666667
| 0.343284
| 0
| 0
| 0
| 0
| 0
| 0.382716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
882bf0ef2ddf53307445dd3e3611e5f2c90370d7
| 1,268
|
py
|
Python
|
main.py
|
Iangecko/arbys
|
5b2e4b4e511d5721d6a1cc447b8fbf9be43fa909
|
[
"MIT"
] | null | null | null |
main.py
|
Iangecko/arbys
|
5b2e4b4e511d5721d6a1cc447b8fbf9be43fa909
|
[
"MIT"
] | null | null | null |
main.py
|
Iangecko/arbys
|
5b2e4b4e511d5721d6a1cc447b8fbf9be43fa909
|
[
"MIT"
] | null | null | null |
from client import client
from key import token
# To load new modules, copy/paste the line below, uncommented, with X filled in for the name of your file
# from modules import X
from modules import about
from modules import ares
from modules import beef
from modules import call
from modules import chicken
from modules import cond
from modules import cqdx
from modules import emoji_stats
from modules import exec
from modules import exit
from modules import fivenine
from modules import ham
from modules import help
from modules import htm
from modules import info
from modules import join_leave_msgs
from modules import logstat
from modules import markov
from modules import mc
from modules import message_log
from modules import morse
from modules import music
from modules import n2yo
from modules import nou
from modules import ntp
from modules import phonehand
from modules import ping
from modules import pingreact
from modules import relay
from modules import roles
from modules import spaceman
from modules import stats
from modules import thiccbeef
from modules import thiccom
from modules import thiccseal
from modules import time
from modules import tubez
from modules import units
from modules import unmorse
from modules import uwu
# Start the client with the loaded token after all modules are imported.
client.run(token)
| 25.36
| 105
| 0.836751
| 199
| 1,268
| 5.311558
| 0.341709
| 0.426679
| 0.659413
| 0.041627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000935
| 0.156151
| 1,268
| 49
| 106
| 25.877551
| 0.986916
| 0.09858
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.976744
| 0
| 0.976744
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
88340ce6bd4e7d86f02425b3444ad02587cf331b
| 78
|
py
|
Python
|
sample/sample.py
|
eaybek/brainduck
|
f45dea58a39dc543d9bbf9cdc4732cbdd8f7c0ea
|
[
"MIT"
] | null | null | null |
sample/sample.py
|
eaybek/brainduck
|
f45dea58a39dc543d9bbf9cdc4732cbdd8f7c0ea
|
[
"MIT"
] | null | null | null |
sample/sample.py
|
eaybek/brainduck
|
f45dea58a39dc543d9bbf9cdc4732cbdd8f7c0ea
|
[
"MIT"
] | null | null | null |
from brainduck.brainduck import Brainduck
class Brainduck(object):
    """Placeholder sample class.

    NOTE(review): this definition shadows the ``Brainduck`` imported on
    the line above — confirm the shadowing is intentional in this sample.
    """
    pass
| 13
| 41
| 0.782051
| 9
| 78
| 6.777778
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 78
| 5
| 42
| 15.6
| 0.938462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
887f8b04819f37d165c87e09b1e6033da5cdebe0
| 351
|
py
|
Python
|
code/tools/gen_checkpoint.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 789
|
2018-03-21T05:28:38.000Z
|
2022-03-29T19:32:47.000Z
|
code/tools/gen_checkpoint.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 46
|
2018-05-03T07:11:10.000Z
|
2022-03-11T23:26:03.000Z
|
code/tools/gen_checkpoint.py
|
santomon/taskonomy
|
4b22087a2686172b21b61589831061e7a386fe36
|
[
"MIT"
] | 152
|
2018-03-24T10:20:44.000Z
|
2022-02-09T02:38:10.000Z
|
# Template for a TensorFlow "checkpoint" index file; {task} and {step}
# select the concrete model snapshot to point at.
ckpt_string = 'model_checkpoint_path: "/home/ubuntu/s3/model_log_final/{task}/logs/slim-train/time/model.ckpt-{step}"\nall_model_checkpoint_paths: "/home/ubuntu/s3/model_log_final/{task}/logs/slim-train/time/model.ckpt-{step}"'

# Render the template for one snapshot and write it, newline-terminated,
# to a file named "checkpoint" in the working directory.
rendered = ckpt_string.format(task="keypoint3d", step="112830")
with open("checkpoint", "w") as text_file:
    text_file.write(rendered + "\n")
| 87.75
| 227
| 0.769231
| 55
| 351
| 4.672727
| 0.509091
| 0.077821
| 0.093385
| 0.132296
| 0.459144
| 0.459144
| 0.459144
| 0.459144
| 0.459144
| 0.459144
| 0
| 0.026946
| 0.048433
| 351
| 3
| 228
| 117
| 0.742515
| 0
| 0
| 0
| 0
| 0.333333
| 0.678063
| 0.595442
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
88a33772a285013cc309453a69a781ac80f43100
| 268
|
py
|
Python
|
quake/server/worker.py
|
It4innovations/quake
|
a57f37e5c871e0c7c00b84aef638b925ef96690a
|
[
"MIT"
] | 1
|
2021-03-26T14:23:44.000Z
|
2021-03-26T14:23:44.000Z
|
quake/server/worker.py
|
It4innovations/quake
|
a57f37e5c871e0c7c00b84aef638b925ef96690a
|
[
"MIT"
] | null | null | null |
quake/server/worker.py
|
It4innovations/quake
|
a57f37e5c871e0c7c00b84aef638b925ef96690a
|
[
"MIT"
] | null | null | null |
class Worker:
    """Server-side record of a single worker node in the cluster."""

    def __init__(self, worker_id, hostname):
        # Identity and network location of the worker.
        self.worker_id = worker_id
        self.hostname = hostname
        # Tasks currently assigned to this worker.
        self.tasks = set()
        # Connection to the data service; established later, hence None.
        self.ds_connection = None

    def __repr__(self):
        return "<Worker id={}>".format(self.worker_id)
| 26.8
| 54
| 0.619403
| 33
| 268
| 4.636364
| 0.454545
| 0.261438
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268657
| 268
| 9
| 55
| 29.777778
| 0.780612
| 0
| 0
| 0
| 0
| 0
| 0.052239
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.125
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
ee14c20cb31444294bf49ed6a884974ffa5e6dd2
| 83
|
py
|
Python
|
tirma/models/__init__.py
|
sergevkim/ImageTranslation
|
b90f71b6abf0950569e6567ed67cb4bb9f99eaaf
|
[
"MIT"
] | 1
|
2020-11-28T18:35:31.000Z
|
2020-11-28T18:35:31.000Z
|
tirma/models/__init__.py
|
sergevkim/ImageTranslation
|
b90f71b6abf0950569e6567ed67cb4bb9f99eaaf
|
[
"MIT"
] | null | null | null |
tirma/models/__init__.py
|
sergevkim/ImageTranslation
|
b90f71b6abf0950569e6567ed67cb4bb9f99eaaf
|
[
"MIT"
] | null | null | null |
from .cycle_gan import CycleGAN
from .pix2pix_translator import Pix2PixTranslator
| 20.75
| 49
| 0.86747
| 10
| 83
| 7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.108434
| 83
| 3
| 50
| 27.666667
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ee41ed3d586de4ced7f9b519426a38c70d13d085
| 224
|
py
|
Python
|
spotty/deployment/utils/cli.py
|
greglira/spotty
|
0b5073621ba8e19be75b6f9701e6c9971b6d17fb
|
[
"MIT"
] | 246
|
2018-09-03T09:09:48.000Z
|
2020-07-18T21:07:15.000Z
|
spotty/deployment/utils/cli.py
|
greglira/spotty
|
0b5073621ba8e19be75b6f9701e6c9971b6d17fb
|
[
"MIT"
] | 42
|
2018-10-09T19:41:56.000Z
|
2020-06-15T22:55:58.000Z
|
spotty/deployment/utils/cli.py
|
greglira/spotty
|
0b5073621ba8e19be75b6f9701e6c9971b6d17fb
|
[
"MIT"
] | 27
|
2018-10-09T22:16:40.000Z
|
2020-06-08T22:26:00.000Z
|
import shlex
def shlex_join(split_command: list):
    """Return a shell-escaped string from *split_command*.

    Backport of ``shlex.join``, which first appeared in Python 3.8.
    """
    quoted = (shlex.quote(token) for token in split_command)
    return ' '.join(quoted)
| 24.888889
| 62
| 0.696429
| 34
| 224
| 4.470588
| 0.705882
| 0.236842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01105
| 0.191964
| 224
| 8
| 63
| 28
| 0.828729
| 0.397321
| 0
| 0
| 0
| 0
| 0.008333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ee81e68223163d241cbf2ecc1c0257b17be08804
| 189
|
py
|
Python
|
prl/utils/__init__.py
|
sliwy/prl
|
0e4bfa5578d11890d21932f535b095f2657ed4ff
|
[
"MIT"
] | 51
|
2020-02-12T08:57:50.000Z
|
2022-03-14T13:27:40.000Z
|
prl/utils/__init__.py
|
sliwy/prl
|
0e4bfa5578d11890d21932f535b095f2657ed4ff
|
[
"MIT"
] | 4
|
2021-03-19T10:47:07.000Z
|
2022-03-12T00:14:39.000Z
|
prl/utils/__init__.py
|
sliwy/prl
|
0e4bfa5578d11890d21932f535b095f2657ed4ff
|
[
"MIT"
] | 4
|
2020-03-04T07:03:24.000Z
|
2022-03-14T13:27:43.000Z
|
from prl.utils.misc import colors
from prl.utils.utils import timeit
from prl.utils.loggers import (
time_logger,
memory_logger,
agent_logger,
misc_logger,
nn_logger,
)
| 18.9
| 34
| 0.730159
| 27
| 189
| 4.925926
| 0.481481
| 0.157895
| 0.270677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201058
| 189
| 9
| 35
| 21
| 0.880795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ee8f577665e1b983ed81ddb097fdcd5fe554ea2f
| 328
|
py
|
Python
|
sparkplug/timereporters/base.py
|
freshbooks/sparkplug
|
4f4fe38655a93cdee602019de2a75cd3d320408c
|
[
"MIT"
] | null | null | null |
sparkplug/timereporters/base.py
|
freshbooks/sparkplug
|
4f4fe38655a93cdee602019de2a75cd3d320408c
|
[
"MIT"
] | 11
|
2015-04-16T18:34:31.000Z
|
2021-05-07T14:19:57.000Z
|
sparkplug/timereporters/base.py
|
freshbooks/sparkplug
|
4f4fe38655a93cdee602019de2a75cd3d320408c
|
[
"MIT"
] | 1
|
2019-03-14T12:52:44.000Z
|
2019-03-14T12:52:44.000Z
|
import datetime
def _milliseconds(timedelta):
return timedelta.total_seconds() * 1000
class Base(object):
    """No-op time reporter; subclasses override the ``append_*`` hooks."""

    def __init__(self):
        """Base reporter holds no state."""

    def append_wait(self, delta, tags=None):
        """Record time spent waiting; base implementation does nothing."""

    def append_exec(self, delta, tags=None):
        """Record time spent executing; base implementation does nothing."""

    def append_erro(self, delta, tags=None):
        """Record time attributed to an error; base implementation does nothing."""
| 16.4
| 44
| 0.640244
| 41
| 328
| 4.902439
| 0.536585
| 0.104478
| 0.19403
| 0.253731
| 0.402985
| 0.298507
| 0.298507
| 0
| 0
| 0
| 0
| 0.016598
| 0.265244
| 328
| 19
| 45
| 17.263158
| 0.817427
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.416667
| false
| 0.333333
| 0.083333
| 0.083333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
4e66849f114be35de8a8317ef834f4045fce8dde
| 16,953
|
bzl
|
Python
|
3rdparty/workspace.bzl
|
digital-peace-talks/argument-analysis-research
|
587b52bedb79a0c9497b8c39ccc70edf4d165249
|
[
"MIT"
] | null | null | null |
3rdparty/workspace.bzl
|
digital-peace-talks/argument-analysis-research
|
587b52bedb79a0c9497b8c39ccc70edf4d165249
|
[
"MIT"
] | null | null | null |
3rdparty/workspace.bzl
|
digital-peace-talks/argument-analysis-research
|
587b52bedb79a0c9497b8c39ccc70edf4d165249
|
[
"MIT"
] | null | null | null |
# Do not edit. bazel-deps autogenerates this file from dependencies.yaml.
def _jar_artifact_impl(ctx):
    """Implementation of the jar_artifact repository rule.

    Downloads the main jar (and, when src_urls is non-empty, the sources
    jar) into the external repository and writes a BUILD file exposing the
    jar as a java_import plus a filegroup.
    """
    jar_name = "%s.jar" % ctx.name
    # Fetch the main jar into jar/<name>.jar, verifying its sha256.
    ctx.download(
        output=ctx.path("jar/%s" % jar_name),
        url=ctx.attr.urls,
        sha256=ctx.attr.sha256,
        executable=False
    )
    src_name="%s-sources.jar" % ctx.name
    srcjar_attr=""
    has_sources = len(ctx.attr.src_urls) != 0
    if has_sources:
        # Fetch the optional sources jar alongside the main jar.
        ctx.download(
            output=ctx.path("jar/%s" % src_name),
            url=ctx.attr.src_urls,
            sha256=ctx.attr.src_sha256,
            executable=False
        )
        # Spliced verbatim into the java_import below when sources exist.
        srcjar_attr ='\n srcjar = ":%s",' % src_name
    # Template for the generated BUILD file; {artifact}/{jar_name}/{src_name}
    # are filled via .format below.
    build_file_contents = """
package(default_visibility = ['//visibility:public'])
java_import(
name = 'jar',
tags = ['maven_coordinates={artifact}'],
jars = ['{jar_name}'],{srcjar_attr}
)
filegroup(
name = 'file',
srcs = [
'{jar_name}',
'{src_name}'
],
visibility = ['//visibility:public']
)\n""".format(artifact = ctx.attr.artifact, jar_name = jar_name, src_name = src_name, srcjar_attr = srcjar_attr)
    # Write the BUILD file into the jar/ package of the external repo.
    ctx.file(ctx.path("jar/BUILD"), build_file_contents, False)
    return None
# Repository rule that downloads one maven jar (and optionally its sources
# jar) and generates a BUILD file for it; see _jar_artifact_impl above.
jar_artifact = repository_rule(
    attrs = {
        "artifact": attr.string(mandatory = True),
        "sha256": attr.string(mandatory = True),
        "urls": attr.string_list(mandatory = True),
        # Sources jar is optional; empty defaults mean "no sources".
        "src_sha256": attr.string(mandatory = False, default=""),
        "src_urls": attr.string_list(mandatory = False, default=[]),
    },
    implementation = _jar_artifact_impl
)
def jar_artifact_callback(hash):
    """Instantiate the jar_artifact rule for one dependency dict.

    *hash* is one entry from list_dependencies(): a dict with "artifact",
    "name", "url", "sha256", "bind", "actual" keys and an optional
    "source" sub-dict for the sources jar.
    """
    src_urls = []
    src_sha256 = ""
    # "source" is optional; present only when a sources jar was resolved.
    source=hash.get("source", None)
    if source != None:
        src_urls = [source["url"]]
        src_sha256 = source["sha256"]
    jar_artifact(
        artifact = hash["artifact"],
        name = hash["name"],
        urls = [hash["url"]],
        sha256 = hash["sha256"],
        src_urls = src_urls,
        src_sha256 = src_sha256
    )
    # Also expose the repo under //external:<bind> via the bind alias.
    native.bind(name = hash["bind"], actual = hash["actual"])
def list_dependencies():
    """Return the resolved maven dependency graph as a list of dicts.

    Autogenerated by bazel-deps from dependencies.yaml — do not edit by
    hand; each entry feeds jar_artifact_callback.
    """
    return [
    {"artifact": "com.fasterxml.jackson.core:jackson-annotations:2.9.0", "lang": "java", "sha1": "07c10d545325e3a6e72e06381afe469fd40eb701", "sha256": "45d32ac61ef8a744b464c54c2b3414be571016dd46bfc2bec226761cf7ae457a", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0.jar", "name": "com_fasterxml_jackson_core_jackson_annotations", "actual": "@com_fasterxml_jackson_core_jackson_annotations//jar", "bind": "jar/com/fasterxml/jackson/core/jackson_annotations"},
    {"artifact": "com.fasterxml.jackson.core:jackson-core:2.9.7", "lang": "java", "sha1": "4b7f0e0dc527fab032e9800ed231080fdc3ac015", "sha256": "9e5bc0efabd9f0cac5c1fdd9ae35b16332ed22a0ee19a356de370a18a8cb6c84", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.9.7/jackson-core-2.9.7.jar", "name": "com_fasterxml_jackson_core_jackson_core", "actual": "@com_fasterxml_jackson_core_jackson_core//jar", "bind": "jar/com/fasterxml/jackson/core/jackson_core"},
    {"artifact": "com.fasterxml.jackson.core:jackson-databind:2.9.7", "lang": "java", "sha1": "e6faad47abd3179666e89068485a1b88a195ceb7", "sha256": "675376decfc070b039d2be773a97002f1ee1e1346d95bd99feee0d56683a92bf", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.9.7/jackson-databind-2.9.7.jar", "name": "com_fasterxml_jackson_core_jackson_databind", "actual": "@com_fasterxml_jackson_core_jackson_databind//jar", "bind": "jar/com/fasterxml/jackson/core/jackson_databind"},
    {"artifact": "com.fasterxml.jackson.module:jackson-module-kotlin:2.9.7", "lang": "kotlin", "sha1": "9ec9b84e8af4c4f31efcbc5c21e34da8021419f1", "sha256": "5b313b299717156ee883ef37774f709c8c9942b395edcc1d13368e52a786be28", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/fasterxml/jackson/module/jackson-module-kotlin/2.9.7/jackson-module-kotlin-2.9.7.jar", "name": "com_fasterxml_jackson_module_jackson_module_kotlin", "actual": "@com_fasterxml_jackson_module_jackson_module_kotlin//jar:file", "bind": "jar/com/fasterxml/jackson/module/jackson_module_kotlin"},
    {"artifact": "io.javalin:javalin:2.3.0", "lang": "kotlin", "sha1": "73836e9cf29f978e47817584f9cee86b5e1f4c09", "sha256": "3571e83863e1f163854f1b2ee3cbfc1336fcbdfa595ec9c2ed8ab8bfa792e5f4", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/io/javalin/javalin/2.3.0/javalin-2.3.0.jar", "name": "io_javalin_javalin", "actual": "@io_javalin_javalin//jar:file", "bind": "jar/io/javalin/javalin"},
    {"artifact": "javax.servlet:javax.servlet-api:3.1.0", "lang": "java", "sha1": "3cd63d075497751784b2fa84be59432f4905bf7c", "sha256": "af456b2dd41c4e82cf54f3e743bc678973d9fe35bd4d3071fa05c7e5333b8482", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar", "name": "javax_servlet_javax_servlet_api", "actual": "@javax_servlet_javax_servlet_api//jar", "bind": "jar/javax/servlet/javax_servlet_api"},
    {"artifact": "org.eclipse.jetty.websocket:websocket-api:9.4.12.v20180830", "lang": "java", "sha1": "97d6376f70ae6c01112325c5254e566af118bc75", "sha256": "6f7ecb42601058ffe4a6c19c5340cac3ebf0f83e2e252b457558f104238278e3", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/websocket/websocket-api/9.4.12.v20180830/websocket-api-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_websocket_websocket_api", "actual": "@org_eclipse_jetty_websocket_websocket_api//jar", "bind": "jar/org/eclipse/jetty/websocket/websocket_api"},
    {"artifact": "org.eclipse.jetty.websocket:websocket-client:9.4.12.v20180830", "lang": "java", "sha1": "75880b6a90a6eda83fdbfc20a42f23eade4b975d", "sha256": "97c6882c858a75776773eaccc01739757c4e9f60a51613878c1f2b2ba03d91af", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/websocket/websocket-client/9.4.12.v20180830/websocket-client-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_websocket_websocket_client", "actual": "@org_eclipse_jetty_websocket_websocket_client//jar", "bind": "jar/org/eclipse/jetty/websocket/websocket_client"},
    {"artifact": "org.eclipse.jetty.websocket:websocket-common:9.4.12.v20180830", "lang": "java", "sha1": "33997cdafbabb3ffd6947a5c33057f967e10535b", "sha256": "3c35aefa720c51e09532c16fdbfaaebd1af3e07dee699dacaba8e0ab0adf88e5", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/websocket/websocket-common/9.4.12.v20180830/websocket-common-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_websocket_websocket_common", "actual": "@org_eclipse_jetty_websocket_websocket_common//jar", "bind": "jar/org/eclipse/jetty/websocket/websocket_common"},
    {"artifact": "org.eclipse.jetty.websocket:websocket-server:9.4.12.v20180830", "lang": "java", "sha1": "fadf609aec6026cb25f25b6bc0b979821f849fd7", "sha256": "7b1bd39006be8c32d7426a119567d860b3e4a3dc3c01a5c91326450bb0213a03", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/websocket/websocket-server/9.4.12.v20180830/websocket-server-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_websocket_websocket_server", "actual": "@org_eclipse_jetty_websocket_websocket_server//jar", "bind": "jar/org/eclipse/jetty/websocket/websocket_server"},
    {"artifact": "org.eclipse.jetty.websocket:websocket-servlet:9.4.12.v20180830", "lang": "java", "sha1": "8d212616b6ea21b96152ff202c2f53fdca8b8b53", "sha256": "8d43e0882759ecd093bd1a5a0ef2b4db38ac279212488a34edb8d7de7c45cc4d", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/websocket/websocket-servlet/9.4.12.v20180830/websocket-servlet-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_websocket_websocket_servlet", "actual": "@org_eclipse_jetty_websocket_websocket_servlet//jar", "bind": "jar/org/eclipse/jetty/websocket/websocket_servlet"},
    {"artifact": "org.eclipse.jetty:jetty-client:9.4.12.v20180830", "lang": "java", "sha1": "1d329d68f31dce13135243c06013aaf6f708f7e7", "sha256": "62efbbfda88cd4f7644242c4b4df8f3b0a671bfeafea7682dabe00352ba07db7", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-client/9.4.12.v20180830/jetty-client-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_client", "actual": "@org_eclipse_jetty_jetty_client//jar", "bind": "jar/org/eclipse/jetty/jetty_client"},
    {"artifact": "org.eclipse.jetty:jetty-http:9.4.12.v20180830", "lang": "java", "sha1": "1341796dde4e16df69bca83f3e87688ba2e7d703", "sha256": "20547da653be9942cc63f57e632a732608559aebde69753bc7312cfe16e8d9c0", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-http/9.4.12.v20180830/jetty-http-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_http", "actual": "@org_eclipse_jetty_jetty_http//jar", "bind": "jar/org/eclipse/jetty/jetty_http"},
    {"artifact": "org.eclipse.jetty:jetty-io:9.4.12.v20180830", "lang": "java", "sha1": "e93f5adaa35a9a6a85ba130f589c5305c6ecc9e3", "sha256": "ab1784abbb9e0ed0869ab6568fe46f1faa79fb5e948cf96450daecd9d27ba1db", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-io/9.4.12.v20180830/jetty-io-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_io", "actual": "@org_eclipse_jetty_jetty_io//jar", "bind": "jar/org/eclipse/jetty/jetty_io"},
    {"artifact": "org.eclipse.jetty:jetty-security:9.4.12.v20180830", "lang": "java", "sha1": "299e0602a9c0b753ba232cc1c1dda72ddd9addcf", "sha256": "513184970c785ac830424a9c62c2fadfa77a630f44aa0bdd792f00aaa092887e", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-security/9.4.12.v20180830/jetty-security-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_security", "actual": "@org_eclipse_jetty_jetty_security//jar", "bind": "jar/org/eclipse/jetty/jetty_security"},
    {"artifact": "org.eclipse.jetty:jetty-server:9.4.12.v20180830", "lang": "java", "sha1": "b0f25df0d32a445fd07d5f16fff1411c16b888fa", "sha256": "4833644e5c5a09bbddc85f75c53e0c8ed750de120ba248fffd8508028528252d", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-server/9.4.12.v20180830/jetty-server-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_server", "actual": "@org_eclipse_jetty_jetty_server//jar", "bind": "jar/org/eclipse/jetty/jetty_server"},
    {"artifact": "org.eclipse.jetty:jetty-servlet:9.4.12.v20180830", "lang": "java", "sha1": "4c1149328eda9fa39a274262042420f66d9ffd5f", "sha256": "7310d4cccf8abf27fde0c3f1a32e19c75fe33c6f1ab558f0704d915f0f01cb07", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-servlet/9.4.12.v20180830/jetty-servlet-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_servlet", "actual": "@org_eclipse_jetty_jetty_servlet//jar", "bind": "jar/org/eclipse/jetty/jetty_servlet"},
    {"artifact": "org.eclipse.jetty:jetty-util:9.4.12.v20180830", "lang": "java", "sha1": "cb4ccec9bd1fe4b10a04a0fb25d7053c1050188a", "sha256": "60ad53e118a3e7d10418b155b9944d90b2e4e4c732e53ef4f419473288d3f48c", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-util/9.4.12.v20180830/jetty-util-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_util", "actual": "@org_eclipse_jetty_jetty_util//jar", "bind": "jar/org/eclipse/jetty/jetty_util"},
    {"artifact": "org.eclipse.jetty:jetty-webapp:9.4.12.v20180830", "lang": "java", "sha1": "a3e119df2da04fcf5aa290c8c35c5b310ce2dcd1", "sha256": "5301e412a32bf7dddcfad458d952179597c61f8fd531c265873562725c3d4646", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-webapp/9.4.12.v20180830/jetty-webapp-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_webapp", "actual": "@org_eclipse_jetty_jetty_webapp//jar", "bind": "jar/org/eclipse/jetty/jetty_webapp"},
    {"artifact": "org.eclipse.jetty:jetty-xml:9.4.12.v20180830", "lang": "java", "sha1": "e9f1874e9b5edd498f2fe7cd0904405da07cc300", "sha256": "5b8298ab3d43ddaf0941d41f51b82c8ae23a247da055fa161b752ab9495155ed", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/eclipse/jetty/jetty-xml/9.4.12.v20180830/jetty-xml-9.4.12.v20180830.jar", "name": "org_eclipse_jetty_jetty_xml", "actual": "@org_eclipse_jetty_jetty_xml//jar", "bind": "jar/org/eclipse/jetty/jetty_xml"},
    {"artifact": "org.jetbrains.kotlin:kotlin-reflect:1.2.51", "lang": "java", "sha1": "36b719a7b84452dd13eeec979d8c82bfb765c57d", "sha256": "129f42c1ad5c3958856ecf2b2dadcd76e24a0b9b7f85aa2aba383616fcc49c7d", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/jetbrains/kotlin/kotlin-reflect/1.2.51/kotlin-reflect-1.2.51.jar", "name": "org_jetbrains_kotlin_kotlin_reflect", "actual": "@org_jetbrains_kotlin_kotlin_reflect//jar", "bind": "jar/org/jetbrains/kotlin/kotlin_reflect"},
    {"artifact": "org.jetbrains.kotlin:kotlin-stdlib-common:1.2.71", "lang": "java", "sha1": "ba18ca1aa0e40eb6f1865b324af2f4cbb691c1ec", "sha256": "63999687ff2fce8a592dd180ffbbf8f1d21c26b4044c55cdc74ff3cf3b3cf328", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib-common/1.2.71/kotlin-stdlib-common-1.2.71.jar", "name": "org_jetbrains_kotlin_kotlin_stdlib_common", "actual": "@org_jetbrains_kotlin_kotlin_stdlib_common//jar", "bind": "jar/org/jetbrains/kotlin/kotlin_stdlib_common"},
    {"artifact": "org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.2.71", "lang": "java", "sha1": "4ce93f539e2133f172f1167291a911f83400a5d0", "sha256": "b136bd61b240e07d4d92ce00d3bd1dbf584400a7bf5f220c2f3cd22446858082", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib-jdk7/1.2.71/kotlin-stdlib-jdk7-1.2.71.jar", "name": "org_jetbrains_kotlin_kotlin_stdlib_jdk7", "actual": "@org_jetbrains_kotlin_kotlin_stdlib_jdk7//jar", "bind": "jar/org/jetbrains/kotlin/kotlin_stdlib_jdk7"},
    {"artifact": "org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.2.71", "lang": "java", "sha1": "5470d1f752cd342edb77e1062bac07e838d2cea4", "sha256": "ac3c8abf47790b64b4f7e2509a53f0c145e061ac1612a597520535d199946ea9", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib-jdk8/1.2.71/kotlin-stdlib-jdk8-1.2.71.jar", "name": "org_jetbrains_kotlin_kotlin_stdlib_jdk8", "actual": "@org_jetbrains_kotlin_kotlin_stdlib_jdk8//jar", "bind": "jar/org/jetbrains/kotlin/kotlin_stdlib_jdk8"},
    # duplicates in org.jetbrains.kotlin:kotlin-stdlib promoted to 1.2.71
    # - org.jetbrains.kotlin:kotlin-reflect:1.2.51 wanted version 1.2.51
    # - org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.2.71 wanted version 1.2.71
    {"artifact": "org.jetbrains.kotlin:kotlin-stdlib:1.2.71", "lang": "java", "sha1": "d9717625bb3c731561251f8dd2c67a1011d6764c", "sha256": "4c895c270b87f5fec2a2796e1d89c15407ee821de961527c28588bb46afbc68b", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.71/kotlin-stdlib-1.2.71.jar", "name": "org_jetbrains_kotlin_kotlin_stdlib", "actual": "@org_jetbrains_kotlin_kotlin_stdlib//jar", "bind": "jar/org/jetbrains/kotlin/kotlin_stdlib"},
    {"artifact": "org.jetbrains:annotations:13.0", "lang": "java", "sha1": "919f0dfe192fb4e063e7dacadee7f8bb9a2672a9", "sha256": "ace2a10dc8e2d5fd34925ecac03e4988b2c0f851650c94b8cef49ba1bd111478", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/jetbrains/annotations/13.0/annotations-13.0.jar", "name": "org_jetbrains_annotations", "actual": "@org_jetbrains_annotations//jar", "bind": "jar/org/jetbrains/annotations"},
    {"artifact": "org.slf4j:slf4j-api:1.7.25", "lang": "java", "sha1": "da76ca59f6a57ee3102f8f9bd9cee742973efa8a", "sha256": "18c4a0095d5c1da6b817592e767bb23d29dd2f560ad74df75ff3961dbde25b79", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar", "name": "org_slf4j_slf4j_api", "actual": "@org_slf4j_slf4j_api//jar", "bind": "jar/org/slf4j/slf4j_api"},
    ]
def maven_dependencies(callback = jar_artifact_callback):
    """Invoke *callback* once for every resolved maven dependency dict."""
    for dependency in list_dependencies():
        callback(dependency)
| 159.933962
| 609
| 0.751843
| 2,034
| 16,953
| 6.109636
| 0.086037
| 0.056329
| 0.084493
| 0.082562
| 0.630723
| 0.566669
| 0.480003
| 0.423433
| 0.361069
| 0.293474
| 0
| 0.163985
| 0.068719
| 16,953
| 105
| 610
| 161.457143
| 0.623131
| 0.016339
| 0
| 0.042105
| 1
| 0.284211
| 0.772059
| 0.435607
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042105
| false
| 0
| 0.010526
| 0.010526
| 0.073684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4e779751446e43b6a57dd34b97f035ba3270117f
| 117
|
py
|
Python
|
keras_contrib/layers/noise.py
|
WiproOpenSourcePractice/keras-contrib
|
3e77ba234f46b82997271996946b731bc774fb9f
|
[
"MIT"
] | 11
|
2019-03-23T13:23:49.000Z
|
2022-01-20T07:57:56.000Z
|
keras_contrib/layers/noise.py
|
WiproOpenSourcePractice/keras-contrib
|
3e77ba234f46b82997271996946b731bc774fb9f
|
[
"MIT"
] | 1
|
2021-06-18T23:07:54.000Z
|
2021-07-13T21:43:51.000Z
|
keras_contrib/layers/noise.py
|
WiproOpenSourcePractice/keras-contrib
|
3e77ba234f46b82997271996946b731bc774fb9f
|
[
"MIT"
] | 11
|
2017-07-06T14:11:51.000Z
|
2021-08-21T23:18:20.000Z
|
from __future__ import absolute_import
from keras.engine import Layer
from .. import backend as K
import numpy as np
| 23.4
| 38
| 0.820513
| 19
| 117
| 4.789474
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 117
| 4
| 39
| 29.25
| 0.919192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4ea3bc6d97a5e08e9785fbd729d7caa84df4d848
| 11,475
|
py
|
Python
|
plugins/good-morning/main.py
|
fz6m/tomon-naixue
|
dfbdd69836f26d160cece34e204f9fb2ed731607
|
[
"MIT"
] | 3
|
2020-08-23T17:43:09.000Z
|
2020-08-31T04:43:42.000Z
|
plugins/good-morning/main.py
|
fz6m/tomon-naixue
|
dfbdd69836f26d160cece34e204f9fb2ed731607
|
[
"MIT"
] | null | null | null |
plugins/good-morning/main.py
|
fz6m/tomon-naixue
|
dfbdd69836f26d160cece34e204f9fb2ed731607
|
[
"MIT"
] | null | null | null |
import random
from .config import goodMorningInstructionSet, goodNightInstructionSet
from .utils import Tools, Status, Utils, TimeUtils, GoodMorningModel
async def mainProgram(bot, userQQ, userGroup, msg, nickname, cid):
    """Entry point: dispatch a chat message to the morning/night check-in.

    Tools, goodMorningInstructionSet and goodNightInstructionSet come from
    this plugin's .config/.utils modules — presumably commandMatch tests
    *msg* against a set of trigger phrases; verify against those modules.
    """
    # Good morning match
    if Tools.commandMatch(msg, goodMorningInstructionSet):
        sendMsg = await goodMorningInformation(userQQ, userGroup, nickname)
        await bot.send_text(
            cid = cid,
            content = sendMsg
        )
        return
    # Good night detection
    if Tools.commandMatch(msg, goodNightInstructionSet):
        sendMsg = await goodNightInformation(userQQ, userGroup, nickname)
        await bot.send_text(
            cid = cid,
            content = sendMsg
        )
        return
async def userRegistration(userQQ, model):
    """Persist the user's latest check-in record and return Status.SUCCESS.

    The record stores the check-in kind (*model*), a coarse date-like time
    and an accurate timestamp — exact formats are defined by TimeUtils
    (project helper); confirm there if precision matters.
    """
    registrationStructure = {
        'qq': userQQ,
        'model': model,
        'time': TimeUtils.getTheCurrentTime(),
        'accurateTime': TimeUtils.getAccurateTimeNow()
    }
    await Utils.userInformationWriting(userQQ, registrationStructure)
    return Status.SUCCESS
async def createACheckInPool(userGroup, model):
    """Create an empty per-group check-in pool for *model* and persist it.

    The pool is keyed by "<group>-<model>" and starts with no users and a
    zero counter; returns Status.SUCCESS.
    """
    signInPoolStructure = {
        'qun': userGroup,
        'time': TimeUtils.getTheCurrentTime(),
        'accurateTime': TimeUtils.getAccurateTimeNow(),
        'userList': [],
        'number': 0
    }
    await Utils.groupWrite(str(userGroup) + '-' + model, signInPoolStructure)
    return Status.SUCCESS
async def addToCheckInPoolAndGetRanking(userQQ, userGroup, model):
    """Add *userQQ* to the group's check-in pool and return their rank.

    Creates (or rebuilds, when expired) the "<group>-<model>" pool, appends
    the user's record, increments the counter, and returns the new counter
    value — i.e. the user's position in today's check-ins. Morning pools
    expire on a date change; night pools additionally only expire after
    noon when less than 24h have passed (so a pool started last evening
    survives into the early morning).
    """
    if model == GoodMorningModel.MORNING_MODEL.value:
        # Check if there is a check-in pool
        content = await Utils.groupRead(str(userGroup) + '-' + model)
        if content == Status.FAILURE:
            # Create a check-in pool
            await createACheckInPool(userGroup, model)
            content = await Utils.groupRead(str(userGroup) + '-' + model)
        # Check if the pool has expired
        if content['time'] != TimeUtils.getTheCurrentTime():
            # Expired, rebuild the pool
            await createACheckInPool(userGroup, model)
            content = await Utils.groupRead(str(userGroup) + '-' + model)
        # Add users to the check-in pool
        user = await Utils.userInformationReading(userQQ)
        content['userList'].append(user)
        content['number'] += 1
        await Utils.groupWrite(str(userGroup) + '-' + model, content)
        return content['number']
    if model == GoodMorningModel.NIGHT_MODEL.value:
        # Check if there is a check-in pool
        content = await Utils.groupRead(str(userGroup) + '-' + model)
        if content == Status.FAILURE:
            # Create a check-in pool
            await createACheckInPool(userGroup, model)
            content = await Utils.groupRead(str(userGroup) + '-' + model)
        # Check if the pool has expired
        hourNow = TimeUtils.getTheCurrentHour()
        expiryId = False
        if content['time'] != TimeUtils.getTheCurrentTime():
            # Within 24h of pool creation: expire only once it is past noon;
            # 24h or more: always expire.
            if TimeUtils.judgeTimeDifference(content['accurateTime']) < 24:
                if hourNow >= 12:
                    expiryId = True
            else:
                expiryId = True
        if expiryId:
            # Expired, rebuild the pool
            await createACheckInPool(userGroup, model)
            content = await Utils.groupRead(str(userGroup) + '-' + model)
        # Add users to the check-in pool
        user = await Utils.userInformationReading(userQQ)
        content['userList'].append(user)
        content['number'] += 1
        await Utils.groupWrite(str(userGroup) + '-' + model, content)
        return content['number']
async def goodMorningInformation(userQQ, userGroup, nickname):
    """Build the reply text for a "good morning" check-in.

    Registers/updates the user's record, ranks them in today's morning
    pool, and composes an @-mention + random greeting + rank suffix. When
    the previous record was a good-night, also reports the computed sleep
    duration. Returns Status.FAILURE when no branch produced a reply
    (e.g. a second good-morning on the same day after the 4h window) —
    NOTE(review): callers appear to send this value as-is; confirm that is
    intended.
    """
    # Check if registered
    registered = await Utils.userInformationReading(userQQ)
    send = Tools.at(userQQ)
    if registered == Status.FAILURE:
        # First-ever check-in: register as a morning record.
        await userRegistration(userQQ, GoodMorningModel.MORNING_MODEL.value)
        # Add to check-in pool and get ranking
        rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.MORNING_MODEL.value)
        send += (await Utils.extractRandomWords(GoodMorningModel.MORNING_MODEL.value, nickname) + '\n' +
                 (await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                     GoodMorningModel.MORNING_MODEL.value)).replace(r'{number}', str(rank)))
        return send
    # Already registered
    if registered['model'] == GoodMorningModel.MORNING_MODEL.value:
        # Repeated within 4 hours: send the "already triggered" message.
        if TimeUtils.judgeTimeDifference(registered['accurateTime']) <= 4:
            send += await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('triggered', GoodMorningModel.MORNING_MODEL.value)
            return send
        # Good morning no twice a day
        if registered['time'] != TimeUtils.getTheCurrentTime():
            await userRegistration(userQQ, GoodMorningModel.MORNING_MODEL.value)
            rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.MORNING_MODEL.value)
            send += (await Utils.extractRandomWords(GoodMorningModel.MORNING_MODEL.value, nickname) + '\n' +
                     (await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                         GoodMorningModel.MORNING_MODEL.value)).replace(r'{number}', str(rank)))
            return send
    if registered['model'] == GoodMorningModel.NIGHT_MODEL.value:
        # Last record was a good-night: the elapsed time is the sleep span.
        sleepingTime = TimeUtils.judgeTimeDifference(registered['accurateTime'])
        # Waking less than 4h after good-night: refuse with a hint.
        if sleepingTime <= 4:
            send += await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('unable_to_trigger', GoodMorningModel.MORNING_MODEL.value)
            return send
        # Sleep time cannot exceed 24 hours
        await userRegistration(userQQ, GoodMorningModel.MORNING_MODEL.value)
        if sleepingTime < 24:
            send += await Utils.extractRandomWords(GoodMorningModel.MORNING_MODEL.value, nickname)
            # Calculate Wake Up Ranking
            rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.MORNING_MODEL.value)
            send += ((await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                GoodMorningModel.MORNING_MODEL.value)).replace(r'{number}', str(rank)) + '\n')
            # Calculate precise sleep time; [0] is presumably the hour
            # component — TODO confirm against TimeUtils.
            sleepPreciseTime = TimeUtils.calculateTheElapsedTimeCombination(registered['accurateTime'])
            if sleepPreciseTime[0] >= 9:
                send += TimeUtils.replaceHourMinuteAndSecond(sleepPreciseTime,
                    (await Utils.readConfiguration(GoodMorningModel.MORNING_MODEL.value))['sleeping_time'][1]['content'])
            elif sleepPreciseTime[0] >= 7:
                send += TimeUtils.replaceHourMinuteAndSecond(sleepPreciseTime,
                    (await Utils.readConfiguration(GoodMorningModel.MORNING_MODEL.value))['sleeping_time'][0]['content'])
            else:
                send += TimeUtils.replaceHourMinuteAndSecond(sleepPreciseTime,
                    (await Utils.readConfiguration(GoodMorningModel.MORNING_MODEL.value))['too_little_sleep'])
        else:
            # Slept 24h or more: plain greeting + rank, no sleep report.
            rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.MORNING_MODEL.value)
            send += (await Utils.extractRandomWords(GoodMorningModel.MORNING_MODEL.value, nickname) + '\n' +
                     (await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                         GoodMorningModel.MORNING_MODEL.value)).replace(r'{number}', str(rank)))
        return send
    return Status.FAILURE
async def goodNightInformation(userQQ, userGroup, nickname):
    """Handle a "good night" check-in for *userQQ* in group *userGroup*.

    Returns the reply text to send (prefixed with an @-mention of the user),
    or ``Status.FAILURE`` when the stored record matches no handled case.
    """
    # Load any prior check-in record; Status.FAILURE means "never registered".
    registered = await Utils.userInformationReading(userQQ)
    send = Tools.at(userQQ)
    if registered == Status.FAILURE:
        # First-ever check-in: register the user in night mode.
        await userRegistration(userQQ, GoodMorningModel.NIGHT_MODEL.value)
        # Add to tonight's check-in pool and get the ranking.
        rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.NIGHT_MODEL.value)
        send += (await Utils.extractRandomWords(GoodMorningModel.NIGHT_MODEL.value, nickname) + '\n' +
                 (await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                  GoodMorningModel.NIGHT_MODEL.value)).replace(r'{number}', str(rank)))
        return send
    # Already registered.
    if registered['model'] == GoodMorningModel.NIGHT_MODEL.value:
        # Said good night again within 4 hours: reply with the 'triggered' text.
        if TimeUtils.judgeTimeDifference(registered['accurateTime']) <= 4:
            send += await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('triggered', GoodMorningModel.NIGHT_MODEL.value)
            return send
        # Two good nights cannot be less than 12 hours apart.
        if TimeUtils.judgeTimeDifference(registered['accurateTime']) >= 12:
            await userRegistration(userQQ, GoodMorningModel.NIGHT_MODEL.value)
            rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.NIGHT_MODEL.value)
            send += (await Utils.extractRandomWords(GoodMorningModel.NIGHT_MODEL.value, nickname) + '\n' +
                     (await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                      GoodMorningModel.NIGHT_MODEL.value)).replace(r'{number}', str(rank)))
            return send
        # NOTE(review): for a night-mode gap in (4, 12) hours, execution falls
        # through to the final ``return Status.FAILURE`` (no reply) — confirm
        # that is the intended behavior.
    if registered['model'] == GoodMorningModel.MORNING_MODEL.value:
        # User previously said good morning; hours awake since then.
        soberTime = TimeUtils.judgeTimeDifference(registered['accurateTime'])
        # Awake less than 4 hours: refuse to trigger good night.
        if soberTime <= 4:
            send += await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('unable_to_trigger', GoodMorningModel.NIGHT_MODEL.value)
            return send
        # Switch the record to night mode; sober time cannot exceed 24 hours.
        await userRegistration(userQQ, GoodMorningModel.NIGHT_MODEL.value)
        if soberTime < 24:
            send += await Utils.extractRandomWords(GoodMorningModel.NIGHT_MODEL.value, nickname)
            rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.NIGHT_MODEL.value)
            send += ((await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                      GoodMorningModel.NIGHT_MODEL.value)).replace(r'{number}', str(rank)) + '\n')
            # Precise awake duration; index 0 appears to be whole hours
            # (same usage as the morning handler above) — TODO confirm.
            soberAccurateTime = TimeUtils.calculateTheElapsedTimeCombination(registered['accurateTime'])
            if soberAccurateTime[0] >= 12:
                # 12h+ awake: fixed "long day" message from the config.
                send += TimeUtils.replaceHourMinuteAndSecond(soberAccurateTime,
                        (await Utils.readConfiguration(GoodMorningModel.NIGHT_MODEL.value))['working_hours'][2]['content'])
            else:
                # Shorter day: pick a random 'working_hours' message.
                send += TimeUtils.replaceHourMinuteAndSecond(soberAccurateTime,
                        random.choice((await Utils.readConfiguration(GoodMorningModel.NIGHT_MODEL.value))['working_hours'])['content'])
        else:
            # Awake 24h or more: plain good-night reply with ranking only.
            rank = await addToCheckInPoolAndGetRanking(userQQ, userGroup, GoodMorningModel.NIGHT_MODEL.value)
            send += (await Utils.extractRandomWords(GoodMorningModel.NIGHT_MODEL.value, nickname) + '\n' +
                     (await Utils.extractConfigurationInformationAccordingToSpecifiedParameters('suffix',
                      GoodMorningModel.NIGHT_MODEL.value)).replace(r'{number}', str(rank)))
        return send
    return Status.FAILURE
| 53.621495
| 152
| 0.675817
| 994
| 11,475
| 7.744467
| 0.136821
| 0.058457
| 0.083658
| 0.098597
| 0.833593
| 0.777345
| 0.739413
| 0.714211
| 0.694466
| 0.612627
| 0
| 0.003742
| 0.231547
| 11,475
| 214
| 153
| 53.621495
| 0.869245
| 0.066405
| 0
| 0.634731
| 0
| 0
| 0.04765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017964
| 0
| 0.125749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4ebbd4818dc7bb79d00626d86f42c008d5f1a870
| 7,984
|
py
|
Python
|
test/unit_tests/providers/test_slideshare.py
|
ourresearch/total-impact-webapp
|
ab0d011dc783491bc85aadc2dc9c0f204e59429e
|
[
"MIT"
] | 4
|
2015-10-22T10:11:01.000Z
|
2017-06-04T18:08:28.000Z
|
test/unit_tests/providers/test_slideshare.py
|
Impactstory/total-impact-webapp
|
ab0d011dc783491bc85aadc2dc9c0f204e59429e
|
[
"MIT"
] | 2
|
2015-01-11T05:45:59.000Z
|
2015-02-11T20:37:05.000Z
|
test/unit_tests/providers/test_slideshare.py
|
Impactstory/total-impact-webapp
|
ab0d011dc783491bc85aadc2dc9c0f204e59429e
|
[
"MIT"
] | 3
|
2015-01-10T03:23:13.000Z
|
2015-10-11T15:49:41.000Z
|
from test.unit_tests.providers import common
from test.unit_tests.providers.common import ProviderTestCase
from totalimpact.providers.provider import Provider, ProviderContentMalformedError
from totalimpact.providers import provider
from test.utils import http
import os
import collections
from nose.tools import assert_equals, raises, nottest, assert_true
# Directory holding canned Slideshare HTTP responses used by these tests.
datadir = os.path.join(os.path.split(__file__)[0], "../../../extras/sample_provider_pages/slideshare")

# One saved sample page per provider operation exercised below.
SAMPLE_EXTRACT_MEMBER_ITEMS_PAGE = os.path.join(datadir, "members")
SAMPLE_EXTRACT_METRICS_PAGE = os.path.join(datadir, "metrics")
SAMPLE_EXTRACT_ALIASES_PAGE = os.path.join(datadir, "aliases")
SAMPLE_EXTRACT_BIBLIO_PAGE = os.path.join(datadir, "biblio")

# Live-test fixtures: slide URLs and an account name on slideshare.net.
TEST_URL = "http://www.slideshare.net/cavlec/manufacturing-serendipity-12176916"
TEST_URL2 = "www.slideshare.net/hpiwowar/right-time-right-place-to-change-the-world"
TEST_SLIDESHARE_USER = "cavlec"
class TestSlideshare(ProviderTestCase):
    """Unit tests for the Slideshare provider, driven by saved sample pages
    (offline) plus two @http-gated live-network tests."""

    provider_name = "slideshare"

    testitem_members = "cavlec"
    testitem_aliases = ("url", TEST_URL)
    testitem_metrics = ("url", TEST_URL)
    testitem_biblio = ("url", TEST_URL)

    def setUp(self):
        ProviderTestCase.setUp(self)

    def test_is_relevant_alias(self):
        # ensure that it matches an appropriate alias and rejects others
        assert_equals(self.provider.is_relevant_alias(self.testitem_aliases), True)
        assert_equals(self.provider.is_relevant_alias(("github", "egonw,cdk")), False)

    def test_extract_members(self):
        # with-statement closes the sample file even if an assertion fails
        with open(SAMPLE_EXTRACT_MEMBER_ITEMS_PAGE, "r") as f:
            members = self.provider._extract_members(f.read(), TEST_SLIDESHARE_USER)
        assert_equals(len(members), 36)
        # BUG FIX: this was assert_true('url', u'...' in members) — nose's
        # assert_true(expr, msg) then tested the always-truthy string 'url'
        # and passed unconditionally.  Test the alias-tuple membership.
        assert_true(('url', u'http://www.slideshare.net/cavlec/avoiding-heronway') in members)

    def test_extract_biblio(self):
        with open(SAMPLE_EXTRACT_BIBLIO_PAGE, "r") as f:
            ret = self.provider._extract_biblio(f.read())
        assert_equals(ret, {'username': u'cavlec', 'title': u'Manufacturing Serendipity', 'repository': 'Slideshare', 'created': u'Tue Mar 27 10:10:11 -0500 2012'})

    def test_extract_aliases(self):
        # ensure that the reader can interpret the saved aliases page
        with open(SAMPLE_EXTRACT_ALIASES_PAGE, "r") as f:
            aliases = self.provider._extract_aliases(f.read())
        assert_equals(aliases, [('title', u'Manufacturing Serendipity')])

    def test_extract_metrics_success(self):
        with open(SAMPLE_EXTRACT_METRICS_PAGE, "r") as f:
            metrics_dict = self.provider._extract_metrics(f.read())
        assert_equals(metrics_dict["slideshare:views"], 337)
        assert_equals(metrics_dict["slideshare:downloads"], 4)

    @http
    def test_metrics(self):
        metrics_dict = self.provider.metrics([self.testitem_metrics])
        expected = {'slideshare:downloads': (4, 'http://www.slideshare.net/cavlec/manufacturing-serendipity-12176916'), 'slideshare:views': (543, 'http://www.slideshare.net/cavlec/manufacturing-serendipity-12176916'), 'slideshare:favorites': (2, 'http://www.slideshare.net/cavlec/manufacturing-serendipity-12176916')}
        print(metrics_dict)
        # live counts only grow over time, so >= on the value but exact
        # equality on the drilldown url
        for key in expected:
            assert metrics_dict[key][0] >= expected[key][0], [key, metrics_dict[key], expected[key]]
            assert metrics_dict[key][1] == expected[key][1], [key, metrics_dict[key], expected[key]]

    @http
    def test_provider_import(self):
        test_tabs = {"account_name": "cavlec", "standard_urls_input": TEST_URL2}
        members = provider.import_products("slideshare", test_tabs)
        print(members)
        expected = [
            ('url', u'https://www.slideshare.net/hpiwowar/right-time-right-place-to-change-the-world'),
            ('url', u'http://www.slideshare.net/cavlec/week8-5557551'),
            ('url', u'http://www.slideshare.net/cavlec/canoe-the-open-content-rapids'),
            ('url', u'http://www.slideshare.net/cavlec/so-you-think-you-know-libraries'),
            ('url', u'http://www.slideshare.net/cavlec/what-we-organize'),
            ('url', u'http://www.slideshare.net/cavlec/escapar-la-carrera-de-la-reina'),
            ('url', u'http://www.slideshare.net/cavlec/librarians-love-data'),
            ('url', u'http://www.slideshare.net/cavlec/even-the-loons-are-licensed'),
            ('url', u'http://www.slideshare.net/cavlec/institutional-repositories-rebirth-of-the-phoenix'),
            ('url', u'http://www.slideshare.net/cavlec/manufacturing-serendipity-12176916'),
            ('url', u'http://www.slideshare.net/cavlec/canoe-the-open-content-rapids-2862487'),
            ('url', u'http://www.slideshare.net/cavlec/encryption-27779361'),
            ('url', u'http://www.slideshare.net/cavlec/who-owns-our-work-notes'),
            ('url', u'http://www.slideshare.net/cavlec/rdf-rda-and-other-tlas'),
            ('url', u'http://www.slideshare.net/cavlec/whats-driving-open-access'),
            ('url', u'http://www.slideshare.net/cavlec/manufacturing-serendipity'),
            ('url', u'http://www.slideshare.net/cavlec/i-own-copyright-so-i-pwn-you'),
            ('url', u'http://www.slideshare.net/cavlec/paying-forit'),
            ('url', u'http://www.slideshare.net/cavlec/grab-a-bucket-its-raining-data'),
            ('url', u'http://www.slideshare.net/cavlec/soylent-semantic-web-is-people-with-notes'),
            ('url', u'http://www.slideshare.net/cavlec/who-owns-our-work'),
            ('url', u'http://www.slideshare.net/cavlec/soylent-semanticweb-is-people'),
            ('url', u'http://www.slideshare.net/cavlec/open-sesame-and-other-open-movements'),
            ('url', u'http://www.slideshare.net/cavlec/digital-preservation-and-institutional-repositories'),
            ('url', u'http://www.slideshare.net/cavlec/a-successful-failure-community-requirements-gathering-for-dspace'),
            ('url', u'http://www.slideshare.net/cavlec/project-management-16606291'),
            ('url', u'http://www.slideshare.net/cavlec/databases-markup-and-regular-expressions'),
            ('url', u'http://www.slideshare.net/cavlec/solving-problems-with-web-20'),
            ('url', u'http://www.slideshare.net/cavlec/educators-together'),
            ('url', u'http://www.slideshare.net/cavlec/le-ir-cest-mort-vive-le-ir'),
            ('url', u'http://www.slideshare.net/cavlec/open-content'),
            ('url', u'http://www.slideshare.net/cavlec/so-are-we-winning-yet'),
            ('url', u'http://www.slideshare.net/cavlec/save-the-cows-data-curation-for-the-rest-of-us-1533252'),
            ('url', u'http://www.slideshare.net/cavlec/grab-a-bucket-its-raining-data-2134106'),
            ('url', u'http://www.slideshare.net/cavlec/nsa-27779364'),
            ('url', u'http://www.slideshare.net/cavlec/is-this-big-data-which-i-see-before-me'),
            ('url', u'http://www.slideshare.net/cavlec/escaping-the-red-queens-race-with-open-access'),
            ('url', u'http://www.slideshare.net/cavlec/research-data-and-scholarly-communication'),
            ('url', u'http://www.slideshare.net/cavlec/so-arewewinningyet-notes'),
            ('url', u'http://www.slideshare.net/cavlec/week13-5972690'),
            ('url', u'http://www.slideshare.net/cavlec/privacy-inlibs'),
            ('url', u'http://www.slideshare.net/cavlec/marc-and-bibframe-linking-libraries-and-archives'),
            ('url', u'http://www.slideshare.net/cavlec/frbr-and-rda'),
            ('url', u'http://www.slideshare.net/cavlec/the-social-journal'),
            ('url', u'http://www.slideshare.net/cavlec/occupy-copyright'),
            ('url', u'http://www.slideshare.net/cavlec/research-data-and-scholarly-communication-16366049'),
            ('url', u'http://www.slideshare.net/cavlec/what-youre-up-against'),
            ('url', u'http://www.slideshare.net/cavlec/escaping-datageddon'),
            ('url', u'http://www.slideshare.net/cavlec/risk-management-and-auditing'),
            ('url', u'http://www.slideshare.net/cavlec/the-canonically-bad-digital-humanities-proposal'),
            ('url', u'http://www.slideshare.net/cavlec/data-and-the-law'),
            ('url', u'http://www.slideshare.net/cavlec/ejournals-and-open-access'),
            ('url', u'http://www.slideshare.net/cavlec/preservation-and-institutional-repositories-for-the-digital-arts-and-humanities'),
            ('url', u'http://www.slideshare.net/cavlec/avoiding-heronway'),
            ('url', u'http://www.slideshare.net/cavlec/taming-the-monster-digital-preservation-planning-and-implementation-tools'),
            ('url', u'http://www.slideshare.net/cavlec/library-linked-data'),
        ]
        for member in expected:
            assert member in members
| 99.8
| 4,344
| 0.726202
| 1,119
| 7,984
| 5.089366
| 0.245755
| 0.141528
| 0.174188
| 0.210711
| 0.507112
| 0.453205
| 0.443371
| 0.287445
| 0.167867
| 0.098332
| 0
| 0.020011
| 0.086172
| 7,984
| 79
| 4,345
| 101.063291
| 0.760554
| 0.013652
| 0
| 0.032787
| 0
| 0.344262
| 0.571084
| 0.014992
| 0
| 0
| 0
| 0
| 0.196721
| 0
| null | null | 0
| 0.163934
| null | null | 0.032787
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4ebc5860728f6bf2e085520108e5d15a38534a71
| 221
|
py
|
Python
|
cmp_telegram_pusher/src/controllers/interfaces/MessageRegistrar.py
|
andrii-z4i/xmind-telegram
|
82e50ae0ada048b87a2c082bbdd4510e02cb3694
|
[
"MIT"
] | null | null | null |
cmp_telegram_pusher/src/controllers/interfaces/MessageRegistrar.py
|
andrii-z4i/xmind-telegram
|
82e50ae0ada048b87a2c082bbdd4510e02cb3694
|
[
"MIT"
] | 16
|
2018-05-07T09:42:56.000Z
|
2018-11-19T06:05:51.000Z
|
cmp_telegram_pusher/src/controllers/interfaces/MessageRegistrar.py
|
andrii-z4i/xmind-telegram
|
82e50ae0ada048b87a2c082bbdd4510e02cb3694
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from shared.model import MessageContainer
class MessageRegistrar(ABC):
    """Abstract interface for components that persist incoming messages."""

    @abstractmethod
    def store_message(self, message_body: str) -> None:
        """Persist *message_body*; concrete registrars must override this."""
        raise NotImplementedError()
| 24.555556
| 55
| 0.760181
| 24
| 221
| 6.916667
| 0.75
| 0.204819
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171946
| 221
| 9
| 56
| 24.555556
| 0.907104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
14e0de81c17877378bf1b55e3760f43a8dd0f9f3
| 432
|
py
|
Python
|
task5.py
|
missKatiaPunter/python-coursework
|
80c8760ad2d337a8cdac6d64f30fd429b67d7d99
|
[
"MIT"
] | null | null | null |
task5.py
|
missKatiaPunter/python-coursework
|
80c8760ad2d337a8cdac6d64f30fd429b67d7d99
|
[
"MIT"
] | null | null | null |
task5.py
|
missKatiaPunter/python-coursework
|
80c8760ad2d337a8cdac6d64f30fd429b67d7d99
|
[
"MIT"
] | null | null | null |
# Task 5
# You have found a mystery function; all you know are some of its inputs/outputs:
# mystery_num(300) ==> returns 2
# mystery_num(6996) ==> returns 4
# mystery_num(666) ==> returns 3
# mystery_num(90783) ==> returns 4
# mystery_num(1233321457) ==> returns 0
# mystery_num(81234) ==> returns 2
# mystery_num(89282350306) ==> returns 8
# mystery_num(3479283469) ==> returns 5
# Write the function.
def mystery_num(num):
    """Count the enclosed loops in the decimal digits of *num*.

    Each digit contributes the number of closed regions in its printed
    glyph: 0, 6 and 9 have one loop, 8 has two, every other digit (and a
    minus sign) has none.  This reproduces all sample input/output pairs
    listed above, e.g. mystery_num(300) == 2 and mystery_num(89282350306) == 8.
    """
    loops_per_digit = {'0': 1, '6': 1, '8': 2, '9': 1}
    return sum(loops_per_digit.get(digit, 0) for digit in str(num))
| 30.857143
| 81
| 0.706019
| 64
| 432
| 4.625
| 0.546875
| 0.304054
| 0.101351
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165289
| 0.159722
| 432
| 14
| 82
| 30.857143
| 0.650138
| 0.881944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
14e2b60e2f696e2cff3f74e7af5f80b7ec1f35a4
| 85
|
py
|
Python
|
__init__.py
|
sesquideus/scalyca
|
ae1e8bcf4dbbdf1b653e0dd89a842a202cbbc624
|
[
"MIT"
] | null | null | null |
__init__.py
|
sesquideus/scalyca
|
ae1e8bcf4dbbdf1b653e0dd89a842a202cbbc624
|
[
"MIT"
] | null | null | null |
__init__.py
|
sesquideus/scalyca
|
ae1e8bcf4dbbdf1b653e0dd89a842a202cbbc624
|
[
"MIT"
] | null | null | null |
from .scalyca import Scala, Scalyca
from .utilities import ReadableDir, WriteableDir
| 28.333333
| 48
| 0.835294
| 10
| 85
| 7.1
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 85
| 2
| 49
| 42.5
| 0.946667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
14f30a7b6a9d5232b5d721cd14792a63df21179d
| 102
|
py
|
Python
|
src/chapter1.py
|
andyliumathematics/princeton-calculus
|
9be5f4038a67b90f9844d1c9d4592dc2a3bf4647
|
[
"Apache-2.0"
] | null | null | null |
src/chapter1.py
|
andyliumathematics/princeton-calculus
|
9be5f4038a67b90f9844d1c9d4592dc2a3bf4647
|
[
"Apache-2.0"
] | null | null | null |
src/chapter1.py
|
andyliumathematics/princeton-calculus
|
9be5f4038a67b90f9844d1c9d4592dc2a3bf4647
|
[
"Apache-2.0"
] | null | null | null |
# %%
from sympy import Symbol
from sympy import plot
# Plot f(x) = x**2 symbolically over the interval [-4, 2].
x = Symbol('x')
f = x**2
plot(f,(x,-4,2))
# %%
| 12.75
| 24
| 0.578431
| 19
| 102
| 3.105263
| 0.473684
| 0.305085
| 0.508475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.205882
| 102
| 7
| 25
| 14.571429
| 0.691358
| 0.04902
| 0
| 0
| 0
| 0
| 0.010638
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0947223e4288e72867f9482453beae975c34c6a8
| 50
|
py
|
Python
|
judge/settings/__init__.py
|
shan18/Online-Judge
|
b03e1df9eaa91957b635b6527f4abf5509495b56
|
[
"MIT"
] | 1
|
2020-07-26T20:54:53.000Z
|
2020-07-26T20:54:53.000Z
|
judge/settings/__init__.py
|
shan18/Online-Judge
|
b03e1df9eaa91957b635b6527f4abf5509495b56
|
[
"MIT"
] | null | null | null |
judge/settings/__init__.py
|
shan18/Online-Judge
|
b03e1df9eaa91957b635b6527f4abf5509495b56
|
[
"MIT"
] | null | null | null |
from .production import *
# from .local import *
| 12.5
| 25
| 0.7
| 6
| 50
| 5.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 50
| 3
| 26
| 16.666667
| 0.875
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0956151bd7ce7420fb63ab57e2d787ee0beae934
| 153
|
py
|
Python
|
compiled/construct/valid_fail_contents.py
|
smarek/ci_targets
|
c5edee7b0901fd8e7f75f85245ea4209b38e0cb3
|
[
"MIT"
] | 4
|
2017-04-08T12:55:11.000Z
|
2020-12-05T21:09:31.000Z
|
compiled/construct/valid_fail_contents.py
|
smarek/ci_targets
|
c5edee7b0901fd8e7f75f85245ea4209b38e0cb3
|
[
"MIT"
] | 7
|
2018-04-23T01:30:33.000Z
|
2020-10-30T23:56:14.000Z
|
compiled/construct/valid_fail_contents.py
|
smarek/ci_targets
|
c5edee7b0901fd8e7f75f85245ea4209b38e0cb3
|
[
"MIT"
] | 6
|
2017-04-08T11:41:14.000Z
|
2020-10-30T22:47:31.000Z
|
from construct import *
from construct.lib import *
# Construct spec: field 'foo' is a fixed-size region of exactly 2 raw bytes.
valid_fail_contents = Struct(
    'foo' / FixedSized(2, GreedyBytes),
)
# NOTE(review): '_schema' is presumably the conventional name the test
# harness imports from each compiled spec module — confirm against the runner.
_schema = valid_fail_contents
| 17
| 36
| 0.764706
| 19
| 153
| 5.894737
| 0.684211
| 0.232143
| 0.303571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007634
| 0.143791
| 153
| 8
| 37
| 19.125
| 0.847328
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
095aafdd4b924c03f4b72c04afaa459f0064ed5b
| 56
|
py
|
Python
|
cloverapi/__init__.py
|
jmphilli/clover-api-python
|
e67f7578e7d46fb4753b10f3827fe5684f4678a6
|
[
"MIT"
] | 5
|
2018-08-02T18:40:51.000Z
|
2022-03-04T17:13:55.000Z
|
cloverapi/__init__.py
|
jmphilli/clover-api-python
|
e67f7578e7d46fb4753b10f3827fe5684f4678a6
|
[
"MIT"
] | 2
|
2018-12-13T15:51:30.000Z
|
2020-05-26T02:29:47.000Z
|
cloverapi/__init__.py
|
jmphilli/clover-api-python
|
e67f7578e7d46fb4753b10f3827fe5684f4678a6
|
[
"MIT"
] | 11
|
2018-12-12T19:22:48.000Z
|
2021-02-02T00:48:16.000Z
|
from cloverapi.cloverapi_client import CloverApiClient
| 18.666667
| 54
| 0.892857
| 6
| 56
| 8.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089286
| 56
| 2
| 55
| 28
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1189d28af4a59e7a26e3152176a77da4b0778007
| 237
|
py
|
Python
|
backend/app/__init__.py
|
Sirius-ctrl/COMP90024-Project2-Distributed-Twitter-Analyser
|
7b19bb3bb995bec981a3d159e5e4e853361341b0
|
[
"Apache-2.0"
] | 9
|
2020-08-09T14:31:48.000Z
|
2022-03-15T09:41:28.000Z
|
backend/app/__init__.py
|
Sirius-ctrl/COMP90024-Project2-Distributed-Twitter-Analyser
|
7b19bb3bb995bec981a3d159e5e4e853361341b0
|
[
"Apache-2.0"
] | null | null | null |
backend/app/__init__.py
|
Sirius-ctrl/COMP90024-Project2-Distributed-Twitter-Analyser
|
7b19bb3bb995bec981a3d159e5e4e853361341b0
|
[
"Apache-2.0"
] | 8
|
2020-06-30T12:37:55.000Z
|
2022-03-03T11:12:23.000Z
|
"""
Author: XuLin Yang & Renjie Meng
Student id: 904904 & 877396
Date: 2020-4-24 01:16:19
Description: creates the application object as an instance of class Flask imported from the flask package.
"""
from app import aurin
| 26.333333
| 106
| 0.7173
| 36
| 237
| 4.722222
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13369
| 0.21097
| 237
| 8
| 107
| 29.625
| 0.775401
| 0.864979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
11e102a796adaf58782ed3f3edc74ff89132e10d
| 1,068
|
py
|
Python
|
End2EndPurification/blocking_times.py
|
mercari/quantum-entanglement-purification-simulator
|
94b6616216589ef017e415227e7cdf61a6f8b6b0
|
[
"MIT"
] | null | null | null |
End2EndPurification/blocking_times.py
|
mercari/quantum-entanglement-purification-simulator
|
94b6616216589ef017e415227e7cdf61a6f8b6b0
|
[
"MIT"
] | null | null | null |
End2EndPurification/blocking_times.py
|
mercari/quantum-entanglement-purification-simulator
|
94b6616216589ef017e415227e7cdf61a6f8b6b0
|
[
"MIT"
] | null | null | null |
class BlockingTimes:
    """Pair of accumulated blocking times: intermediate nodes vs. end nodes."""

    # Constant kept from the original implementation; it is not read inside
    # this class — presumably a cap applied by callers (TODO confirm).
    LIMIT = 1000000

    def __init__(self, bloking_time_int_node, blocking_time_end_node) -> None:
        # The first parameter keeps its original (misspelled) name so that
        # keyword callers remain compatible.
        self.blocking_time_int_node = bloking_time_int_node
        self.blocking_time_end_node = blocking_time_end_node

    def __repr__(self):
        return "(b_time_interm_node:"+ '{:.5g}'.format(self.blocking_time_int_node) +", b_time_end_node:"+ '{:.5g}'.format(self.blocking_time_end_node) + ")"

    @staticmethod
    def merge_blocking_times(bt_left, bt_right):
        """Return a new BlockingTimes with component-wise sums of two instances."""
        return BlockingTimes(bt_left.blocking_time_int_node + bt_right.blocking_time_int_node,
                             bt_left.blocking_time_end_node + bt_right.blocking_time_end_node)

    def add_blocking_times(self, blocking_time_int_node, blocking_time_end_node):
        """Return a new BlockingTimes offset by the given raw times.

        BUG FIX: this was decorated ``@staticmethod`` while still taking
        ``self``, so instance calls bound the first time argument to ``self``.
        It is now a normal instance method; class-qualified calls of the form
        ``BlockingTimes.add_blocking_times(obj, a, b)`` behave exactly as before.
        """
        return BlockingTimes(self.blocking_time_int_node + blocking_time_int_node,
                             self.blocking_time_end_node + blocking_time_end_node)

    def multiply_blocking_times(self, multiplier):
        """Return a new BlockingTimes with both components scaled by *multiplier*.

        BUG FIX: same erroneous ``@staticmethod`` removed as in
        ``add_blocking_times`` above.
        """
        return BlockingTimes(self.blocking_time_int_node * multiplier,
                             self.blocking_time_end_node * multiplier)
| 62.823529
| 158
| 0.781835
| 150
| 1,068
| 4.993333
| 0.193333
| 0.288385
| 0.161549
| 0.253672
| 0.678238
| 0.53004
| 0.360481
| 0.141522
| 0.141522
| 0.141522
| 0
| 0.00973
| 0.133895
| 1,068
| 16
| 159
| 66.75
| 0.8
| 0
| 0
| 0.1875
| 0
| 0
| 0.047753
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0
| 0
| 0.25
| 0.6875
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
11f8b49c41268945b9ec8cc9da45b0cf2b608dde
| 548
|
py
|
Python
|
PyOpenGL-3.0.2/OpenGL/raw/GL/NV/texture_barrier.py
|
frederica07/Dragon_Programming_Process
|
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
|
[
"BSD-2-Clause"
] | null | null | null |
PyOpenGL-3.0.2/OpenGL/raw/GL/NV/texture_barrier.py
|
frederica07/Dragon_Programming_Process
|
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
|
[
"BSD-2-Clause"
] | null | null | null |
PyOpenGL-3.0.2/OpenGL/raw/GL/NV/texture_barrier.py
|
frederica07/Dragon_Programming_Process
|
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
|
[
"BSD-2-Clause"
] | null | null | null |
'''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p, constants as _cs, arrays
from OpenGL.GL import glget
import ctypes
# Name of the GL extension this autogenerated module wraps.
EXTENSION_NAME = 'GL_NV_texture_barrier'
def _f( function ):
    # Bind *function* as an entry point of the GL_NV_texture_barrier extension.
    return _p.createFunction( function,_p.GL,'GL_NV_texture_barrier',False)
@_f
@_p.types(None,)
def glTextureBarrierNV( ):pass
def glInitTextureBarrierNV():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    return extensions.hasGLExtension( EXTENSION_NAME )
| 30.444444
| 75
| 0.775547
| 72
| 548
| 5.666667
| 0.597222
| 0.073529
| 0.078431
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138686
| 548
| 17
| 76
| 32.235294
| 0.864407
| 0.213504
| 0
| 0
| 1
| 0
| 0.1
| 0.1
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.083333
| 0.333333
| 0.083333
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
01137d37e254794642b22504259bd11e043d0fc3
| 4,061
|
py
|
Python
|
car/TF_RefineDet_CIDI3/model/VGG.py
|
donghaiwang/VisualTracking_DRL
|
8fbe03f3b56a90ebd53173a2d367f49e52c25a4c
|
[
"Apache-2.0"
] | 4
|
2018-12-07T12:47:13.000Z
|
2021-12-21T08:46:50.000Z
|
car/TF_RefineDet_CIDI3/model/VGG.py
|
donghaiwang/VisualTracking_DRL
|
8fbe03f3b56a90ebd53173a2d367f49e52c25a4c
|
[
"Apache-2.0"
] | null | null | null |
car/TF_RefineDet_CIDI3/model/VGG.py
|
donghaiwang/VisualTracking_DRL
|
8fbe03f3b56a90ebd53173a2d367f49e52c25a4c
|
[
"Apache-2.0"
] | 2
|
2018-10-29T09:29:19.000Z
|
2021-12-21T08:46:52.000Z
|
# -*- coding: UTF-8 -*-
"""
vgg(base network)->RefineDet_tf for vechile detection.
@author: xie wei
"""
from model.layers_group import *
slim = tf.contrib.slim
def VGG(inputs,name,training=True,w_summary=True,keep_prob = 1.0,use_bn = False, reuse = False):
    """VGG-16-style backbone producing a dict of endpoint tensors for RefineDet.

    Args:
        inputs: input image tensor.
        name: variable scope name; also prefixes the endpoint collection key.
        training: training-mode flag forwarded to the conv layer helpers.
        w_summary: whether layer helpers record weight summaries.
        keep_prob: dropout keep probability — unused in this body.
        use_bn: if True use conv+BN+ReLU layers, otherwise plain conv+ReLU.
        reuse: reuse flag for the variable scope.

    Returns:
        dict of endpoint tensors collected under ``name + '_end_logits'``,
        with 'conv4_3', 'conv5_3' and 'fc7' inserted explicitly.
    """
    with tf.variable_scope(name,reuse=reuse):
        end_points_collection = name + '_end_logits'
        if (use_bn):
            # Blocks 1-5 with batch norm: 2-3 3x3 convs followed by 2x2 max pool.
            conv1_1 = conv_bn_relu(inputs, 64, 3, 1,'SAME', training, w_summary, name='conv1_1')
            conv1_2 = conv_bn_relu(conv1_1, 64, 3, 1, 'SAME', training, w_summary, name='conv1_2')
            pool1 = pool(conv1_2,2, 2, 'max', name='pool1')
            conv2_1 = conv_bn_relu(pool1, 128, 3, 1, 'SAME', training, w_summary, name='conv2_1')
            conv2_2 = conv_bn_relu(conv2_1, 128, 3, 1, 'SAME', training, w_summary, name='conv2_2')
            pool2 = pool(conv2_2, 2, 2, 'max', name='pool2')
            conv3_1 = conv_bn_relu(pool2, 256, 3, 1, 'SAME', training, w_summary, name='conv3_1')
            conv3_2 = conv_bn_relu(conv3_1, 256, 3, 1, 'SAME', training, w_summary, name='conv3_2')
            conv3_3 = conv_bn_relu(conv3_2, 256, 3, 1, 'SAME', training, w_summary, name='conv3_3')
            pool3 = pool(conv3_3, 2, 2, 'max', name='pool3')
            conv4_1 = conv_bn_relu(pool3, 512, 3, 1, 'SAME', training, w_summary, name='conv4_1')
            conv4_2 = conv_bn_relu(conv4_1, 512, 3, 1, 'SAME', training, w_summary, name='conv4_2')
            conv4_3 = conv_bn_relu(conv4_2, 512, 3, 1, 'SAME', training, w_summary, name='conv4_3')
            pool4 = pool(conv4_3, 2, 2, 'max', name='pool4')
            conv5_1 = conv_bn_relu(pool4, 512, 3, 1, 'SAME', training, w_summary, name='conv5_1')
            conv5_2 = conv_bn_relu(conv5_1, 512, 3, 1, 'SAME', training, w_summary, name='conv5_2')
            conv5_3 = conv_bn_relu(conv5_2, 512, 3, 1, 'SAME', training, w_summary, name='conv5_3')
            pool5 = pool(conv5_3, 2, 2, 'max', name='pool5')
        else:
            # Same topology without batch norm.
            conv1_1 = conv_relu(inputs, 64, 3, 1, 'SAME', training, w_summary, bias=False,name='conv1_1')
            conv1_2 = conv_relu(conv1_1, 64, 3, 1, 'SAME', training, w_summary, bias=False,name='conv1_2')
            pool1 = pool(conv1_2, 2, 2, 'max', name='pool1')
            conv2_1 = conv_relu(pool1, 128, 3, 1, 'SAME', training, w_summary, bias=False,name='conv2_1')
            conv2_2 = conv_relu(conv2_1, 128, 3, 1, 'SAME', training, w_summary, bias=False,name='conv2_2')
            pool2 = pool(conv2_2, 2, 2, 'max', name='pool2')
            conv3_1 = conv_relu(pool2, 256, 3, 1, 'SAME', training, w_summary, bias=False,name='conv3_1')
            conv3_2 = conv_relu(conv3_1, 256, 3, 1, 'SAME', training, w_summary, bias=False,name='conv3_2')
            conv3_3 = conv_relu(conv3_2, 256, 3, 1, 'SAME', training, w_summary, bias=False,name='conv3_3')
            pool3 = pool(conv3_3, 2, 2, 'max', name='pool3')
            conv4_1 = conv_relu(pool3, 512, 3, 1, 'SAME', training, w_summary, bias=False,name='conv4_1')
            conv4_2 = conv_relu(conv4_1, 512, 3, 1, 'SAME', training, w_summary, bias=False,name='conv4_2')
            conv4_3 = conv_relu(conv4_2, 512, 3, 1, 'SAME', training, w_summary, bias=False,name='conv4_3')
            pool4 = pool(conv4_3, 2, 2, 'max', name='pool4')
            # NOTE(review): unlike conv1-conv4 in this branch, the conv5_x
            # calls omit bias=False — confirm whether that is intentional.
            conv5_1 = conv_relu(pool4, 512, 3, 1, 'SAME', training, w_summary, name='conv5_1')
            conv5_2 = conv_relu(conv5_1, 512, 3, 1, 'SAME', training, w_summary, name='conv5_2')
            conv5_3 = conv_relu(conv5_2, 512, 3, 1, 'SAME', training, w_summary, name='conv5_3')
            pool5 = pool(conv5_3, 2, 2, 'max', name='pool5')
        # NOTE(review): fc6/fc7 pass (..., 'SAME', w_summary, training, True, ...)
        # while every conv call above passes (..., 'SAME', training, w_summary, ...);
        # the two flags appear swapped here — verify against the helper signatures
        # in model.layers_group before relying on these layers' training behavior.
        fc6 = astrous_conv_relu(pool5,1024,3,3,'SAME',w_summary,training,True,name='fc6')
        fc7 = conv_relu(fc6, 1024, 1, 1, 'SAME', w_summary, training, True, name='fc7')
        # Collect every endpoint registered under the collection, then pin the
        # three tensors the RefineDet heads consume.
        end_logits = slim.utils.convert_collection_to_dict(end_points_collection)
        end_logits['conv4_3'] = conv4_3
        end_logits['conv5_3'] = conv5_3
        end_logits['fc7'] = fc7
    return end_logits
| 57.197183
| 107
| 0.614381
| 646
| 4,061
| 3.594427
| 0.117647
| 0.099914
| 0.067183
| 0.156761
| 0.789836
| 0.789836
| 0.708872
| 0.708872
| 0.708872
| 0.611111
| 0
| 0.115225
| 0.228515
| 4,061
| 70
| 108
| 58.014286
| 0.625918
| 0.023147
| 0
| 0.2
| 0
| 0
| 0.103213
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.02
| 0
| 0.06
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0153b9d466f4ce802dc638ae6cfe0fca086a1d7b
| 149
|
py
|
Python
|
tests/benchmark/includes.py
|
maguec/RediSearch
|
c6ecf9de36b7aa5f3603ead7c8fc18c330882668
|
[
"MIT",
"Ruby",
"Apache-2.0",
"BSD-3-Clause"
] | 2,098
|
2019-05-13T09:11:54.000Z
|
2022-03-31T06:24:50.000Z
|
tests/benchmark/includes.py
|
maguec/RediSearch
|
c6ecf9de36b7aa5f3603ead7c8fc18c330882668
|
[
"MIT",
"Ruby",
"Apache-2.0",
"BSD-3-Clause"
] | 1,659
|
2019-05-13T07:55:29.000Z
|
2022-03-31T02:42:57.000Z
|
tests/benchmark/includes.py
|
maguec/RediSearch
|
c6ecf9de36b7aa5f3603ead7c8fc18c330882668
|
[
"MIT",
"Ruby",
"Apache-2.0",
"BSD-3-Clause"
] | 227
|
2019-05-17T07:54:49.000Z
|
2022-03-28T03:50:19.000Z
|
import sys
import os

# Make the repo's bundled "readies" helpers importable for the benchmark
# scripts; degrade silently when running outside the full source tree.
try:
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../deps/readies"))
    import paella
except ImportError:
    # FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; only a missing module should be tolerated here.
    pass
| 14.9
| 85
| 0.657718
| 22
| 149
| 4.272727
| 0.681818
| 0.12766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.174497
| 149
| 9
| 86
| 16.555556
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
0153c219e223f1ad14a430c3b20b3bef90024de9
| 232
|
py
|
Python
|
tests/fakes/fake_rml_mapper.py
|
meaningfy-ws/ted-xml-2-rdf
|
ac26a19f3761b7cf79d79a46be6323b658f067eb
|
[
"Apache-2.0"
] | 1
|
2022-03-21T12:32:52.000Z
|
2022-03-21T12:32:52.000Z
|
tests/fakes/fake_rml_mapper.py
|
meaningfy-ws/ted-xml-2-rdf
|
ac26a19f3761b7cf79d79a46be6323b658f067eb
|
[
"Apache-2.0"
] | 24
|
2022-02-10T10:43:56.000Z
|
2022-03-29T12:36:21.000Z
|
tests/fakes/fake_rml_mapper.py
|
meaningfy-ws/ted-sws
|
d1e351eacb2900f84ec7edc457e49d8202fbaff5
|
[
"Apache-2.0"
] | null | null | null |
import pathlib
from ted_sws.notice_transformer.adapters.rml_mapper import RMLMapperABC, SerializationFormat
class FakeRMLMapper(RMLMapperABC):
    """Test double for the RML mapper; performs no real transformation."""

    def execute(self, package_path: pathlib.Path) -> str:
        """Ignore *package_path* and return a canned RDF payload."""
        return "RDF result"
| 25.777778
| 92
| 0.784483
| 27
| 232
| 6.592593
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142241
| 232
| 8
| 93
| 29
| 0.894472
| 0
| 0
| 0
| 0
| 0
| 0.043103
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
0165969bb12452f1b5637a68c6aed00efe056f00
| 3,626
|
py
|
Python
|
z2/part3/updated_part2_batch/jm/parser_errors_2/565102744.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part3/updated_part2_batch/jm/parser_errors_2/565102744.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part3/updated_part2_batch/jm/parser_errors_2/565102744.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 565102744
"""
"""
random actions, total chaos
"""
board = gamma_new(4, 5, 4, 3)
assert board is not None
assert gamma_move(board, 1, 2, 1) == 1
assert gamma_move(board, 1, 0, 4) == 1
assert gamma_move(board, 2, 3, 0) == 1
assert gamma_move(board, 2, 2, 0) == 1
assert gamma_move(board, 3, 4, 1) == 0
assert gamma_move(board, 3, 0, 1) == 1
assert gamma_move(board, 4, 1, 3) == 1
assert gamma_move(board, 4, 1, 0) == 1
assert gamma_move(board, 1, 3, 2) == 1
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_busy_fields(board, 1) == 3
board905986942 = gamma_board(board)
assert board905986942 is not None
assert board905986942 == ("1...\n"
".4..\n"
"...1\n"
"3.1.\n"
".422\n")
del board905986942
board905986942 = None
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 3, 2, 2) == 1
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 1, 2, 2) == 0
assert gamma_free_fields(board, 1) == 5
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 2, 1, 4) == 1
assert gamma_move(board, 3, 3, 4) == 1
assert gamma_move(board, 3, 0, 2) == 1
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 1, 0, 2) == 0
assert gamma_move(board, 2, 3, 0) == 0
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_busy_fields(board, 2) == 3
assert gamma_move(board, 3, 3, 2) == 0
assert gamma_move(board, 3, 1, 1) == 1
assert gamma_move(board, 4, 0, 0) == 1
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 2, 4, 2) == 0
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 3, 2, 2) == 0
assert gamma_move(board, 3, 2, 2) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_move(board, 4, 3, 0) == 0
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_golden_move(board, 3, 1, 2) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_busy_fields(board, 3) == 5
assert gamma_move(board, 4, 0, 3) == 1
assert gamma_move(board, 4, 2, 4) == 1
assert gamma_move(board, 1, 1, 4) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 2, 0, 0) == 0
assert gamma_move(board, 3, 3, 3) == 1
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 4, 2, 3) == 1
assert gamma_move(board, 4, 0, 2) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 1, 2, 2) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_golden_possible(board, 2) == 1
board365356545 = gamma_board(board)
assert board365356545 is not None
assert board365356545 == ("1243\n"
"4443\n"
"3.31\n"
"331.\n"
"4422\n")
del board365356545
board365356545 = None
assert gamma_move(board, 3, 1, 3) == 0
assert gamma_move(board, 3, 2, 2) == 0
assert gamma_move(board, 4, 2, 1) == 0
assert gamma_free_fields(board, 4) == 2
assert gamma_move(board, 1, 2, 2) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_move(board, 3, 0, 2) == 0
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 4, 3, 2) == 0
gamma_delete(board)
| 30.216667
| 46
| 0.651131
| 670
| 3,626
| 3.377612
| 0.061194
| 0.349978
| 0.424216
| 0.565621
| 0.774194
| 0.767565
| 0.647813
| 0.44852
| 0.390632
| 0.37384
| 0
| 0.137228
| 0.188086
| 3,626
| 119
| 47
| 30.470588
| 0.631454
| 0
| 0
| 0.262136
| 0
| 0
| 0.016959
| 0
| 0
| 0
| 0
| 0
| 0.747573
| 1
| 0
| false
| 0
| 0.009709
| 0
| 0.009709
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
01699a7bc043086f4b1a7dd3ccdbbc60336984f2
| 81
|
py
|
Python
|
configs/snippets/person_test.py
|
trunkclub/ontology_etl
|
097985be505469258ee6c831e789f64fb804f091
|
[
"MIT"
] | null | null | null |
configs/snippets/person_test.py
|
trunkclub/ontology_etl
|
097985be505469258ee6c831e789f64fb804f091
|
[
"MIT"
] | null | null | null |
configs/snippets/person_test.py
|
trunkclub/ontology_etl
|
097985be505469258ee6c831e789f64fb804f091
|
[
"MIT"
] | null | null | null |
def person_test(x):
return isinstance(x, dict) and 'demographic_data' in x
| 16.2
| 58
| 0.716049
| 13
| 81
| 4.307692
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 81
| 4
| 59
| 20.25
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
6d8463279d1fd30035686d5a87eb901a846a235f
| 131
|
py
|
Python
|
test-ui.py
|
eliteraspberries/python-ui
|
f09156ac7b63d37488c5f65d4acd883d4bea5a32
|
[
"0BSD"
] | null | null | null |
test-ui.py
|
eliteraspberries/python-ui
|
f09156ac7b63d37488c5f65d4acd883d4bea5a32
|
[
"0BSD"
] | null | null | null |
test-ui.py
|
eliteraspberries/python-ui
|
f09156ac7b63d37488c5f65d4acd883d4bea5a32
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
def test_import():
import ui
print(ui.__version__)
if __name__ == '__main__':
test_import()
| 10.916667
| 26
| 0.641221
| 17
| 131
| 4.117647
| 0.764706
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.221374
| 131
| 11
| 27
| 11.909091
| 0.686275
| 0.152672
| 0
| 0
| 0
| 0
| 0.072727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.6
| 0
| 0.8
| 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6db21f19e578c4cc807810c5713b692d634bb1a0
| 29
|
py
|
Python
|
readalongs/_version.py
|
ReadAlongs/Studio
|
3fd89f49466bbed99b6cabaf071b0605e63d1fdc
|
[
"MIT"
] | 16
|
2020-05-27T18:09:04.000Z
|
2022-03-16T17:40:57.000Z
|
readalongs/_version.py
|
ReadAlongs/Studio
|
3fd89f49466bbed99b6cabaf071b0605e63d1fdc
|
[
"MIT"
] | 77
|
2020-03-31T16:07:15.000Z
|
2022-03-17T14:22:51.000Z
|
readalongs/_version.py
|
ReadAlongs/Studio
|
3fd89f49466bbed99b6cabaf071b0605e63d1fdc
|
[
"MIT"
] | 7
|
2021-05-04T17:38:57.000Z
|
2022-03-25T09:07:23.000Z
|
__version__ = "0.2.20211122"
| 14.5
| 28
| 0.724138
| 4
| 29
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.384615
| 0.103448
| 29
| 1
| 29
| 29
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6db762539a39aca2b870a7a5e6e2567334c4fd70
| 81
|
py
|
Python
|
ifstat/errors.py
|
aviramc/ifstat
|
51285c387d5821794b2bdbd4e73d8396ab916baf
|
[
"Apache-2.0"
] | null | null | null |
ifstat/errors.py
|
aviramc/ifstat
|
51285c387d5821794b2bdbd4e73d8396ab916baf
|
[
"Apache-2.0"
] | null | null | null |
ifstat/errors.py
|
aviramc/ifstat
|
51285c387d5821794b2bdbd4e73d8396ab916baf
|
[
"Apache-2.0"
] | null | null | null |
class IFStatError(Exception):
pass
class NoDeviceError(Exception):
pass
| 13.5
| 31
| 0.740741
| 8
| 81
| 7.5
| 0.625
| 0.433333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 81
| 5
| 32
| 16.2
| 0.909091
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6dc415488103250a683918a53bae83c67262dfb9
| 26
|
py
|
Python
|
apps/controllerx/cx_version.py
|
francoisauclair911/controllerx
|
00b38cdbddb75e470d1577f1d22c8e99d62e1256
|
[
"MIT"
] | null | null | null |
apps/controllerx/cx_version.py
|
francoisauclair911/controllerx
|
00b38cdbddb75e470d1577f1d22c8e99d62e1256
|
[
"MIT"
] | null | null | null |
apps/controllerx/cx_version.py
|
francoisauclair911/controllerx
|
00b38cdbddb75e470d1577f1d22c8e99d62e1256
|
[
"MIT"
] | null | null | null |
__version__ = "v4.17.0b1"
| 13
| 25
| 0.692308
| 4
| 26
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 0.115385
| 26
| 1
| 26
| 26
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0.346154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6dca4a4128fe68de3a8391eeacaf57697bb0ceb1
| 107
|
py
|
Python
|
app/fedcv/image_segmentation/model/__init__.py
|
ray-ruisun/FedML
|
24ff30d636bb70f64e94e9ca205375033597d3dd
|
[
"Apache-2.0"
] | null | null | null |
app/fedcv/image_segmentation/model/__init__.py
|
ray-ruisun/FedML
|
24ff30d636bb70f64e94e9ca205375033597d3dd
|
[
"Apache-2.0"
] | null | null | null |
app/fedcv/image_segmentation/model/__init__.py
|
ray-ruisun/FedML
|
24ff30d636bb70f64e94e9ca205375033597d3dd
|
[
"Apache-2.0"
] | null | null | null |
from .deeplabV3_plus import DeepLabV3_plus
from .unet import UNet
from .transunet import VisionTransformer
| 26.75
| 42
| 0.859813
| 14
| 107
| 6.428571
| 0.5
| 0.288889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021053
| 0.11215
| 107
| 3
| 43
| 35.666667
| 0.926316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6df88ee732d0633406d96262c31e8a7428165c84
| 78
|
py
|
Python
|
spark_minimal_algorithms/examples/__init__.py
|
kowaalczyk/spark-minimal-algorithms
|
450e536b46af60056b77f5c2cef195af2bb988bd
|
[
"MIT"
] | 3
|
2020-06-17T22:41:46.000Z
|
2021-04-06T06:51:37.000Z
|
spark_minimal_algorithms/examples/__init__.py
|
kowaalczyk/spark-minimal-algorithms
|
450e536b46af60056b77f5c2cef195af2bb988bd
|
[
"MIT"
] | null | null | null |
spark_minimal_algorithms/examples/__init__.py
|
kowaalczyk/spark-minimal-algorithms
|
450e536b46af60056b77f5c2cef195af2bb988bd
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .countifs import Countifs
from .tera_sort import TeraSort
| 19.5
| 31
| 0.807692
| 11
| 78
| 5.636364
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.141026
| 78
| 3
| 32
| 26
| 0.910448
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0987241dfaf99a6fc4d4d0fc5ec6ff15ee143330
| 119
|
py
|
Python
|
envoy.dependency.cve_scan/envoy/dependency/cve_scan/exceptions.py
|
envoyproxy/pytooling
|
db8b60184f8a61b3184a111b0cfaff4780511b46
|
[
"Apache-2.0"
] | 1
|
2021-12-09T19:24:48.000Z
|
2021-12-09T19:24:48.000Z
|
envoy.dependency.cve_scan/envoy/dependency/cve_scan/exceptions.py
|
envoyproxy/pytooling
|
db8b60184f8a61b3184a111b0cfaff4780511b46
|
[
"Apache-2.0"
] | 392
|
2021-08-24T15:55:32.000Z
|
2022-03-28T14:26:22.000Z
|
envoy.dependency.cve_scan/envoy/dependency/cve_scan/exceptions.py
|
phlax/abstracts
|
53fbbee68d1f56effe0ded1ed4e28be870693877
|
[
"Apache-2.0"
] | 3
|
2021-10-06T13:43:11.000Z
|
2021-11-29T13:48:56.000Z
|
class CPEError(Exception):
pass
class CVEError(Exception):
pass
class CVECheckError(Exception):
pass
| 9.153846
| 31
| 0.697479
| 12
| 119
| 6.916667
| 0.5
| 0.46988
| 0.433735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226891
| 119
| 12
| 32
| 9.916667
| 0.902174
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
09966618108baaff19e5db40c4e4d7749c7eaefb
| 82
|
py
|
Python
|
pytwitchchat/__init__.py
|
benjiJanssens/PyTwitchChat
|
16aad531c120514469ad472dd35b090869f651f8
|
[
"MIT"
] | 1
|
2021-05-04T12:31:01.000Z
|
2021-05-04T12:31:01.000Z
|
pytwitchchat/__init__.py
|
benjiJanssens/PyTwitchChat
|
16aad531c120514469ad472dd35b090869f651f8
|
[
"MIT"
] | null | null | null |
pytwitchchat/__init__.py
|
benjiJanssens/PyTwitchChat
|
16aad531c120514469ad472dd35b090869f651f8
|
[
"MIT"
] | 1
|
2021-05-04T12:15:17.000Z
|
2021-05-04T12:15:17.000Z
|
# noinspection PyUnresolvedReferences
from .py_twitch_chat import TwitchChatClient
| 41
| 44
| 0.902439
| 8
| 82
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 82
| 2
| 44
| 41
| 0.947368
| 0.426829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
099acdd9fd5c25d8ba82bd94fede238a36d7233c
| 15,750
|
py
|
Python
|
api/migrations/0001_initial.py
|
Python-Marketing/django-content-server
|
16794265c44152a86f99b8548c8e1cb8c890f51a
|
[
"CC0-1.0"
] | null | null | null |
api/migrations/0001_initial.py
|
Python-Marketing/django-content-server
|
16794265c44152a86f99b8548c8e1cb8c890f51a
|
[
"CC0-1.0"
] | null | null | null |
api/migrations/0001_initial.py
|
Python-Marketing/django-content-server
|
16794265c44152a86f99b8548c8e1cb8c890f51a
|
[
"CC0-1.0"
] | null | null | null |
# Generated by Django 2.2.16 on 2020-10-27 07:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import filer.fields.image
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('djangocms_blog', '0002_auto_20200929_2310'),
migrations.swappable_dependency(settings.FILER_IMAGE_MODEL),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cms', '0022_auto_20180620_1551'),
]
operations = [
migrations.CreateModel(
name='AllowedDomain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=75)),
('domain', models.URLField(blank=True)),
('term', models.CharField(blank=True, max_length=75)),
('page_name', models.CharField(default=None, max_length=75)),
('class_names', models.CharField(max_length=150)),
('id_names', models.CharField(max_length=150)),
],
),
migrations.CreateModel(
name='BeautifulGumtreeQuery',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('term', models.CharField(max_length=75)),
('price_start', models.PositiveIntegerField(blank=True, null=True)),
('price_end', models.PositiveIntegerField(blank=True, null=True)),
('date_created', models.DateTimeField(auto_now_add=True, null=True)),
('date_modified', models.DateTimeField(blank=True, null=True)),
('running', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='GumtreeCategoryLabel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=75)),
('link', models.CharField(max_length=75)),
('key', models.CharField(max_length=75)),
],
),
migrations.CreateModel(
name='GumtreeProvince',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=75)),
('link', models.CharField(max_length=75)),
('key', models.CharField(max_length=75)),
],
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(unique=True)),
],
),
migrations.CreateModel(
name='Volunteer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=75)),
('description', models.CharField(blank=True, max_length=255)),
('url', models.CharField(max_length=175)),
('uploaded_at', models.DateTimeField(auto_now_add=True)),
('body', models.TextField(blank=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Testimonial',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', models.TextField(blank=True)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=75)),
('slug', models.SlugField(unique=True)),
('body', models.TextField(blank=True)),
('link', models.URLField(blank=True)),
('file', models.FileField(upload_to='development/django-content-server/media/Posts')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Page')),
],
),
migrations.CreateModel(
name='Picture',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='')),
('caption', models.TextField(blank=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PageDetailExtension',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='development/django-content-server/media/Posts')),
('description', models.TextField(blank=True)),
('file', models.FileField(upload_to='development/django-content-server/media/Posts')),
('extended_object', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='cms.Page')),
('public_extension', models.OneToOneField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='draft_extension', to='api.PageDetailExtension')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='GumtreeLocation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=75)),
('link', models.CharField(max_length=75)),
('key', models.CharField(max_length=75)),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeLocation')),
('province', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeProvince')),
],
),
migrations.CreateModel(
name='GumtreeCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=75)),
('link', models.CharField(max_length=75)),
('key', models.CharField(max_length=75)),
('label', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeCategoryLabel')),
],
),
migrations.CreateModel(
name='Gallery',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('caption', models.TextField(blank=True, default='Change Me')),
('active', models.BooleanField(default=False)),
('blog_post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='djangocms_blog.Post')),
('image', filer.fields.image.FilerImageField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='djangocms_blog_post_gallery', to=settings.FILER_IMAGE_MODEL, verbose_name='gallery images')),
],
),
migrations.CreateModel(
name='ExtendedPage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField(blank=True)),
('background_image', models.FileField(upload_to='development/django-content-server/media/Posts')),
('page', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='extended_fields', to='api.Page', verbose_name='Page')),
],
),
migrations.CreateModel(
name='Donation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('amount', models.PositiveIntegerField()),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('charity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='djangocms_blog.Post')),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', models.TextField(blank=True)),
('object_id', models.PositiveIntegerField()),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='BeautifulGumtreeSearch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('term', models.CharField(max_length=75)),
('link', models.URLField(blank=True)),
('body', models.TextField(blank=True)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('allowed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.AllowedDomain')),
],
),
migrations.CreateModel(
name='BeautifulGumtreeResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=75)),
('subtitle', models.CharField(max_length=75)),
('abstract', models.TextField(blank=True)),
('image', models.CharField(max_length=75)),
('price', models.CharField(max_length=75)),
('cell', models.CharField(max_length=75)),
('email', models.EmailField(max_length=75)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('bgs', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.BeautifulGumtreeSearch')),
('category', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeCategory')),
('label', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeCategoryLabel')),
('location', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeLocation')),
('province', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeProvince')),
('query', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.BeautifulGumtreeQuery')),
],
),
migrations.AddField(
model_name='beautifulgumtreequery',
name='category',
field=models.ManyToManyField(blank=True, to='api.GumtreeCategory'),
),
migrations.AddField(
model_name='beautifulgumtreequery',
name='label',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeCategoryLabel'),
),
migrations.AddField(
model_name='beautifulgumtreequery',
name='location',
field=models.ManyToManyField(blank=True, to='api.GumtreeLocation'),
),
migrations.AddField(
model_name='beautifulgumtreequery',
name='province',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.GumtreeProvince'),
),
migrations.CreateModel(
name='BeautifulGoogleSearch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('term', models.CharField(max_length=75)),
('link', models.URLField(blank=True)),
('body', models.TextField(blank=True)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('allowed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.AllowedDomain')),
],
),
migrations.CreateModel(
name='BeautifulGoogleResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=75)),
('subtitle', models.CharField(max_length=75)),
('abstract', models.TextField(blank=True)),
('image', models.CharField(max_length=75)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('date_modified', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('bgs', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.BeautifulGoogleSearch')),
],
),
]
| 56.25
| 237
| 0.598603
| 1,563
| 15,750
| 5.870761
| 0.105566
| 0.045554
| 0.045772
| 0.071927
| 0.794246
| 0.766892
| 0.710331
| 0.701613
| 0.697036
| 0.673605
| 0
| 0.010433
| 0.257524
| 15,750
| 279
| 238
| 56.451613
| 0.774243
| 0.002921
| 0
| 0.669118
| 1
| 0
| 0.142211
| 0.042542
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014706
| 0
| 0.029412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
09a6c071ba31783273bba64529ff66fdfa139695
| 12,694
|
py
|
Python
|
CreateFilesinEOL_HCMnew.py
|
judysu1983/PythonMBSi
|
9481bf1409a888c3f8511bcd05718ea81a063fa1
|
[
"bzip2-1.0.6"
] | null | null | null |
CreateFilesinEOL_HCMnew.py
|
judysu1983/PythonMBSi
|
9481bf1409a888c3f8511bcd05718ea81a063fa1
|
[
"bzip2-1.0.6"
] | null | null | null |
CreateFilesinEOL_HCMnew.py
|
judysu1983/PythonMBSi
|
9481bf1409a888c3f8511bcd05718ea81a063fa1
|
[
"bzip2-1.0.6"
] | null | null | null |
import xml.etree.ElementTree as ETXML
import xml.etree.ElementTree as Element
import xml.etree.ElementTree as etree
import os
##fullfilename=["BusinessProcess.en-US.label.txt", "BusinessProcess.en-US.label.txt"]
##sourcelocation=["[git_OOBAPPs]\HCM\source\metadata\BusinessProcess\BusinessProcess\AxLabelFile\LabelResources\en-US","test"]
##BaseName=["BusinessProcess","testBasename"]
##ExtensionName="label.txt"
##TargetLclLocation= ["BusinessProcess\BusinessProcess\AxLabelFile\LabelResources","test2"]
#fullfilename=["PersonnelCore.en-US.label.txt", "HcmPeopleNavigatorControl.en-US.label.txt", "HcmPersonCard.en-US.label.txt", "TaxEngineConfiguration.en-US.label.txt", "TaxEngine.en-US.label.txt", "TaxEngineInterface.en-US.label.txt", "TaxSettlement.en-US.label.txt", "GetStarted.en-US.label.txt", "SysBasicUpgrade.en-US.label.txt"]
# --- Label-file localization manifest data -----------------------------------
# Four parallel lists (index i describes one label file):
#   fullfilename[i]      physical .label.txt file name
#   ModelName[i]         owning AX model
#   BaseName[i]          label-file base name (file name minus language/ext)
#   TargetLclLocation[i] folder the localized file is copied to
# NOTE(review): many entries contain a literal "en-US" in the middle of a word
# (e.g. "Calenden-USs" ~ "Calendars", "GetSten-USted" ~ "GetStarted") -- this
# looks like an accidental global "ar" -> "en-US" replacement; confirm the real
# names before relying on them. They are preserved byte-for-byte here.
fullfilename = [
    "TaxSettlement.en-US.label.txt", "TaxEngineInterface.en-US.label.txt",
    "TaxEngineConfiguration.en-US.label.txt", "TaxEngine.en-US.label.txt",
    "ElectronicReportingMapping.en-US.label.txt",
    "ElectronicReportingForAx.en-US.label.txt",
    "ElectronicReportingCore.en-US.label.txt",
    "ElectronicReportingPrintManagementIntegration.en-US.label.txt",
    "ElectronicReporting.en-US.label.txt", "Subledger.en-US.label.txt",
    "TaxEngineIntegration_SourceDocTypes.en-US.label.txt",
    "TaxEngineIntegration_SourceDoc.en-US.label.txt",
    "SourceDocumentation.en-US.label.txt",
    "AccountingFramework.en-US.label.txt", "Measurement.en-US.label.txt",
    "SegmentedEntry.en-US.label.txt", "Ledger.en-US.label.txt",
    "FieldDescriptions_Ledger.en-US.label.txt", "Dimension.en-US.label.txt",
    "FieldDescriptions_GeneralLedger_Currency.en-US.label.txt",
    "CurrencyExchange.en-US.label.txt", "Calenden-USs.en-US.label.txt",
    "BankAccountType.en-US.label.txt", "CDSIntegration.en-US.label.txt",
    "UserDefinedFields.en-US.label.txt", "UnitOfMeasure.en-US.label.txt",
    "SysBasicUpgrade.en-US.label.txt", "SysPolicy.en-US.label.txt",
    "DirectoryUpgrade.en-US.label.txt", "GlobalAddressBook.en-US.label.txt",
    "Directory_InvoicesCommunication.en-US.label.txt",
    "ContactPersonManagement.en-US.label.txt", "Processflow.en-US.label.txt",
    "Hieren-USchicalGridCommon.en-US.label.txt",
    "GetSten-USted.en-US.label.txt", "ApplicationCommon.en-US.label.txt",
    "PersonnelUpgrade.en-US.label.txt", "HcmMobile.en-US.label.txt",
    "Workforce.en-US.label.txt", "TalentClient.en-US.label.txt",
    "Talent.en-US.label.txt", "Payroll.en-US.label.txt",
    "HcmGenericProcess.en-US.label.txt", "HcmACA.en-US.label.txt",
    "HCM.en-US.label.txt", "FieldDescriptions_Hcm.en-US.label.txt",
    "Compensation.en-US.label.txt", "Benefits.en-US.label.txt",
    "PersonnelIntegration.en-US.label.txt", "PersonnelCore.en-US.label.txt",
    "PersonnelBusinessProcess.en-US.label.txt",
    "HcmOnboen-USd.en-US.label.txt", "Personnel.en-US.label.txt",
    "Leave.en-US.label.txt", "HumanCapitalMobile.en-US.label.txt",
    "HumanCapitalManagementIntegration.en-US.label.txt",
    "HumanCapitalManagement.en-US.label.txt",
    "HcmPersonCen-USd.en-US.label.txt",
    "HcmPeopleSeen-USchControl.en-US.label.txt",
    "HcmPeopleNavigatorControl.en-US.label.txt",
    "CaseManagement.en-US.label.txt", "BusinessProcess.en-US.label.txt",
    "UserDefinedApp.en-US.label.txt"]
# FIX: this list was commented out, but the manifest loop below reads
# ModelName[i] -- it must be active or the script dies with a NameError.
ModelName = [
    "TaxEngine", "TaxEngine", "TaxEngine", "TaxEngine",
    "ElectronicReportingMapping", "ElectronicReportingForAx",
    "ElectronicReportingCore", "ElectronicReporting", "ElectronicReporting",
    "Subledger", "SourceDocumentationTypes", "SourceDocumentation",
    "SourceDocumentation", "SourceDocumentation", "Measurement", "Ledger",
    "Ledger", "Ledger", "Dimensions", "Currency", "Currency", "Calenden-US",
    "BankTypes", "ApplicationIntegration", "UserDefinedField",
    "UnitOfMeasure", "SysBasicUpgrade", "Policy", "DirectoryUpgrade",
    "Directory", "Directory", "ContactPerson", "ApplicationCommon",
    "ApplicationCommon", "ApplicationCommon", "ApplicationCommon",
    "PersonnelUpgrade", "PersonnelMobile", "PersonnelManagement",
    "PersonnelManagement", "PersonnelManagement", "PersonnelManagement",
    "PersonnelManagement", "PersonnelManagement", "PersonnelManagement",
    "PersonnelManagement", "PersonnelManagement", "PersonnelManagement",
    "PersonnelIntegration", "PersonnelCore", "PersonnelBusinessProcess",
    "PersonnelBusinessProcess", "Personnel", "Leave", "HumanCapitalMobile",
    "HumanCapitalManagementIntegration", "HumanCapitalManagement",
    "HumanCapitalManagement", "HumanCapitalManagement",
    "HumanCapitalManagement", "CaseManagement", "BusinessProcess",
    "ApplicationExtensibility"]
BaseName = [
    "TaxSettlement", "TaxEngineInterface", "TaxEngineConfiguration",
    "TaxEngine", "ElectronicReportingMapping", "ElectronicReportingForAx",
    "ElectronicReportingCore",
    "ElectronicReportingPrintManagementIntegration", "ElectronicReporting",
    "Subledger", "TaxEngineIntegration_SourceDocTypes",
    "TaxEngineIntegration_SourceDoc", "SourceDocumentation",
    "AccountingFramework", "Measurement", "SegmentedEntry", "Ledger",
    "FieldDescriptions_Ledger", "Dimension",
    "FieldDescriptions_GeneralLedger_Currency", "CurrencyExchange",
    "Calenden-USs", "BankAccountType", "CDSIntegration", "UserDefinedFields",
    "UnitOfMeasure", "SysBasicUpgrade", "SysPolicy", "DirectoryUpgrade",
    "GlobalAddressBook", "Directory_InvoicesCommunication",
    "ContactPersonManagement", "Processflow", "Hieren-USchicalGridCommon",
    "GetSten-USted", "ApplicationCommon", "PersonnelUpgrade", "HcmMobile",
    "Workforce", "TalentClient", "Talent", "Payroll", "HcmGenericProcess",
    "HcmACA", "HCM", "FieldDescriptions_Hcm", "Compensation", "Benefits",
    "PersonnelIntegration", "PersonnelCore", "PersonnelBusinessProcess",
    "HcmOnboen-USd", "Personnel", "Leave", "HumanCapitalMobile",
    "HumanCapitalManagementIntegration", "HumanCapitalManagement",
    "HcmPersonCen-USd", "HcmPeopleSeen-USchControl",
    "HcmPeopleNavigatorControl", "CaseManagement", "BusinessProcess",
    "UserDefinedApp"]
ExtensionName = "label.txt"
# FIX: raw strings -- the original plain literals contained sequences such as
# \UserDefinedField, where \U starts a unicode escape and is a SyntaxError on
# Python 3. Raw strings keep every backslash literal (identical value to what
# Python 2 produced for these paths).
TargetLclLocation = [
    r"ElectronicReporting\source\Metadata\TaxEngine\TaxEngine\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\TaxEngine\TaxEngine\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\TaxEngine\TaxEngine\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\TaxEngine\TaxEngine\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\ElectronicReportingMapping\ElectronicReportingMapping\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\ElectronicReportingForAx\ElectronicReportingForAx\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\ElectronicReportingCore\ElectronicReportingCore\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\ElectronicReporting\ElectronicReporting\AxLabelFile\LabelResources",
    r"ElectronicReporting\source\Metadata\ElectronicReporting\ElectronicReporting\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Subledger\Subledger\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\SourceDocumentationTypes\SourceDocumentationTypes\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\SourceDocumentation\SourceDocumentation\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\SourceDocumentation\SourceDocumentation\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\SourceDocumentation\SourceDocumentation\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Measurement\Measurement\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Ledger\Ledger\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Ledger\Ledger\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Ledger\Ledger\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Dimensions\Dimensions\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Currency\Currency\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Currency\Currency\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\Calenden-US\Calenden-US\AxLabelFile\LabelResources",
    r"Accounting Foundation\source\Metadata\BankTypes\BankTypes\AxLabelFile\LabelResources",
    r"ApplicationIntegration\source\Metadata\ApplicationIntegration\ApplicationIntegration\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\UserDefinedField\UserDefinedField\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\UnitOfMeasure\UnitOfMeasure\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\SysBasicUpgrade\SysBasicUpgrade\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\Policy\Policy\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\DirectoryUpgrade\DirectoryUpgrade\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\Directory\Directory\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\Directory\Directory\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\ContactPerson\ContactPerson\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\ApplicationCommon\ApplicationCommon\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\ApplicationCommon\ApplicationCommon\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\ApplicationCommon\ApplicationCommon\AxLabelFile\LabelResources",
    r"ApplicationCommon\source\Metadata\ApplicationCommon\ApplicationCommon\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelUpgrade\PersonnelUpgrade\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelMobile\PersonnelMobile\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelManagement\PersonnelManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelIntegration\PersonnelIntegration\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelCore\PersonnelCore\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelBusinessProcess\PersonnelBusinessProcess\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\PersonnelBusinessProcess\PersonnelBusinessProcess\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\Personnel\Personnel\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\Leave\Leave\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\HumanCapitalMobile\HumanCapitalMobile\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\HumanCapitalManagementIntegration\HumanCapitalManagementIntegration\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\HumanCapitalManagement\HumanCapitalManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\HumanCapitalManagement\HumanCapitalManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\HumanCapitalManagement\HumanCapitalManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\HumanCapitalManagement\HumanCapitalManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\CaseManagement\CaseManagement\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\BusinessProcess\BusinessProcess\AxLabelFile\LabelResources",
    r"HCM\source\Metadata\ApplicationExtensibility\ApplicationExtensibility\AxLabelFile\LabelResources"]
# Build the localization manifest: one <File> element per label file, each
# carrying the parser/model/base-name/extension/target-location metadata as
# <Var> children, then serialize the tree to aFile.xml.
# FIX: iterate with zip over the parallel lists instead of a hard-coded
# range(0, 63), so the loop cannot drift out of sync with the list lengths.
# ModelName/fullfilename/BaseName/TargetLclLocation/ExtensionName must be
# defined above; ETXML is assumed to be xml.etree.ElementTree.
root = ETXML.Element('EOL')
for fname, model, base, target in zip(fullfilename, ModelName, BaseName,
                                      TargetLclLocation):
    sub = ETXML.SubElement(root, "File", name=fname, parser="[parser.Txt]",
                           noType="Comments")
    ETXML.SubElement(sub, "Var", name="File.Model").text = model
    ETXML.SubElement(sub, "Var", name="File.BaseName").text = base
    ETXML.SubElement(sub, "Var", name="File.ExtensionName").text = ExtensionName
    ETXML.SubElement(sub, "Var", name="File.TargetLclLocation").text = target
rootWrite = ETXML.ElementTree(root)
rootWrite.write('aFile.xml')
| 384.666667
| 5,893
| 0.830314
| 1,144
| 12,694
| 9.200175
| 0.116259
| 0.058527
| 0.064133
| 0.085511
| 0.55924
| 0.433444
| 0.415202
| 0.395819
| 0.395819
| 0.327316
| 0
| 0.000326
| 0.032771
| 12,694
| 32
| 5,894
| 396.6875
| 0.856899
| 0.186545
| 0
| 0
| 0
| 0
| 0.864967
| 0.782858
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.235294
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
09dfd1e310666db5edd3c147dc942be9da8f3842
| 124
|
py
|
Python
|
portfolio/blog/admin.py
|
xeroCBW/portfolio
|
1e14e64cd3235ed95918963dc5734881af75a668
|
[
"MIT"
] | null | null | null |
portfolio/blog/admin.py
|
xeroCBW/portfolio
|
1e14e64cd3235ed95918963dc5734881af75a668
|
[
"MIT"
] | null | null | null |
portfolio/blog/admin.py
|
xeroCBW/portfolio
|
1e14e64cd3235ed95918963dc5734881af75a668
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Blog
# Register your models here.
# Register the Blog model so it can be managed through the Django admin site.
admin.site.register(Blog)
| 17.714286
| 32
| 0.790323
| 18
| 124
| 5.444444
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137097
| 124
| 6
| 33
| 20.666667
| 0.915888
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
115039428f128ec659a854e5aed90f4f5e9f9050
| 43,362
|
py
|
Python
|
fluiddb/data/test/test_object.py
|
fluidinfo/fluiddb
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
[
"Apache-2.0"
] | 3
|
2021-05-10T14:41:30.000Z
|
2021-12-16T05:53:30.000Z
|
fluiddb/data/test/test_object.py
|
fluidinfo/fluiddb
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
[
"Apache-2.0"
] | null | null | null |
fluiddb/data/test/test_object.py
|
fluidinfo/fluiddb
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
[
"Apache-2.0"
] | 2
|
2018-01-24T09:03:21.000Z
|
2021-06-25T08:34:54.000Z
|
from uuid import uuid4
from twisted.internet.defer import inlineCallbacks
from fluiddb.data.object import (
DirtyObject, ObjectIndex, SearchError, escapeWithWildcards,
createDirtyObject, getDirtyObjects, touchObjects)
from fluiddb.query.parser import parseQuery
from fluiddb.testing.basic import FluidinfoTestCase
from fluiddb.testing.resources import (
ConfigResource, IndexResource, DatabaseResource)
class ObjectIndexTest(FluidinfoTestCase):
resources = [('client', IndexResource()),
('config', ConfigResource())]
def setUp(self):
    """Create the L{ObjectIndex} under test, backed by the Solr client resource."""
    super(ObjectIndexTest, self).setUp()
    self.index = ObjectIndex(self.client)
@inlineCallbacks
def testUpdateWithoutData(self):
    """
    Calling L{ObjectIndex.update} with an empty mapping is effectively a
    no-op: no documents are created.
    """
    yield self.index.update({})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([], response.results.docs)
@inlineCallbacks
def testUpdateWithNoneValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and C{None} values.
    """
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/tag': None}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithBoolValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and C{bool} values.
    """
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/tag': True}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithIntValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and C{int} values.
    """
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/tag': 42}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithFloatValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and C{float} values.
    """
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/tag': 42.3}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithUnicodeValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and C{unicode} values.
    """
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/tag': u'value'}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithSetValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and C{list} values.
    """
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/tag': [u'foo', u'bar']}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithBinaryValue(self):
    """
    L{ObjectIndex.update} creates Solr documents for the specified
    objects, L{Tag.path}s and binary values.
    """
    objectID = uuid4()
    # Binary values are represented as a metadata dict, not raw bytes.
    yield self.index.update(
        {objectID: {u'test/tag': {'mime-type': 'text/html',
                                  'file-id': 'index.html',
                                  'size': 123}}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    self.assertEqual([{u'fluiddb/id': str(objectID)}],
                     response.results.docs)
@inlineCallbacks
def testUpdateWithManyValues(self):
    """
    A single L{ObjectIndex.update} call can create or update documents for
    several objects and L{Tag.path}s at once.
    """
    firstID = uuid4()
    secondID = uuid4()
    yield self.index.update({firstID: {u'test/tag1': u'Hi!'},
                             secondID: {u'test/tag2': 42}})
    yield self.index.commit()
    response = yield self.client.search('*:*')
    expected = sorted([{u'fluiddb/id': str(firstID)},
                       {u'fluiddb/id': str(secondID)}])
    self.assertEqual(expected, sorted(response.results.docs))
@inlineCallbacks
def testSearchWithoutData(self):
    """
    Searching a completely empty index returns an empty result set.
    """
    result = yield self.index.search(parseQuery(u'test/tag = 5'))
    self.assertEqual(set(), result)
@inlineCallbacks
def testSearchWithoutMatch(self):
    """
    An empty result is returned when no indexed document matches the
    specified L{Query}.
    """
    yield self.index.update({uuid4(): {u'test/tag': 42}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'unknown/tag = 5'))
    self.assertEqual(set(), result)
@inlineCallbacks
def testSearchWithEqualsUnicodeComparison(self):
    """
    C{=} comparisons against C{unicode} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/unicode': u'value'},
              uuid4(): {u'test/unicode': u'another'}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery('test/unicode = "value"'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsWithEmptyValue(self):
    """
    C{=} comparisons against the empty string are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag': u''},
              uuid4(): {u'test/tag': u'devalue'}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/tag = ""'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsNullComparison(self):
    """
    C{=} comparisons against C{null} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag': None},
              uuid4(): {u'test/tag': u'another'}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery('test/tag = null'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithNotEqualsNullComparison(self):
    """
    C{!=} comparisons against C{null} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag': u'value'},
              uuid4(): {u'test/tag': None}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery('test/tag != null'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsBoolComparison(self):
    """
    C{=} comparisons against C{bool} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': True},
              uuid4(): {u'test/int': False}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int = true'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsIntComparison(self):
    """
    C{=} comparisons against C{int} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': 42},
              uuid4(): {u'test/int': 65}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int = 42'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsIntComparisonWithNegative(self):
    """
    C{=} comparisons against negative C{int} values are supported by
    L{ObjectIndex.search}.  See bug #827411.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': -42},
              uuid4(): {u'test/int': -65}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int = -42'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsFloatComparison(self):
    """
    C{=} comparisons against C{float} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': 42.3},
              uuid4(): {u'test/float': 42.31}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float = 42.3'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsFloatComparisonWithNegative(self):
    """
    C{=} comparisons against negative C{float} values are supported by
    L{ObjectIndex.search}.  See bug #827411.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': -42.3},
              uuid4(): {u'test/float': -42.31}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float = -42.3'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithEqualsIntAndFloatComparison(self):
    """
    C{=} comparisons match numerically across C{float} and C{int} values.
    """
    firstID = uuid4()
    secondID = uuid4()
    thirdID = uuid4()
    yield self.index.update({firstID: {u'test/number': 42.0},
                             secondID: {u'test/number': 42},
                             thirdID: {u'test/number': 48}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/number = 42'))
    self.assertEqual(set([firstID, secondID]), result)
@inlineCallbacks
def testSearchWithEqualsIntAndFloatComparisonWithNegative(self):
    """
    C{=} comparisons match numerically across negative C{float} and C{int}
    values.  See bug #827411.
    """
    firstID = uuid4()
    secondID = uuid4()
    thirdID = uuid4()
    yield self.index.update({firstID: {u'test/number': -42.0},
                             secondID: {u'test/number': -42},
                             thirdID: {u'test/number': -48}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/number = -42'))
    self.assertEqual(set([firstID, secondID]), result)
def testSearchWithEqualsAndFluidDBSlashID(self):
    """
    Using the special C{fluiddb/id} virtual tag in an C{equals} query
    raises a L{SearchError}.
    """
    query = parseQuery(u'fluiddb/id = "%s"' % uuid4())
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithNotEqualsUnicodeComparison(self):
    """
    C{!=} comparisons against C{unicode} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/unicode': u'novalue'},
              uuid4(): {u'test/unicode': u'value'}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/unicode != "value"'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithNotEqualsBoolComparison(self):
    """
    C{!=} comparisons against C{bool} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/bool': True},
              uuid4(): {u'test/bool': False}}
    yield self.index.update(values)
    yield self.index.commit()
    # NOTE(review): this query spells the literal 'False' Python-style while
    # the C{=} bool test uses lowercase 'true' -- confirm the query parser
    # accepts both spellings.
    result = yield self.index.search(parseQuery(u'test/bool != False'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithNotEqualsIntComparison(self):
    """
    C{!=} comparisons against C{int} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': 42},
              uuid4(): {u'test/int': 65}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int != 65'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithNotEqualsFloatComparison(self):
    """
    C{!=} comparisons against C{float} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': 42.1},
              uuid4(): {u'test/float': 65.3}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float != 65.3'))
    self.assertEqual(set([matchingID]), result)
def testSearchWithNotEqualsFluidDBSlashIDComparison(self):
    """
    Using the special C{fluiddb/id} virtual tag in a C{!=} comparison
    raises a L{SearchError}.
    """
    query = parseQuery(u'fluiddb/id != "%s"' % uuid4())
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithLessThanIntComparison(self):
    """
    C{<} comparisons against C{int} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': 42},
              uuid4(): {u'test/int': 43}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int < 43'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithLessThanFloatComparison(self):
    """
    C{<} comparisons against C{float} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': 42.1},
              uuid4(): {u'test/float': 42.2}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float < 42.2'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithLessThanIntAndFloatComparison(self):
    """
    C{<} comparisons match numerically across C{float} and C{int} values.
    """
    firstID = uuid4()
    secondID = uuid4()
    thirdID = uuid4()
    yield self.index.update({firstID: {u'test/number': 42.1},
                             secondID: {u'test/number': 42.2},
                             thirdID: {u'test/number': 42}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/number < 42.2'))
    self.assertEqual(set([firstID, thirdID]), result)
def testSearchWithLessThanFluidDBSlashIDComparison(self):
    """
    Using the special C{fluiddb/id} virtual tag in a C{<} comparison
    raises a L{SearchError}.
    """
    query = parseQuery(u'fluiddb/id < "%s"' % uuid4())
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithLessThanOrEqualIntComparison(self):
    """
    C{<=} comparisons against C{int} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': 42},
              uuid4(): {u'test/int': 43}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int <= 42'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithLessThanOrEqualFloatComparison(self):
    """
    C{<=} comparisons against C{float} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': 42.1},
              uuid4(): {u'test/float': 42.11}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float <= 42.1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithLessThanOrEqualIntAndFloatComparison(self):
    """
    C{<=} comparisons match numerically across C{float} and C{int} values.
    """
    firstID = uuid4()
    secondID = uuid4()
    thirdID = uuid4()
    yield self.index.update({firstID: {u'test/number': 42.1},
                             secondID: {u'test/number': 42.11},
                             thirdID: {u'test/number': 42}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/number <= 42.1'))
    self.assertEqual(set([firstID, thirdID]), result)
def testSearchWithLessThanOrEqualFluidDBSlashIDComparison(self):
    """
    Using the special C{fluiddb/id} virtual tag in a C{<=} comparison
    raises a L{SearchError}.
    """
    query = parseQuery(u'fluiddb/id <= "%s"' % uuid4())
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithGreaterThanIntComparison(self):
    """
    C{>} comparisons against C{int} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': 43},
              uuid4(): {u'test/int': 42}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int > 42'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithGreaterThanFloatComparison(self):
    """
    C{>} comparisons against C{float} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': 42.2},
              uuid4(): {u'test/float': 42.1}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float > 42.1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithGreaterThanIntAndFloatComparison(self):
    """
    C{>} comparisons match numerically across C{float} and C{int} values.
    """
    firstID = uuid4()
    secondID = uuid4()
    thirdID = uuid4()
    yield self.index.update({firstID: {u'test/number': 42.2},
                             secondID: {u'test/number': 42.1},
                             thirdID: {u'test/number': 43}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/number > 42.1'))
    self.assertEqual(set([firstID, thirdID]), result)
def testSearchWithGreaterThanFluidDBSlashIDComparison(self):
    """
    Using the special C{fluiddb/id} virtual tag in a C{>} comparison
    raises a L{SearchError}.
    """
    query = parseQuery(u'fluiddb/id > "%s"' % uuid4())
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithGreaterThanOrEqualIntComparison(self):
    """
    C{>=} comparisons against C{int} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/int': 43},
              uuid4(): {u'test/int': 42}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/int >= 43'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithGreaterThanOrEqualFloatComparison(self):
    """
    C{>=} comparisons against C{float} values are supported by
    L{ObjectIndex.search}.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/float': 42.2},
              uuid4(): {u'test/float': 42.1}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/float >= 42.2'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithGreaterThanOrEqualIntAndFloatComparison(self):
    """
    C{>=} comparisons match numerically across C{float} and C{int} values.
    """
    firstID = uuid4()
    secondID = uuid4()
    thirdID = uuid4()
    yield self.index.update({firstID: {u'test/number': 42.2},
                             secondID: {u'test/number': 42.1},
                             thirdID: {u'test/number': 43}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/number >= 42.2'))
    self.assertEqual(set([firstID, thirdID]), result)
def testSearchWithGreaterThanOrEqualFluidDBSlashIDComparison(self):
    """
    A L{SearchError} is raised if a C{>=} comparison is used with the
    special C{fluiddb/id} virtual tag.
    """
    objectID = uuid4()
    # FIX: the query previously used '>' (copy-paste from the greater-than
    # test), so this test duplicated that case instead of exercising '>='.
    query = parseQuery(u'fluiddb/id >= "%s"' % objectID)
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithHasNoneValue(self):
    """
    C{has} queries match objects tagged with C{None} values.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag1': None},
              uuid4(): {u'test/tag2': None}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasBoolValue(self):
    """
    C{has} queries match objects tagged with C{bool} values.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag1': True},
              uuid4(): {u'test/tag2': True}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasIntValue(self):
    """
    C{has} queries match objects tagged with C{int} values.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag1': 42},
              uuid4(): {u'test/tag2': 42}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasFloatValue(self):
    """
    C{has} queries match objects tagged with C{float} values.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag1': 42.1},
              uuid4(): {u'test/tag2': 42.2}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasUnicodeValue(self):
    """
    C{has} queries match objects tagged with C{unicode} values.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag1': u'value'},
              uuid4(): {u'test/tag2': u'value'}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasSetValue(self):
    """
    C{has} queries match objects tagged with C{list} values.
    """
    matchingID = uuid4()
    values = {matchingID: {u'test/tag1': [u'foo', u'bar']},
              uuid4(): {u'test/tag2': [u'foo', u'bar']}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasBinaryValue(self):
    """
    C{has} queries match objects tagged with binary values.
    """
    matchingID = uuid4()
    binary = {'mime-type': 'text/html', 'file-id': 'index.html', 'size': 7}
    yield self.index.update({matchingID: {u'test/tag1': binary},
                             uuid4(): {u'test/tag2': binary}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/tag1'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithHasColonInPath(self):
    """
    C{has} queries work with L{Tag.path}s that contain a colon.
    """
    matchingID = uuid4()
    binary = {'mime-type': 'text/html', 'file-id': 'index.html', 'size': 7}
    yield self.index.update({matchingID: {u'test/one:two': binary},
                             uuid4(): {u'test/tag2': binary}})
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'has test/one:two'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithMatches(self):
    """C{matches} queries are supported by L{ObjectIndex.search}."""
    matchingID = uuid4()
    values = {matchingID: {u'test/tag': u'value'},
              uuid4(): {u'test/tag': u'devalue'}}
    yield self.index.update(values)
    yield self.index.commit()
    result = yield self.index.search(parseQuery(u'test/tag matches "value"'))
    self.assertEqual(set([matchingID]), result)
@inlineCallbacks
def testSearchWithMatchesWithEmptyValue(self):
    """
    L{ObjectIndex.search} can perform C{matches} queries with empty
    strings.
    """
    objectID = uuid4()
    # Only the object whose tag value is the empty string should match.
    yield self.index.update({objectID: {u'test/tag': u''},
                             uuid4(): {u'test/tag': u'devalue'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches ""')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
@inlineCallbacks
def testSearchWithMatchesIsCaseInsensitive(self):
    """
    L{ObjectIndex.search} performs C{matches} queries case-insensitively.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    objectID3 = uuid4()
    # Three case variants of the same word must all match a mixed-case query.
    yield self.index.update({objectID1: {u'test/tag': u'VALUE'},
                             objectID2: {u'test/tag': u'value'},
                             objectID3: {u'test/tag': u'VaLuE'},
                             uuid4(): {u'test/tag': u'devalue'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "vAlUe"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2, objectID3]), result)
@inlineCallbacks
def testSearchWithMatchesAndManyTerms(self):
    """
    L{ObjectIndex.search} can match terms with spaces when the C{matches}
    query is used.
    """
    objectID = uuid4()
    # A two-word query term must match within a longer multi-word value.
    yield self.index.update(
        {objectID: {u'test/tag': u'apple orange cherry'},
         uuid4(): {u'test/tag': u'value'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "apple orange"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
@inlineCallbacks
def testSearchWithMatchesAndManyTermsIsCaseInsensitive(self):
    """
    L{ObjectIndex.search} can match terms with spaces when the C{matches}
    query is used.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    objectID3 = uuid4()
    # Multi-word matching must also ignore case across all words.
    yield self.index.update(
        {objectID1: {u'test/tag': u'APPLE ORANGE CHERRY'},
         objectID2: {u'test/tag': u'apple orange cherry'},
         objectID3: {u'test/tag': u'apple orange cherry'},
         uuid4(): {u'test/tag': u'devalue'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "aPpLe OrAnGe"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2, objectID3]), result)
@inlineCallbacks
def testSearchWithMatchesAndPunctuation(self):
    """
    L{ObjectIndex.search} can match terms with punctuation when a
    C{matches} query is used.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    # Query terms ending in ':' and ',' must match, while the decoy
    # 'One Book' (no punctuation) must not.
    yield self.index.update(
        {objectID1: {u'test/tag': u'book: Moby Dick'},
         objectID2: {u'test/tag': u'One, Two, Three.'},
         uuid4(): {u'test/tag': u'One Book'}})
    yield self.index.commit()
    query = parseQuery(
        u'test/tag matches "book:" or test/tag matches "One,"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
@inlineCallbacks
def testSearchWithMatchesAndStarWildcard(self):
    """
    L{ObjectIndex.search} can match terms using the '*' wildcard when a
    C{matches} query is used.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    # 'book:*' should match both 'book:'-prefixed values, but not 'One Book'.
    yield self.index.update(
        {objectID1: {u'test/tag': u'book:Moby Dict'},
         objectID2: {u'test/tag': u'book:Alice in Wonderland'},
         uuid4(): {u'test/tag': u'One Book'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "book:*"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
@inlineCallbacks
def testSearchWithMatchesAndStarWildcardAtTheBegining(self):
    """
    L{ObjectIndex.search} can match terms using the '*' wildcard at the
    begining of a term when a C{matches} query is used.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    # '*moby' should match both the 'book:' and 'movie:' prefixed values.
    yield self.index.update(
        {objectID1: {u'test/tag': u'book:Moby Dick'},
         objectID2: {u'test/tag': u'movie:Moby Dick'},
         uuid4(): {u'test/tag': u'One Book'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "*moby"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
@inlineCallbacks
def testSearchWithMatchesAndQuestionMarkWildcard(self):
    """
    L{ObjectIndex.search} can match terms using the '?' wildcard when a
    C{matches} query is used.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    # 'r?d' should match 'red' and 'rid' but not 'run'.
    yield self.index.update(
        {objectID1: {u'test/tag': u'red stone'},
         objectID2: {u'test/tag': u'get rid of the body'},
         uuid4(): {u'test/tag': u'run, forest, run'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "r?d"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
@inlineCallbacks
def testSearchWithMatchesAndFuzzySearch(self):
    """
    L{ObjectIndex.search} can match fuzzy terms using the '~' wildcard when
    a C{matches} query is used.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    # 'fuzzy~' should match both 'fuzzy' and the similar word 'wuzzy'.
    yield self.index.update(
        {objectID1: {u'test/tag': u'fuzzy search'},
         objectID2: {u'test/tag': u'wuzzy term'},
         uuid4(): {u'test/tag': u'not related term'}})
    yield self.index.commit()
    query = parseQuery(u'test/tag matches "fuzzy~"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
@inlineCallbacks
def testSearchWithMatchesAndEscapedWildcars(self):
    """
    L{ObjectIndex.search} can match terms with '*', '?' and '~' using
    character escaping.
    """
    objectID1 = uuid4()
    objectID2 = uuid4()
    # Backslash-escaped wildcards must be treated as literal characters,
    # so the decoy 'Blue and remarkable' must not match.
    yield self.index.update(
        {objectID1: {u'test/tag': u'Is that man blue?'},
         objectID2: {u'test/tag': u'Syntax: *remark*'},
         uuid4(): {u'test/tag': u'Blue and remarkable'}})
    yield self.index.commit()
    query = parseQuery(
        u'test/tag matches "blue\?" or test/tag matches "\*remark\*"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
def testSearchWithMatchesAndFluidDBSlashID(self):
    """
    A L{SearchError} is raised if a C{matches} query is used with the
    special C{fluiddb/id} virtual tag.
    """
    objectID = uuid4()
    query = parseQuery(u'fluiddb/id matches "%s"' % objectID)
    # assertFailure returns a Deferred that fires when SearchError is raised.
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithContains(self):
    """L{ObjectIndex.search} can perform C{contains} queries."""
    objectID = uuid4()
    # Only the object whose list value contains 'foo' should match.
    yield self.index.update({objectID: {u'test/tag': [u'foo', u'bar']},
                             uuid4(): {u'test/tag': [u'baz']}})
    yield self.index.commit()
    query = parseQuery(u'test/tag contains "foo"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
@inlineCallbacks
def testSearchWithContainsAndTermWithWhitespace(self):
    """
    L{ObjectIndex.search} can perform C{contains} queries with terms that
    include whitespace.
    """
    objectID = uuid4()
    # The list element 'foo bar' must match as a single whole term.
    yield self.index.update({objectID: {u'test/tag': [u'foo bar', u'baz']},
                             uuid4(): {u'test/tag': [u'quux']}})
    yield self.index.commit()
    query = parseQuery(u'test/tag contains "foo bar"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
def testSearchWithContainsAndFluidDBSlashID(self):
    """
    A L{SearchError} is raised if a C{contains} query is used with the
    special C{fluiddb/id} virtual tag.
    """
    objectID = uuid4()
    query = parseQuery(u'fluiddb/id contains "%s"' % objectID)
    # assertFailure returns a Deferred that fires when SearchError is raised.
    return self.assertFailure(self.index.search(query), SearchError)
@inlineCallbacks
def testSearchWithOr(self):
    """L{ObjectIndex.search} can perform C{or} queries."""
    objectID1 = uuid4()
    objectID2 = uuid4()
    # Objects matching either branch are returned; the 93 decoy is not.
    yield self.index.update({objectID1: {u'test/int': 42},
                             objectID2: {u'test/int': 67},
                             uuid4(): {u'test/int': 93}})
    yield self.index.commit()
    query = parseQuery(u'test/int = 42 or test/int = 67')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID1, objectID2]), result)
@inlineCallbacks
def testSearchWithOrUnmatched(self):
    """
    L{ObjectIndex.search} only returns objects that match one side of an
    C{or} query.
    """
    # Neither branch (41 or 66) matches the stored values 42 and 67.
    yield self.index.update({uuid4(): {u'test/int': 42},
                             uuid4(): {u'test/int': 67}})
    yield self.index.commit()
    query = parseQuery(u'test/int = 41 or test/int = 66')
    result = yield self.index.search(query)
    self.assertEqual(set([]), result)
@inlineCallbacks
def testSearchWithAnd(self):
    """L{ObjectIndex.search} can perform C{and} queries."""
    objectID = uuid4()
    # Only the object satisfying both conditions should be returned.
    yield self.index.update({objectID: {u'test/int': 42,
                                        u'test/unicode': u'value'},
                             uuid4(): {u'test/int': 67}})
    yield self.index.commit()
    query = parseQuery(u'test/int = 42 and test/unicode = "value"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
@inlineCallbacks
def testSearchWithAndUnmatched(self):
    """
    L{ObjectIndex.search} only returns objects that match both sides of an
    C{and} query.
    """
    # The first object matches only the unicode branch, the second neither.
    yield self.index.update({uuid4(): {u'test/int': 67,
                                       u'test/unicode': u'value'},
                             uuid4(): {u'test/int': 95}})
    yield self.index.commit()
    query = parseQuery(u'test/int = 42 and test/unicode = "value"')
    result = yield self.index.search(query)
    self.assertEqual(set([]), result)
@inlineCallbacks
def testSearchWithExcept(self):
    """L{ObjectIndex.search} can perform C{except} queries."""
    objectID1 = uuid4()
    objectID2 = uuid4()
    # Both match 'test/int = 42'; the except clause removes objectID1.
    yield self.index.update({objectID1: {u'test/int': 42,
                                         u'test/unicode': u'value'},
                             objectID2: {u'test/int': 42,
                                         u'test/unicode': u'hello'}})
    yield self.index.commit()
    query = parseQuery(u'test/int = 42 except test/unicode = "value"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID2]), result)
@inlineCallbacks
def testSearchWithUnicodePath(self):
    """
    L{ObjectIndex.search} can search for paths with unicode characters in
    them.
    """
    objectID = uuid4()
    # Tag path contains a non-ASCII character (Japanese hiragana 'a').
    path = u'test/\N{HIRAGANA LETTER A}'
    yield self.index.update({objectID: {path: u'value'},
                             uuid4(): {path: u'another'}})
    yield self.index.commit()
    query = parseQuery(u'test/\N{HIRAGANA LETTER A} = "value"')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
@inlineCallbacks
def testSearchWithComplexQuery(self):
    """L{ObjectIndex.search} can handle complex queries."""
    objectID = uuid4()
    yield self.index.update({objectID: {u'test/unicode': u'value',
                                        u'test/int': 42,
                                        u'test/float': 42.1}})
    yield self.index.commit()
    # Combines and/or/except with parentheses and an unknown tag.
    query = parseQuery(u'test/unicode = "value" and '
                       u'(test/int = 42 or test/float = 42.1) '
                       u'except test/unknown = 10')
    result = yield self.index.search(query)
    self.assertEqual(set([objectID]), result)
class EscapeWithWildcards(FluidinfoTestCase):
    """Tests for L{escapeWithWildcards}."""

    def testEscapeWithWildcards(self):
        """
        L{escapeWithWildcards} escapes all special Lucene characters but
        leaves the wildcard characters untouched.
        """
        cases = [(r'Hello*World', r'Hello*World'),
                 (r'Hello\*World', r'Hello\*World'),
                 (r'Hello "World"', r'Hello \"World\"'),
                 (r'Hello |&^"~*?', r'Hello \|\&\^\"~*?'),
                 (r'Hello (World)', r'Hello \(World\)'),
                 (r'Hello:World', r'Hello\:World'),
                 (r'Hello\World', r'Hello\\World'),
                 (r'Hello World', r'Hello World')]
        for plain, expected in cases:
            self.assertEqual(expected, escapeWithWildcards(plain))
class CreateObjectTest(FluidinfoTestCase):
    """Tests for L{createDirtyObject}."""

    resources = [('store', DatabaseResource())]

    def testCreateObject(self):
        """L{createDirtyObject} creates a new L{DirtyObject}."""
        objectID = uuid4()
        object1 = createDirtyObject(objectID)
        self.assertEqual(objectID, object1.objectID)

    def testCreateTagAddsToStore(self):
        """
        L{createDirtyObject} adds the new L{DirtyObject} to the main store.
        """
        objectID = uuid4()
        object1 = createDirtyObject(objectID)
        # The returned object must be the exact instance stored in the DB.
        result = self.store.find(DirtyObject, DirtyObject.objectID == objectID)
        self.assertIdentical(object1, result.one())
class GetObjectsTest(FluidinfoTestCase):
    """Tests for L{getDirtyObjects}."""

    resources = [('store', DatabaseResource())]

    def testGetObjects(self):
        """
        L{getDirtyObjects} returns all L{DirtyObject}s in the database, by
        default.
        """
        object1 = createDirtyObject(uuid4())
        self.assertEqual(object1, getDirtyObjects().one())

    def testGetObjectsWithObjectIDs(self):
        """
        When L{DirtyObject.objectID}s are provided L{getDirtyObjects} returns
        matching L{DirtyObject}s.
        """
        objectID = uuid4()
        object1 = createDirtyObject(objectID)
        # A second, unrelated dirty object must be filtered out.
        createDirtyObject(uuid4())
        result = getDirtyObjects(objectIDs=[objectID])
        self.assertIdentical(object1, result.one())
class TouchObjectsTest(FluidinfoTestCase):
    """Tests for L{touchObjects}."""

    resources = [('store', DatabaseResource())]

    def testTouchObjects(self):
        """L{touchObjects} adds the objects to the C{dirty_objects} table."""
        objectID = uuid4()
        touchObjects([objectID])
        self.assertNotIdentical(None, getDirtyObjects([objectID]).one())
| 39.029703
| 79
| 0.574558
| 4,492
| 43,362
| 5.546082
| 0.073464
| 0.070806
| 0.105086
| 0.052182
| 0.794726
| 0.761691
| 0.755549
| 0.743668
| 0.73271
| 0.699153
| 0
| 0.018434
| 0.29316
| 43,362
| 1,110
| 80
| 39.064865
| 0.794388
| 0.151953
| 0
| 0.624143
| 0
| 0
| 0.106837
| 0
| 0
| 0
| 0
| 0
| 0.109739
| 1
| 0.111111
| false
| 0
| 0.00823
| 0
| 0.142661
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fede757cb8f63cae1c1695305be6efd36ec7dff5
| 46
|
py
|
Python
|
src/gui.py
|
LBRY-Omnibus/lbry-multi-channel-uploader
|
cee11b82f4dda71418aa84c0fdd707392bd97f3f
|
[
"Apache-2.0"
] | null | null | null |
src/gui.py
|
LBRY-Omnibus/lbry-multi-channel-uploader
|
cee11b82f4dda71418aa84c0fdd707392bd97f3f
|
[
"Apache-2.0"
] | 2
|
2022-02-25T20:59:06.000Z
|
2022-03-11T20:32:13.000Z
|
src/gui.py
|
LBRY-Omnibus/lbry-multi-channel-uploader
|
cee11b82f4dda71418aa84c0fdd707392bd97f3f
|
[
"Apache-2.0"
] | 1
|
2022-03-11T20:43:53.000Z
|
2022-03-11T20:43:53.000Z
|
# This is going to end up being built in Kivy.
| 46
| 46
| 0.73913
| 10
| 46
| 3.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 46
| 1
| 46
| 46
| 0.944444
| 0.956522
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fef249334441b817efc539f483477cc3f7b2d0bf
| 177
|
py
|
Python
|
eplusplus/exception/__init__.py
|
labeee/EPlusPlus
|
da6cbd60575146a8f165fb72e165919cd83ddc24
|
[
"MIT"
] | 1
|
2018-02-06T17:41:12.000Z
|
2018-02-06T17:41:12.000Z
|
eplusplus/exception/__init__.py
|
labeee/EPlusPlus
|
da6cbd60575146a8f165fb72e165919cd83ddc24
|
[
"MIT"
] | null | null | null |
eplusplus/exception/__init__.py
|
labeee/EPlusPlus
|
da6cbd60575146a8f165fb72e165919cd83ddc24
|
[
"MIT"
] | 1
|
2021-06-29T02:49:59.000Z
|
2021-06-29T02:49:59.000Z
|
from .installException import InstallException
from .columnException import ColumnException
from .noIdfException import NoIdfException
from .noCsvException import NoCsvException
| 44.25
| 46
| 0.892655
| 16
| 177
| 9.875
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 177
| 4
| 47
| 44.25
| 0.975309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3a0f53d9c18c480fc07ee446c047a9bbd16a6435
| 275
|
py
|
Python
|
n_klimovych/code_wars/9.3 - Basic subclasses - Adam and Eve.py
|
kolyasalubov/Lv-639.pythonCore
|
06f10669a188318884adb00723127465ebdf2907
|
[
"MIT"
] | null | null | null |
n_klimovych/code_wars/9.3 - Basic subclasses - Adam and Eve.py
|
kolyasalubov/Lv-639.pythonCore
|
06f10669a188318884adb00723127465ebdf2907
|
[
"MIT"
] | null | null | null |
n_klimovych/code_wars/9.3 - Basic subclasses - Adam and Eve.py
|
kolyasalubov/Lv-639.pythonCore
|
06f10669a188318884adb00723127465ebdf2907
|
[
"MIT"
] | null | null | null |
class Human(object):
    """Common base class for every human being."""

    def __init__(self):
        # No state to initialise yet.
        pass
class Man(Human):
    """A man: the male kind of L{Human}."""

    def __init__(self):
        # Delegate initialisation to the Human base class.
        super().__init__()
class Woman(Human):
    """A woman: the female kind of L{Human}."""

    def __init__(self):
        # Delegate initialisation to the Human base class.
        super().__init__()
def God():
    """Create the first man and the first woman, returned as a two-item list."""
    return [Man(), Woman()]
| 18.333333
| 26
| 0.556364
| 32
| 275
| 4.15625
| 0.4375
| 0.157895
| 0.24812
| 0.240602
| 0.37594
| 0.37594
| 0
| 0
| 0
| 0
| 0
| 0
| 0.298182
| 275
| 14
| 27
| 19.642857
| 0.689119
| 0
| 0
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0.076923
| 0
| 0
| 0.615385
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
3a4052903ae0ed6b3dfecbb28ea8d1c41cf14ea6
| 115
|
py
|
Python
|
pulsemaker/__init__.py
|
anushkrishnav/pulsemaker
|
598b0b35569a7b3adb4722d0ebd70dd495e12037
|
[
"Apache-2.0"
] | 14
|
2021-02-13T03:02:45.000Z
|
2021-12-13T06:03:53.000Z
|
pulsemaker/__init__.py
|
anushkrishnav/pulsemaker
|
598b0b35569a7b3adb4722d0ebd70dd495e12037
|
[
"Apache-2.0"
] | null | null | null |
pulsemaker/__init__.py
|
anushkrishnav/pulsemaker
|
598b0b35569a7b3adb4722d0ebd70dd495e12037
|
[
"Apache-2.0"
] | 3
|
2021-02-08T08:07:21.000Z
|
2021-12-17T14:16:32.000Z
|
# __all__ = ['ScheduleEditor', 'PulseDesigner', '...']
from .ScheduleDesigner import *
from .PulseDesigner import *
| 38.333333
| 54
| 0.721739
| 9
| 115
| 8.777778
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113043
| 115
| 3
| 55
| 38.333333
| 0.77451
| 0.452174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3a513009baf5a320f8dd023af69a29b800673d58
| 58
|
py
|
Python
|
vocompr/__init__.py
|
EnzoBnl/vocompr
|
fdbe5df59a698e232fb4b107aba1d96d4f8dba80
|
[
"Apache-2.0"
] | null | null | null |
vocompr/__init__.py
|
EnzoBnl/vocompr
|
fdbe5df59a698e232fb4b107aba1d96d4f8dba80
|
[
"Apache-2.0"
] | 5
|
2019-11-22T22:52:46.000Z
|
2020-04-21T13:09:36.000Z
|
vocompr/__init__.py
|
EnzoBnl/vocompr
|
fdbe5df59a698e232fb4b107aba1d96d4f8dba80
|
[
"Apache-2.0"
] | null | null | null |
from .vocompr import vocompr, unvocompr, compression_rate
| 29
| 57
| 0.844828
| 7
| 58
| 6.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 58
| 1
| 58
| 58
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
28c5a3cbc23a3e5cfc7c9e419f4fe6e0fa3da4c3
| 271
|
py
|
Python
|
config.py
|
Iggip/LPML-bot
|
cd96777d0cc2105d3117c1d918eb4d87be81b47b
|
[
"Apache-2.0"
] | 1
|
2021-04-23T14:15:27.000Z
|
2021-04-23T14:15:27.000Z
|
config.py
|
Iggip/LPML-bot
|
cd96777d0cc2105d3117c1d918eb4d87be81b47b
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
Iggip/LPML-bot
|
cd96777d0cc2105d3117c1d918eb4d87be81b47b
|
[
"Apache-2.0"
] | null | null | null |
# Bot configuration template: replace every placeholder value before running.
# The original placeholders (0x###### and a bare '...' inside the list) were
# syntax errors, so the module could not even be imported; they are now valid
# Python placeholders with the same intent.
TOKEN = "YOUR_TOKEN"  # Discord bot token.
way = 'YOUR_FILE_WHERE_HOMEWORK_WILL_BE_STORED.dat'  # Path of the homework data file.
# List of students, e.g. ['Doe John', 'Roe Jane', ...].
students = ['SURNAME NAME', 'SURNAME NAME', 'SURNAME NAME']
# Embed colour as a hex RGB integer, e.g. 0x4472C4.
color = 0xFFFFFF
bot_activity = 'STATUS'  # Presence text shown for the bot.
edit_role = 'ROLE_WHICH_CAN_CREATE_AND_EDIT_HOMEWORKS'  # Role allowed to edit homework.
prefix = 'COMMAND_PREFIX(/, !, ?...)'  # Command prefix placeholder.
| 33.875
| 63
| 0.719557
| 36
| 271
| 4.972222
| 0.75
| 0.184358
| 0.201117
| 0.24581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004132
| 0.107011
| 271
| 7
| 64
| 38.714286
| 0.735537
| 0
| 0
| 0
| 0
| 0
| 0.607547
| 0.313208
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
28df939866977b795813e9387b91c4ae2ce65025
| 2,236
|
py
|
Python
|
uc-backbone/select_backbone.py
|
lovish1234/TPC
|
10e93eeb0e22e411579cfb9f94fac7870f6e2039
|
[
"MIT"
] | null | null | null |
uc-backbone/select_backbone.py
|
lovish1234/TPC
|
10e93eeb0e22e411579cfb9f94fac7870f6e2039
|
[
"MIT"
] | null | null | null |
uc-backbone/select_backbone.py
|
lovish1234/TPC
|
10e93eeb0e22e411579cfb9f94fac7870f6e2039
|
[
"MIT"
] | null | null | null |
from resnet_2d3d import *
def select_resnet(network,
                  track_running_stats=True,
                  distance_type='certain',
                  radius_type='linear'):
    """Build the requested 2d3d-ResNet backbone and its parameter dict.

    The original implementation repeated the same construct-and-set-size
    branch for every network name; the table below removes that duplication
    while keeping behaviour identical.

    Args:
        network: backbone name, e.g. 'resnet8' .. 'resnet200'.
        track_running_stats: forwarded to the constructor's BatchNorm layers.
        distance_type: 'certain' or 'uncertain'; 'uncertain' adds one extra
            feature channel on the small/medium variants.
        radius_type: forwarded to the small/medium constructors only.

    Returns:
        (model, param) where param['feature_size'] is the backbone's output
        feature dimension (default 1024 if the branch does not set it).

    Raises:
        IOError: if the network name is unknown (kept for backward
            compatibility with existing callers).
    """
    # Variants that accept distance/radius kwargs, with their base feature size.
    sized_variants = {
        'resnet8': (resnet8_2d3d_mini, 16),
        'resnet10': (resnet10_2d3d_mini, 16),
        'resnet18': (resnet18_2d3d_full, 256),
        'resnet34': (resnet34_2d3d_full, 256),
    }
    # Larger variants that only take track_running_stats.
    plain_variants = {
        'resnet50': resnet50_2d3d_full,
        'resnet101': resnet101_2d3d_full,
        'resnet152': resnet152_2d3d_full,
        'resnet200': resnet200_2d3d_full,
    }
    param = {'feature_size': 1024}
    if network in sized_variants:
        constructor, base_size = sized_variants[network]
        model = constructor(track_running_stats=track_running_stats,
                            distance_type=distance_type,
                            radius_type=radius_type)
        if distance_type == 'uncertain':
            param['feature_size'] = base_size + 1  # extra uncertainty channel
        elif distance_type == 'certain':
            param['feature_size'] = base_size
        # Any other distance_type keeps the 1024 default, as before.
    elif network in plain_variants:
        model = plain_variants[network](track_running_stats=track_running_stats)
    else:
        raise IOError('model type is wrong')
    return model, param
| 40.654545
| 76
| 0.605993
| 232
| 2,236
| 5.465517
| 0.189655
| 0.160883
| 0.227918
| 0.138801
| 0.726341
| 0.726341
| 0.726341
| 0.726341
| 0.693218
| 0.693218
| 0
| 0.050355
| 0.307245
| 2,236
| 54
| 77
| 41.407407
| 0.768238
| 0
| 0
| 0.509804
| 0
| 0
| 0.120751
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.019608
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e90330aaf8e6cae28c5171b3603be49d1945e671
| 5,232
|
py
|
Python
|
program.py
|
Sanjin84/CompetitionInterface
|
50ba1c58b874897d1991b6a28816f2424803a0b2
|
[
"CC0-1.0"
] | null | null | null |
program.py
|
Sanjin84/CompetitionInterface
|
50ba1c58b874897d1991b6a28816f2424803a0b2
|
[
"CC0-1.0"
] | null | null | null |
program.py
|
Sanjin84/CompetitionInterface
|
50ba1c58b874897d1991b6a28816f2424803a0b2
|
[
"CC0-1.0"
] | null | null | null |
import tkinter as tk
from tkinter import *
# Main application window: 1000x600 initial size, resizable, titled dashboard.
root = Tk()
root.geometry("1000x600")
root.resizable(True,True)
root.title("DASHBOARD")
def second():
    # Login screen: three keyword entries, each with a VALIDATE button.
    # NOTE(review): Frame(...).place(...) returns None, so 'sec' is None and
    # the widgets below are actually parented to the default root window —
    # confirm this is intentional before refactoring.
    sec= Frame(root,bg='#4472C4').place(relx=0, rely=0, relwidth=1, relheight=1)
    tt = tk.Label(sec,text="GLOBAL WATCHTOWER SV 21 INTERFACE",font = "Verdana 15 bold",bg="#4472C4").place(rely=0.06,relx=0.3)
    t2 = tk.Label(sec,text="TO LOG IN YOU MUST ENTER 3 VALID KEYWORDS\nYOU CAN MAKE AS MANY ATTEMPTS AS YOU WISH",font = "Verdana 15 bold",bg="#4472C4").place(rely=0.25,relx=0.25)
    e1 = Entry(sec, text = 'WORD 1',bg="#70AD47").place(relx=0.1,rely=0.4, relwidth=0.45, relheight=0.1)
    e2 = Entry(sec, text = 'WORD 2',bg="#70AD47").place(relx=0.1,rely=0.55, relwidth=0.45, relheight=0.1)
    e3 = Entry(sec, text = 'WORD 3',bg="#70AD47").place(relx=0.1,rely=0.7, relwidth=0.45, relheight=0.1)
    # Only the third button advances (command=third); the first two are inert.
    b1 = Button(sec,text="VALIDATE", command="",bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.4, relwidth=0.2, relheight=0.1)
    b2 = Button(sec,text="VALIDATE", command="",bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.55,relwidth=0.2, relheight=0.1)
    b3 = Button(sec,text="VALIDATE", command=third,bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.7,relwidth=0.2, relheight=0.1)
def third():
    # Mission screen: three labelled frames offering launch/stall/destroy.
    # NOTE(review): as elsewhere, Frame(...).place(...) returns None, so
    # 'thr' and the frame variables are None — widgets parent to the default
    # root; confirm before refactoring.
    thr= Frame(root,bg='#4472C4').place(relx=0, rely=0, relwidth=1, relheight=1)
    tt = tk.Label(thr,text="GLOBAL WATCHTOWER SV 21 INTERFACE",font = "Verdana 15 bold",bg='#4472C4').place(relx=0.3,rely=0.06)
    frame1 = tk.LabelFrame(thr,bd=5,bg='#4472C4').place(relx=0.06,rely=0.35, relwidth=0.25, relheight=0.6)
    b1 = Button(frame1,text="Launch Virus", command=launch,bg="#70AD47",font = "Arial 20 bold").place(relx=0.06,rely=0.2, relwidth=0.25, relheight=0.1)
    #t2 = tk.Label(frame1,text="This option launches the virus directly\nin 3,125,673 with number of \ninfected devices doubling every 13 hours."+
    #"\n"+"Total collapse in global tech infrastructure in 3 -5 days",font = "Verdana 9",justify=LEFT).place(relx=0.08,rely=0.4, relwidth=0.25, relheight=0.5)
    frame2 = tk.LabelFrame(thr,bd=5,bg='#4472C4').place(relx=0.4,rely=0.35, relwidth=0.25, relheight=0.6)
    b2 = Button(frame2,text="Stall Virus", command=stall,bg="#70AD47",font = "Arial 20 bold").place(relx=0.4,rely=0.2,relwidth=0.25, relheight=0.1)
    #t2 = tk.Label(thr,text="GLOBAL WATCHTOWER SV 21 INTERFACE",font = "Verdana 15 bold").place(rely=0.3,relx=0.4)
    frame3 = tk.LabelFrame(thr,bd=5,bg='#4472C4').place(relx=0.7,rely=0.35, relwidth=0.25, relheight=0.6)
    # The "Destroy Virus" button has an empty command and does nothing yet.
    b3 = Button(frame3,text="Destroy Virus", command="",bg="#70AD47",font = "Arial 20 bold").place(relx=0.7,rely=0.2,relwidth=0.25, relheight=0.1)
    #tt = tk.Label(thr,text="GLOBAL WATCHTOWER SV 21 INTERFACE",font = "Verdana 15 bold").place(rely=0.06,relx=0.3)
def launch():
    # Screen shown after "Launch Virus": asks for the final two keywords.
    # NOTE(review): 'sec' is None (see second()); widgets parent to root.
    sec= Frame(root,bg='#4472C4').place(relx=0, rely=0, relwidth=1, relheight=1)
    tt = tk.Label(sec,text="GLOBAL WATCHTOWER SV 21 INTERFACE",font = "Verdana 15 bold",bg="#4472C4").place(rely=0.06,relx=0.3)
    t2 = tk.Label(sec,text="DESTRUCTION OF THE VIRUS REQUIRES THE USE OF THE FINAL TWO KEYWORDS",font = "Verdana 15 bold",bg="#4472C4").place(relx=0.05,rely=0.25)
    e1 = Entry(sec, text = 'WORD 1',bg="#70AD47").place(relx=0.1,rely=0.4, relwidth=0.45, relheight=0.1)
    e2 = Entry(sec, text = 'WORD 2',bg="#70AD47").place(relx=0.1,rely=0.55, relwidth=0.45, relheight=0.1)
    # Both VALIDATE buttons have empty commands — validation is not wired up.
    b1 = Button(sec,text="VALIDATE", command="",bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.4, relwidth=0.2, relheight=0.1)
    b2 = Button(sec,text="VALIDATE", command="",bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.55,relwidth=0.2, relheight=0.1)
def stall():
    # Screen shown after "Stall Virus": same as launch() plus a banner that
    # the release is suspended for 5 days.
    # NOTE(review): 'sec' is None (see second()); widgets parent to root.
    sec= Frame(root,bg='#4472C4').place(relx=0, rely=0, relwidth=1, relheight=1)
    tt = tk.Label(sec,text="GLOBAL WATCHTOWER SV 21 INTERFACE",font = "Verdana 15 bold",bg="#4472C4").place(rely=0.06,relx=0.3)
    t1 = tk.Label(sec,text="VIRUS RELEASE SUSPENDED FOR 5 DAYS!",font = "Verdana 15 bold",bg="#4472C4").place(relx=0.25,rely=0.15)
    t2 = tk.Label(sec,text="DESTRUCTION OF THE VIRUS REQUIRES THE USE OF THE FINAL TWO KEYWORDS",font = "Verdana 15 bold",bg="#4472C4").place(relx=0.05,rely=0.25)
    e1 = Entry(sec, text = 'WORD 1',bg="#70AD47").place(relx=0.1,rely=0.4, relwidth=0.45, relheight=0.1)
    e2 = Entry(sec, text = 'WORD 2',bg="#70AD47").place(relx=0.1,rely=0.55, relwidth=0.45, relheight=0.1)
    # Both VALIDATE buttons have empty commands — validation is not wired up.
    b1 = Button(sec,text="VALIDATE", command="",bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.4, relwidth=0.2, relheight=0.1)
    b2 = Button(sec,text="VALIDATE", command="",bg="gray",font = "Verdana 15 bold").place(relx=0.6,rely=0.55,relwidth=0.2, relheight=0.1)
def raise_frame(frame):
    # Bring the given frame to the top of the stacking order.
    frame.tkraise()
# Start screen: team-name entry plus an Enter button that opens the login view.
# NOTE(review): Frame(...).place(...) returns None, so 'start' is None and the
# widgets below are parented to the default root window — confirm intent.
start = Frame(root,bg='#4472C4').place(relx=0, rely=0, relwidth=1, relheight=1)
fr = tk.Label(start,text="GLOBAL WATCHTOWER \n SV 21 INTERFACE",font = "Verdana 30 bold",bg="#4472C4").place(rely=0.1,relx=0.25)
e1 = Entry(start, text = 'ENTER TEAM NAME',bg="#70AD47").place(relx=0.1,rely=0.5, relwidth=0.8, relheight=0.15)
button = Button(start,text="Enter", command=second,bg="gray",font = "Verdana 15 bold")
button.place(rely=0.65,relx=0.1, relwidth=0.8, relheight=0.15)
# Enter the Tk event loop; blocks until the window is closed.
mainloop()
| 70.702703
| 179
| 0.675268
| 950
| 5,232
| 3.717895
| 0.143158
| 0.05521
| 0.087769
| 0.086636
| 0.778879
| 0.750849
| 0.723386
| 0.714326
| 0.700736
| 0.619196
| 0
| 0.11701
| 0.117928
| 5,232
| 74
| 180
| 70.702703
| 0.648321
| 0.09805
| 0
| 0.4
| 0
| 0
| 0.236217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.04
| 0
| 0.14
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e922f862820a572c1143bb0a95c6ba242c6c2615
| 2,415
|
py
|
Python
|
run_multi-gpu.py
|
cloneniu/mini-AlphaStar
|
b08c48e2c04a384fce5a84245e54ded93c6def4e
|
[
"Apache-2.0"
] | null | null | null |
run_multi-gpu.py
|
cloneniu/mini-AlphaStar
|
b08c48e2c04a384fce5a84245e54ded93c6def4e
|
[
"Apache-2.0"
] | null | null | null |
run_multi-gpu.py
|
cloneniu/mini-AlphaStar
|
b08c48e2c04a384fce5a84245e54ded93c6def4e
|
[
"Apache-2.0"
] | null | null | null |
import os
# Select GPUs before anything imports torch: CUDA_VISIBLE_DEVICES must be set
# prior to CUDA initialisation to take effect.
USED_DEVICES = "2, 3"
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = USED_DEVICES
# Synchronous kernel launches make CUDA errors surface at the failing call.
os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
import alphastarmini
import torch
from alphastarmini.core.arch import entity_encoder
from alphastarmini.core.arch import scalar_encoder
from alphastarmini.core.arch import spatial_encoder
from alphastarmini.core.arch import arch_model
from alphastarmini.core.arch import action_type_head
from alphastarmini.core.arch import selected_units_head
from alphastarmini.core.arch import target_unit_head
from alphastarmini.core.arch import delay_head
from alphastarmini.core.arch import queue_head
from alphastarmini.core.arch import location_head
from alphastarmini.core.arch import agent
from alphastarmini.core.arch import baseline
from alphastarmini.core.sl import load_pickle
from alphastarmini.core.rl import action
from alphastarmini.core.rl import env_utils
from alphastarmini.core.rl import actor
from alphastarmini.core.rl import against_computer
from alphastarmini.core.rl import pseudo_reward
import param as P
# Entry point: runs stage 2 (multi-GPU supervised learning from pickles).
# The other pipeline stages are kept commented out as a runbook reference.
if __name__ == '__main__':
    # if we don't add this line, it may cause running time error while in Windows
    # torch.multiprocessing.freeze_support()
    print("run init")
    # ------------------------
    # 1. first we transform the replays to pickle
    # from alphastarmini.core.sl import transform_replay_data
    # transform_replay_data.test(on_server=P.on_server)
    # # 2. second we use pickle to do multi-gpu supervised learning
    from alphastarmini.core.sl import sl_multi_gpu_by_pickle
    sl_multi_gpu_by_pickle.test(on_server=P.on_server)
    # # 2. second we use to do supervised learning
    # from alphastarmini.core.sl import sl_multi_gpu_by_tensor
    # sl_multi_gpu_by_tensor.test(on_server=P.on_server)
    # 3. third we use SL model and replays to do reinforcement learning
    # from alphastarmini.core.rl import rl_train_with_replay
    # rl_train_with_replay.test(on_server=P.on_server, replay_path=P.replay_path)
    # ------------------------
    #
    # below is not recommended to use
    # from alphastarmini.core.sl import analyze_replay_statistic
    # analyze_replay_statistic.test(on_server=False)
    # from alphastarmini.core.rl import rl_train_wo_replay
    # rl_train_wo_replay.test(on_server=False)
    # against_computer.test(on_server=False)
    print('run over')
| 34.5
| 81
| 0.775569
| 355
| 2,415
| 5.033803
| 0.312676
| 0.228316
| 0.282037
| 0.167879
| 0.556799
| 0.348069
| 0.143257
| 0.102966
| 0.102966
| 0.102966
| 0
| 0.00337
| 0.139959
| 2,415
| 69
| 82
| 35
| 0.857005
| 0.412422
| 0
| 0
| 0
| 0
| 0.068817
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.741935
| 0
| 0.741935
| 0.064516
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3a7894a7cf6f4fe2181d5ce9bdc3d914f45d6ad4
| 134
|
py
|
Python
|
v2_pir/admin.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
v2_pir/admin.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
v2_pir/admin.py
|
ruslan-ok/ruslan
|
fc402e53d2683581e13f4d6c69a6f21e5c2ca1f8
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from pir.models import PirTable, PirData

# Expose the PIR models in the Django admin with the default ModelAdmin.
admin.site.register(PirTable)
admin.site.register(PirData)
| 22.333333
| 40
| 0.828358
| 19
| 134
| 5.842105
| 0.578947
| 0.162162
| 0.306306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 134
| 5
| 41
| 26.8
| 0.909836
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3a861bb3a65a63855afa1f29ff9c36d3ec3e4767
| 7,468
|
py
|
Python
|
cl_streaming/process_results.py
|
x-zho14/bilevel_coresets
|
9ceb91af966a9a0a53d4f322ec747fa7ce853af9
|
[
"MIT"
] | 46
|
2020-05-28T11:25:21.000Z
|
2022-03-30T01:32:27.000Z
|
cl_streaming/process_results.py
|
x-zho14/bilevel_coresets
|
9ceb91af966a9a0a53d4f322ec747fa7ce853af9
|
[
"MIT"
] | 1
|
2021-11-01T13:48:11.000Z
|
2021-11-03T17:52:46.000Z
|
cl_streaming/process_results.py
|
x-zho14/bilevel_coresets
|
9ceb91af966a9a0a53d4f322ec747fa7ce853af9
|
[
"MIT"
] | 6
|
2021-03-29T08:36:19.000Z
|
2022-01-11T05:53:05.000Z
|
import numpy as np
import json
import argparse
def get_best_betas(methods, datasets, betas, seeds, buffer_size, save_best=False, path='cl_results',
                   save_path='cl_results/best_betas.txt'):
    """Pick, for every (method, dataset) pair, the beta with the best mean test accuracy.

    Per-run results are read from '{path}/{dataset}_{method}_{buffer_size}_{beta}_{seed}.txt',
    each a JSON object containing a 'test_acc' entry.

    Args:
        methods: iterable of method names.
        datasets: iterable of dataset names.
        betas: candidate regularization strengths to compare.
        seeds: seeds whose accuracies are averaged for each beta.
        buffer_size: buffer size component of the result filenames.
        save_best: if True, dump the selection table to ``save_path`` as JSON.
        path: directory holding the per-run result files.
        save_path: output file used when ``save_best`` is True.

    Returns:
        dict mapping method -> {dataset -> best beta}.  A pair with no
        candidate betas keeps the sentinel value -1.
    """
    best_betas = {}
    for method in methods:
        best_beta_for_method = {}
        for dataset in datasets:
            best_acc, best_beta = -1, -1
            for beta in betas:
                res = []
                for seed in seeds:
                    with open('{}/{}_{}_{}_{}_{}.txt'.format(path, dataset, method, buffer_size, beta, seed),
                              'r') as f:
                        data = json.load(f)
                    res.append(data['test_acc'])
                # Compute the mean once per beta (the original evaluated
                # np.mean(res) twice: in the condition and the assignment).
                if res:
                    mean_acc = np.mean(res)
                    if mean_acc > best_acc:
                        best_acc = mean_acc
                        best_beta = beta
            print(method, dataset, best_beta)
            best_beta_for_method[dataset] = best_beta
        best_betas[method] = best_beta_for_method
    if save_best:
        with open(save_path, "w") as f:
            json.dump(best_betas, f, sort_keys=True, indent=4)
    return best_betas
def get_result(method, dataset, beta, seeds, buffer_size, path='cl_results'):
    """Load the raw JSON result dict of every seed for one (method, dataset, beta) run."""
    fname_template = '{}/{}_{}_{}_{}_{}.txt'
    loaded = []
    for seed in seeds:
        fname = fname_template.format(path, dataset, method, buffer_size, beta, seed)
        with open(fname, 'r') as handle:
            loaded.append(json.load(handle))
    return loaded
def continual_learning_results():
    """Print the continual-learning accuracy table (method x dataset, mean +- std over 5 seeds).

    Selects the best beta per pair via get_best_betas (also saving it), then
    reads the chosen runs from 'cl_results' and prints one aligned row per method.
    """
    datasets = ['permmnist', 'splitmnist']
    methods = [
        'uniform', 'kmeans_features', 'kmeans_embedding', 'kmeans_grads',
        'kcenter_features', 'kcenter_embedding', 'kcenter_grads',
        'entropy', 'hardest', 'frcl', 'icarl', 'grad_matching',
        'coreset'
    ]
    seeds = range(5)
    betas = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]
    buffer_size = 100
    best_betas = get_best_betas(methods, datasets, betas, seeds, buffer_size, save_best=True, path='cl_results',
                                save_path='cl_results/best_betas.txt')
    print('Continual Learning study\n')
    # '\\' escapes the backslash: the original "\ " is an invalid escape
    # sequence (SyntaxWarning on modern Python). Printed text is unchanged.
    print('Method \\ Dataset'.ljust(45), end='')
    for dataset in datasets:
        print(' ' + dataset.ljust(18), end='')
    print('')
    for method in methods:
        print(method.ljust(43), end='')
        for dataset in datasets:
            beta = best_betas[method][dataset]
            res = get_result(method, dataset, beta, seeds, buffer_size, 'cl_results')
            res = [r['test_acc'] for r in res]
            print(' {:.2f} +- {:.2f}'.format(np.mean(res), np.std(res)).ljust(20), end='')
        print('')
def streaming_results():
    """Print the streaming-setting accuracy table (method x dataset, mean +- std over 5 seeds)."""
    datasets = ['permmnist', 'splitmnist']
    methods = ['reservoir', 'coreset']
    seeds = range(5)
    betas = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]
    buffer_size = 100
    best_betas = get_best_betas(methods, datasets, betas, seeds, buffer_size, save_best=True, path='streaming_results',
                                save_path='streaming_results/best_betas.txt')
    print('Streaming study\n')
    # '\\' escapes the backslash: the original "\ " is an invalid escape
    # sequence (SyntaxWarning on modern Python). Printed text is unchanged.
    print('Method \\ Dataset'.ljust(45), end='')
    for dataset in datasets:
        print(' ' + dataset.ljust(18), end='')
    print('')
    for method in methods:
        print(method.ljust(43), end='')
        for dataset in datasets:
            beta = best_betas[method][dataset]
            res = get_result(method, dataset, beta, seeds, buffer_size, 'streaming_results')
            res = [r['test_acc'] for r in res]
            print(' {:.2f} +- {:.2f}'.format(np.mean(res), np.std(res)).ljust(20), end='')
        print('')
def imbalanced_streaming_results():
    """Print the imbalanced-streaming accuracy table (method x dataset, mean +- std over 5 seeds)."""
    datasets = ['splitmnistimbalanced']
    methods = ['reservoir', 'cbrs', 'coreset']
    seeds = range(5)
    betas = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]
    buffer_size = 100
    best_betas = get_best_betas(methods, datasets, betas, seeds, buffer_size, save_best=True, path='streaming_results',
                                save_path='streaming_results/best_betas_imbalanced.txt')
    print('Streaming study\n')
    # '\\' escapes the backslash: the original "\ " is an invalid escape
    # sequence (SyntaxWarning on modern Python). Printed text is unchanged.
    print('Method \\ Dataset'.ljust(45), end='')
    for dataset in datasets:
        print(' ' + dataset.ljust(18), end='')
    print('')
    for method in methods:
        print(method.ljust(43), end='')
        for dataset in datasets:
            beta = best_betas[method][dataset]
            res = get_result(method, dataset, beta, seeds, buffer_size, 'streaming_results')
            res = [r['test_acc'] for r in res]
            print(' {:.2f} +- {:.2f}'.format(np.mean(res), np.std(res)).ljust(20), end='')
        print('')
def splitcifar_results():
    """Print the SplitCIFAR accuracy table (method x dataset, mean +- std over 5 seeds)."""
    datasets = ['splitcifar']
    methods = [
        'uniform', 'kmeans_features', 'kmeans_embedding', 'kmeans_grads',
        'kcenter_features', 'kcenter_embedding', 'kcenter_grads',
        'entropy', 'hardest', 'frcl', 'icarl', 'grad_matching',
        'coreset'
    ]
    seeds = range(5)
    betas = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]
    buffer_size = 200
    best_betas = get_best_betas(methods, datasets, betas, seeds, buffer_size, save_best=True, path='cl_results',
                                save_path='cl_results/best_betas_splitcifar.txt')
    print('Streaming study\n')
    # '\\' escapes the backslash: the original "\ " is an invalid escape
    # sequence (SyntaxWarning on modern Python). Printed text is unchanged.
    print('Method \\ Dataset'.ljust(45), end='')
    for dataset in datasets:
        print(' ' + dataset.ljust(18), end='')
    print('')
    for method in methods:
        print(method.ljust(43), end='')
        for dataset in datasets:
            beta = best_betas[method][dataset]
            res = get_result(method, dataset, beta, seeds, buffer_size, 'cl_results')
            res = [r['test_acc'] for r in res]
            print(' {:.2f} +- {:.2f}'.format(np.mean(res), np.std(res)).ljust(20), end='')
        print('')
def imbalanced_streaming_cifar_results():
    """Print the imbalanced-streaming SplitCIFAR table (method x dataset, mean +- std over 5 seeds)."""
    datasets = ['stream_imbalanced_splitcifar']
    methods = ['reservoir', 'cbrs', 'coreset']
    seeds = range(5)
    betas = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]
    buffer_size = 200
    best_betas = get_best_betas(methods, datasets, betas, seeds, buffer_size, save_best=True, path='streaming_results',
                                save_path='streaming_results/best_betas_imbalanced_cifar.txt')
    print('Streaming study\n')
    # '\\' escapes the backslash: the original "\ " is an invalid escape
    # sequence (SyntaxWarning on modern Python). Printed text is unchanged.
    print('Method \\ Dataset'.ljust(45), end='')
    for dataset in datasets:
        print(' ' + dataset.ljust(18), end='')
    print('')
    for method in methods:
        print(method.ljust(43), end='')
        for dataset in datasets:
            beta = best_betas[method][dataset]
            res = get_result(method, dataset, beta, seeds, buffer_size, 'streaming_results')
            res = [r['test_acc'] for r in res]
            print(' {:.2f} +- {:.2f}'.format(np.mean(res), np.std(res)).ljust(20), end='')
        print('')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Results processor')
parser.add_argument('--exp', default='cl',
choices=['cl', 'streaming', 'imbalanced_streaming', 'splitcifar', 'imbalanced_streaming_cifar'])
args = parser.parse_args()
exp = args.exp
if exp == 'cl':
continual_learning_results()
elif exp == 'streaming':
streaming_results()
elif exp == 'imbalanced_streaming':
imbalanced_streaming_results()
elif exp == 'splitcifar':
splitcifar_results()
elif exp == 'imbalanced_streaming_cifar':
imbalanced_streaming_cifar_results()
else:
raise Exception('Unknown experiment')
| 38.102041
| 120
| 0.584628
| 912
| 7,468
| 4.596491
| 0.126096
| 0.055821
| 0.042939
| 0.052481
| 0.782443
| 0.735687
| 0.735687
| 0.735687
| 0.725906
| 0.725906
| 0
| 0.030873
| 0.267006
| 7,468
| 195
| 121
| 38.297436
| 0.734929
| 0
| 0
| 0.654762
| 0
| 0
| 0.179834
| 0.044456
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.017857
| 0
| 0.071429
| 0.214286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3aa08e8eb969f147173400ad4c910b1dcc302c0c
| 20
|
py
|
Python
|
wsgi.py
|
argosopentech/argos-search
|
7cb7d85d1d76916f272ce2e52cfd6f6856fd1f69
|
[
"MIT"
] | 5
|
2021-11-05T00:20:45.000Z
|
2021-12-15T02:49:53.000Z
|
wsgi.py
|
argosopentech/argos-search
|
7cb7d85d1d76916f272ce2e52cfd6f6856fd1f69
|
[
"MIT"
] | 1
|
2021-11-02T11:38:09.000Z
|
2021-11-02T11:38:09.000Z
|
wsgi.py
|
argosopentech/argos-search
|
7cb7d85d1d76916f272ce2e52cfd6f6856fd1f69
|
[
"MIT"
] | 1
|
2021-11-05T03:17:54.000Z
|
2021-11-05T03:17:54.000Z
|
from web import app
| 10
| 19
| 0.8
| 4
| 20
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3ab331366adceafa06ffef642f3ca3cfff25e82f
| 176
|
py
|
Python
|
tests/__init__.py
|
tim-fi/swpatterns
|
7fe12f9816a0aef8c8cfdd57536ac578a89a83b9
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
tim-fi/swpatterns
|
7fe12f9816a0aef8c8cfdd57536ac578a89a83b9
|
[
"MIT"
] | 1
|
2020-03-31T04:13:12.000Z
|
2020-03-31T04:13:12.000Z
|
tests/__init__.py
|
tim-fi/swpatterns
|
7fe12f9816a0aef8c8cfdd57536ac578a89a83b9
|
[
"MIT"
] | null | null | null |
"""Aggregate the public names of every test submodule into one namespace."""
from .test_composition import *
from .test_interface import *
from .test_matching import *

# Importing a submodule binds it as an attribute of this package, so the
# module names are available here for concatenating their __all__ lists.
__all__ = (
    test_composition.__all__
    + test_interface.__all__
    + test_matching.__all__
)
| 29.333333
| 83
| 0.818182
| 22
| 176
| 5.545455
| 0.318182
| 0.196721
| 0.229508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 176
| 5
| 84
| 35.2
| 0.782051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ae2de77bf7d10c75c953edeb19684e676e5d9d4
| 32
|
py
|
Python
|
plugins/filetime_from_hg/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 13
|
2020-01-27T09:02:25.000Z
|
2022-01-20T07:45:26.000Z
|
plugins/filetime_from_hg/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 29
|
2020-03-22T06:57:57.000Z
|
2022-01-24T22:46:42.000Z
|
plugins/filetime_from_hg/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 6
|
2020-07-10T00:13:30.000Z
|
2022-01-26T08:22:33.000Z
|
from .filetime_from_hg import *
| 16
| 31
| 0.8125
| 5
| 32
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3af1c55a3fc3ac30ba4f19ec1194bee303e6d588
| 117
|
py
|
Python
|
Chapter 05 - Functions/Assignments/5.8 Writing Your Own Value-Returning Functions/51218.py
|
EllisBarnes00/COP-1000
|
8509e59e8a566c77295c714ddcb0f557c470358b
|
[
"Unlicense"
] | null | null | null |
Chapter 05 - Functions/Assignments/5.8 Writing Your Own Value-Returning Functions/51218.py
|
EllisBarnes00/COP-1000
|
8509e59e8a566c77295c714ddcb0f557c470358b
|
[
"Unlicense"
] | 1
|
2021-06-07T03:55:29.000Z
|
2021-06-07T03:56:47.000Z
|
Chapter 05 - Functions/Assignments/5.8 Writing Your Own Value-Returning Functions/51218.py
|
EllisBarnes00/COP-1000
|
8509e59e8a566c77295c714ddcb0f557c470358b
|
[
"Unlicense"
] | null | null | null |
def max(num1, num2, num3):
    """Return the largest of the three given numbers.

    NOTE(review): this deliberately shadows the built-in ``max``; the name is
    kept unchanged so existing callers keep working.
    """
    # Pairwise comparisons instead of the original sort-then-index: no
    # temporary list, O(1) work, and ``nums[len(nums) - 1]`` was just a
    # verbose spelling of ``nums[-1]``.
    largest = num1
    if num2 > largest:
        largest = num2
    if num3 > largest:
        largest = num3
    return largest


print(max(32, 65, 2))
| 19.5
| 27
| 0.623932
| 21
| 117
| 3.47619
| 0.666667
| 0.219178
| 0.328767
| 0.438356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 0.17094
| 117
| 6
| 28
| 19.5
| 0.628866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.4
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
aae6a45d23baf8dcb46f0a754442613f3ffda991
| 104
|
py
|
Python
|
splitting_image.py
|
purinisivakrishna/image
|
550c55807bc92bb81e6c6114d1e79b3b7ff2be42
|
[
"MIT"
] | null | null | null |
splitting_image.py
|
purinisivakrishna/image
|
550c55807bc92bb81e6c6114d1e79b3b7ff2be42
|
[
"MIT"
] | null | null | null |
splitting_image.py
|
purinisivakrishna/image
|
550c55807bc92bb81e6c6114d1e79b3b7ff2be42
|
[
"MIT"
] | null | null | null |
# Split the "dog_flower" image folder into train/val/test subsets
# (70% / 15% / 15%) written under "image_split".
# NOTE(review): relies on the third-party `split-folders` package; its default
# seed/shuffling behavior is not visible here — confirm if determinism matters.
import split_folders
split_folders.ratio("dog_flower", output="image_split", ratio=(0.7, 0.15, 0.15))
| 34.666667
| 80
| 0.740385
| 18
| 104
| 4.055556
| 0.611111
| 0.328767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 0.086538
| 104
| 3
| 80
| 34.666667
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0.203884
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c9149f717d51cd8a808885eb620755d9764bd4e1
| 241
|
py
|
Python
|
Repositories/Python/Basic/format_map_example.py
|
Dong-gi/Dong-gi.github.io
|
2c3d083db72e06032a1daf528ee9b175219aa554
|
[
"MIT"
] | 5
|
2018-02-27T16:19:35.000Z
|
2020-08-25T13:09:49.000Z
|
Repositories/Python/Basic/format_map_example.py
|
Dong-gi/Dong-gi.github.io
|
2c3d083db72e06032a1daf528ee9b175219aa554
|
[
"MIT"
] | 25
|
2019-03-28T00:36:04.000Z
|
2021-08-12T01:42:41.000Z
|
Repositories/Python/Basic/format_map_example.py
|
Dong-gi/Dong-gi.github.io
|
2c3d083db72e06032a1daf528ee9b175219aa554
|
[
"MIT"
] | 1
|
2021-11-28T11:28:29.000Z
|
2021-11-28T11:28:29.000Z
|
# Source: https://docs.python.org/3/library/stdtypes.html#str.format_map
class Default(dict):
    """dict that yields the missing key itself, so str.format_map leaves
    unknown placeholders as their field names instead of raising KeyError."""
    def __missing__(self, key):
        # Invoked by dict lookup when ``key`` is absent: echo the key back.
        return key
print('{name} was born in {country}'.format_map(Default(name='Guido')))
# Guido was born in country   <- 'country' had no mapping, so the key is shown
| 34.428571
| 71
| 0.709544
| 37
| 241
| 4.459459
| 0.72973
| 0.109091
| 0.109091
| 0.193939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004808
| 0.136929
| 241
| 7
| 72
| 34.428571
| 0.788462
| 0.377593
| 0
| 0
| 0
| 0
| 0.22449
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.75
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c91d47946e6f381bf90096bbdc36d6f639a38696
| 40
|
py
|
Python
|
tests/data/nested_gitignore_tests/root/child/b.py
|
BigNuoLi/black
|
71e71e5f52e5f6bdeae63cc8c11b1bee44d11c30
|
[
"MIT"
] | 16,110
|
2019-07-22T21:54:54.000Z
|
2022-03-31T22:52:39.000Z
|
tests/data/nested_gitignore_tests/root/child/b.py
|
marnixah/black-but-usable
|
83b83d3066d1d857983bfa1a666a409e7255d79d
|
[
"MIT"
] | 1,981
|
2019-07-22T21:26:16.000Z
|
2022-03-31T23:14:35.000Z
|
tests/data/nested_gitignore_tests/root/child/b.py
|
marnixah/black-but-usable
|
83b83d3066d1d857983bfa1a666a409e7255d79d
|
[
"MIT"
] | 1,762
|
2019-07-22T21:23:00.000Z
|
2022-03-31T06:10:22.000Z
|
# should be excluded (child/.gitignore)
| 20
| 39
| 0.75
| 5
| 40
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 40
| 1
| 40
| 40
| 0.857143
| 0.925
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c94009b096978f5e41e55cb6cf4b13dfe214aee4
| 54
|
py
|
Python
|
crosswalk/crosswalk/envs/__init__.py
|
ArayCHN/Pedestrian-navigation
|
1cd9a6400bbc39200e6b27a2cc7e802418697ef7
|
[
"MIT"
] | null | null | null |
crosswalk/crosswalk/envs/__init__.py
|
ArayCHN/Pedestrian-navigation
|
1cd9a6400bbc39200e6b27a2cc7e802418697ef7
|
[
"MIT"
] | null | null | null |
crosswalk/crosswalk/envs/__init__.py
|
ArayCHN/Pedestrian-navigation
|
1cd9a6400bbc39200e6b27a2cc7e802418697ef7
|
[
"MIT"
] | null | null | null |
from crosswalk.envs.crosswalk_env import CrosswalkEnv
| 27
| 53
| 0.888889
| 7
| 54
| 6.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 54
| 1
| 54
| 54
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c964510f2a2d11a61bee9dc4b8ebf5830ce6ba64
| 268
|
py
|
Python
|
stores/admin.py
|
ashishkr619/dukaan_main
|
b236b498b95f62160959b5e84bb642a0be6063b0
|
[
"MIT"
] | null | null | null |
stores/admin.py
|
ashishkr619/dukaan_main
|
b236b498b95f62160959b5e84bb642a0be6063b0
|
[
"MIT"
] | null | null | null |
stores/admin.py
|
ashishkr619/dukaan_main
|
b236b498b95f62160959b5e84bb642a0be6063b0
|
[
"MIT"
] | null | null | null |
"""Django admin configuration for the Store model."""
from django.contrib import admin
from .models import Store


# The commented-out duplicate `from django.contrib import admin` line that
# preceded this block was dead code and has been removed.
@admin.register(Store)
class StoreAdmin(admin.ModelAdmin):
    """Change-list configuration for Store."""
    # Columns shown on the admin change-list page.
    list_display = ['id', 'store_link', 'name', 'address', ]
    # Sidebar filters offered on the change-list page.
    list_filter = ['name', 'address', 'updated_at']
| 26.8
| 60
| 0.716418
| 34
| 268
| 5.529412
| 0.588235
| 0.106383
| 0.180851
| 0.244681
| 0.319149
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141791
| 268
| 9
| 61
| 29.777778
| 0.817391
| 0.119403
| 0
| 0
| 0
| 0
| 0.188034
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c96e9091d9cf9eeaa9787437678eda7f858c26b2
| 64
|
py
|
Python
|
aiohttp_apispec_plugin/__init__.py
|
ckkz-it/aiohttp-apispec-plugin
|
41aedb69426c4292a59036bcac14660f31810c1b
|
[
"MIT"
] | 2
|
2021-01-06T08:21:16.000Z
|
2021-02-11T09:04:32.000Z
|
aiohttp_apispec_plugin/__init__.py
|
ckkz-it/aiohttp-apispec-plugin
|
41aedb69426c4292a59036bcac14660f31810c1b
|
[
"MIT"
] | null | null | null |
aiohttp_apispec_plugin/__init__.py
|
ckkz-it/aiohttp-apispec-plugin
|
41aedb69426c4292a59036bcac14660f31810c1b
|
[
"MIT"
] | null | null | null |
from aiohttp_apispec_plugin.aiohttp_plugin import AioHttpPlugin
| 32
| 63
| 0.921875
| 8
| 64
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 64
| 1
| 64
| 64
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a317e281de71adaecf0557b261fab45211bc24cd
| 61
|
py
|
Python
|
white_matter/wm_recipe/layer_profiles/__init__.py
|
alex4200/Long-range-micro-connectome
|
833aad78bc71e49a5059b276e65d3fef21686f9d
|
[
"BSD-3-Clause"
] | 9
|
2019-05-01T13:12:17.000Z
|
2021-11-23T10:34:56.000Z
|
white_matter/wm_recipe/layer_profiles/__init__.py
|
alex4200/Long-range-micro-connectome
|
833aad78bc71e49a5059b276e65d3fef21686f9d
|
[
"BSD-3-Clause"
] | 2
|
2022-02-03T13:56:22.000Z
|
2022-02-04T07:16:37.000Z
|
white_matter/wm_recipe/layer_profiles/__init__.py
|
alex4200/Long-range-micro-connectome
|
833aad78bc71e49a5059b276e65d3fef21686f9d
|
[
"BSD-3-Clause"
] | 1
|
2022-02-03T12:05:12.000Z
|
2022-02-03T12:05:12.000Z
|
from .layer_profile_mixer import ProfileMixer, LayerProfiles
| 30.5
| 60
| 0.885246
| 7
| 61
| 7.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081967
| 61
| 1
| 61
| 61
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.