hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c88164d568f7019355c463f53c568e4155639ba8
| 68
|
py
|
Python
|
mmdet2trt/models/necks/__init__.py
|
jackweiwang/mmdetection-to-tensorrt
|
f988ba8e923764fb1173385a1c7160b8f8b5bd99
|
[
"Apache-2.0"
] | 1
|
2021-08-23T10:09:37.000Z
|
2021-08-23T10:09:37.000Z
|
mmdet2trt/models/necks/__init__.py
|
gcong18/mmdetection-to-tensorrt
|
c31c32ee4720ff56010bcda77bacf3a110d0526c
|
[
"Apache-2.0"
] | null | null | null |
mmdet2trt/models/necks/__init__.py
|
gcong18/mmdetection-to-tensorrt
|
c31c32ee4720ff56010bcda77bacf3a110d0526c
|
[
"Apache-2.0"
] | null | null | null |
from .base_neck import BaseNeckWraper
from .hrfpn import HRFPNWraper
| 34
| 37
| 0.867647
| 9
| 68
| 6.444444
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 68
| 2
| 38
| 34
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c8911a2e28cd5ddc171401a1c873e2bbffa32ae3
| 48
|
py
|
Python
|
src/FlaUILibrary/flaui/__init__.py
|
GDATASoftwareAG/robotframework-flaui
|
81b654be0c4bdd7a176c7186eff2757f7e5da72a
|
[
"MIT"
] | 22
|
2020-05-04T14:40:53.000Z
|
2022-03-24T05:57:00.000Z
|
src/FlaUILibrary/flaui/__init__.py
|
Fxztam/robotframework-flaui
|
81b654be0c4bdd7a176c7186eff2757f7e5da72a
|
[
"MIT"
] | 33
|
2020-07-27T12:47:33.000Z
|
2022-03-29T11:16:30.000Z
|
src/FlaUILibrary/flaui/__init__.py
|
Fxztam/robotframework-flaui
|
81b654be0c4bdd7a176c7186eff2757f7e5da72a
|
[
"MIT"
] | 4
|
2020-07-23T09:50:25.000Z
|
2022-01-18T10:32:55.000Z
|
from .uia2 import UIA2
from .uia3 import UIA3
| 16
| 23
| 0.75
| 8
| 48
| 4.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.208333
| 48
| 2
| 24
| 24
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c8a3d29c519f48926ab735082b04e57bb4a6fb31
| 342
|
py
|
Python
|
tempo-api/src/app/tempo/utils/base_controller.py
|
cuappdev/archives
|
061d0f9cccf278363ffaeb27fc655743b1052ae5
|
[
"MIT"
] | null | null | null |
tempo-api/src/app/tempo/utils/base_controller.py
|
cuappdev/archives
|
061d0f9cccf278363ffaeb27fc655743b1052ae5
|
[
"MIT"
] | 7
|
2017-10-28T22:45:19.000Z
|
2018-09-30T07:04:33.000Z
|
appdev/controllers/base_controller.py
|
cuappdev/appdev.py
|
fffe58a01ba5bc00493213e2baa00a6a9e393280
|
[
"MIT"
] | null | null | null |
import abc
class BaseController:
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_path(self): # URI-path that begins and ends with a '/'
return ''
@abc.abstractmethod
def get_methods(self): # List of different HTTP methods supported
return []
@abc.abstractmethod
def response(self, **kwargs):
return None
| 20.117647
| 67
| 0.707602
| 43
| 342
| 5.488372
| 0.651163
| 0.216102
| 0.254237
| 0.194915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201754
| 342
| 16
| 68
| 21.375
| 0.864469
| 0.236842
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.083333
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
c8ea121bd87b24e2dc31d2ea3e6ef715e04887a7
| 144
|
py
|
Python
|
main/check-strict-superset/check-strict-superset.py
|
EliahKagan/old-practice-snapshot
|
1b53897eac6902f8d867c8f154ce2a489abb8133
|
[
"0BSD"
] | null | null | null |
main/check-strict-superset/check-strict-superset.py
|
EliahKagan/old-practice-snapshot
|
1b53897eac6902f8d867c8f154ce2a489abb8133
|
[
"0BSD"
] | null | null | null |
main/check-strict-superset/check-strict-superset.py
|
EliahKagan/old-practice-snapshot
|
1b53897eac6902f8d867c8f154ce2a489abb8133
|
[
"0BSD"
] | null | null | null |
def input_set():
return set(map(int, raw_input().split()))
s = input_set()
print all(s > input_set() for i in xrange(0, int(raw_input())))
| 24
| 63
| 0.652778
| 26
| 144
| 3.423077
| 0.615385
| 0.269663
| 0.247191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008197
| 0.152778
| 144
| 5
| 64
| 28.8
| 0.721311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c8f9272dbde31a4ad914ff56f645c15f2e56b4f5
| 185
|
py
|
Python
|
pfioh/__init__.py
|
FNNDSC/pfioh-flask
|
64240cced4d7a3e399814acd8246cf7bf42a4074
|
[
"MIT"
] | null | null | null |
pfioh/__init__.py
|
FNNDSC/pfioh-flask
|
64240cced4d7a3e399814acd8246cf7bf42a4074
|
[
"MIT"
] | null | null | null |
pfioh/__init__.py
|
FNNDSC/pfioh-flask
|
64240cced4d7a3e399814acd8246cf7bf42a4074
|
[
"MIT"
] | null | null | null |
from .pfioh import StoreHandler, HandleRequests, base64_process, zip_process, zipdir, SetupFlaskServer
from .swift_store import SwiftStore
from .mount_dir import MountDir
| 46.25
| 110
| 0.789189
| 21
| 185
| 6.761905
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012987
| 0.167568
| 185
| 3
| 111
| 61.666667
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cdb0abc5ad538e33d412ee44f6c0738ff3d4a00a
| 8,428
|
py
|
Python
|
app/onlib/views.py
|
gladunvv/library-api
|
5975bd6cd86e468354c8b05daa1f82c7a02350a2
|
[
"MIT"
] | null | null | null |
app/onlib/views.py
|
gladunvv/library-api
|
5975bd6cd86e468354c8b05daa1f82c7a02350a2
|
[
"MIT"
] | 7
|
2020-06-06T00:26:17.000Z
|
2022-02-10T12:17:47.000Z
|
app/onlib/views.py
|
gladunvv/library-api
|
5975bd6cd86e468354c8b05daa1f82c7a02350a2
|
[
"MIT"
] | null | null | null |
from django.shortcuts import get_object_or_404
from rest_framework import permissions
from rest_framework import generics
from rest_framework import filters
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from django_filters import rest_framework as filter_rest
from onlib.models import Book, Genre, Author
from onlib.serializers import (
BookSerializer,
GenreSerializer,
FullAuthorSerializer,
FullBookSerializer,
)
SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS']
UNSAFE_METHODS = ['POST', 'PUT', 'PATCH', 'DELETE']
class CustomPermissions(permissions.BasePermission):
def has_permission(self, request, view):
if (request.method in SAFE_METHODS and
request.user and
request.user.is_authenticated):
return True
elif (request.method in UNSAFE_METHODS and
request.user and
request.user.is_staff):
return True
return False
class SearchBooksView(generics.ListAPIView):
permission_classes = (permissions.AllowAny,)
filter_backends = (filters.SearchFilter,)
search_fields = ('title',)
queryset = Book.objects.all()
serializer_class = BookSerializer
class FilterBooksView(generics.ListAPIView):
permission_classes = (permissions.IsAuthenticated,)
filter_backends = (filter_rest.DjangoFilterBackend,)
filterset_fields = ('genre', 'author')
queryset = Book.objects.all()
serializer_class = BookSerializer
class GenreView(APIView):
permission_classes = (CustomPermissions,)
def get(self, request, *args, **kwargs):
genres = Genre.objects.all()
serializer = GenreSerializer(genres, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def post(self, request, *args, **kwargs):
data = request.data
serializer = GenreSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def put(self, request, *args, **kwargs):
genre = request.GET.get('genre', None)
if not genre:
return Response({'errors': 'No genre provided'}, status=status.HTTP_400_BAD_REQUEST)
try:
item = get_object_or_404(Genre, pk=genre)
except (TypeError, ValueError):
return Response({'errors': 'Argument must be int'}, status=status.HTTP_400_BAD_REQUEST)
data = request.data
serializer = GenreSerializer(item, data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, *args, **kwargs):
genre = request.GET.get('genre', None)
if not genre:
return Response({'errors': 'No genre provided'}, status=status.HTTP_400_BAD_REQUEST)
try:
item = get_object_or_404(Genre, pk=genre)
except (TypeError, ValueError):
return Response({'errors': 'Argument must be int'}, status=status.HTTP_400_BAD_REQUEST)
if item.books_genre.all():
message = {
'message': 'The {title} genre has books'.format(
title=item.title,
),
}
return Response(message, status=status.HTTP_400_BAD_REQUEST)
item.delete()
message = {
'message': 'Genre {title} deleted successfully'.format(
title=item.title,
),
}
return Response(message, status=status.HTTP_200_OK)
class AuthorView(APIView):
permission_classes = (CustomPermissions,)
def get(self, request, *args, **kwargs):
authors = Author.objects.all()
serializer = FullAuthorSerializer(authors, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def post(self, request, *args, **kwargs):
data = request.data
serializer = FullAuthorSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def put(self, request, *args, **kwargs):
author = request.GET.get('author', None)
if not author:
return Response({'errors': 'No author provided'}, status=status.HTTP_400_BAD_REQUEST)
try:
item = get_object_or_404(Author, pk=author)
except (TypeError, ValueError):
return Response({'errors': 'Argument must be int'}, status=status.HTTP_400_BAD_REQUEST)
data = request.data
serializer = FullAuthorSerializer(item, data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, *args, **kwargs):
author = request.GET.get('author', None)
if not author:
return Response({'errors': 'No author provided'}, status=status.HTTP_400_BAD_REQUEST)
try:
item = get_object_or_404(Author, pk=author)
except (TypeError, ValueError):
return Response({'errors': 'Argument must be int'}, status=status.HTTP_400_BAD_REQUEST)
if item.books_author.all():
message = {
'message': 'The {author_name} author has books'.format(
author_name=item.last_name,
),
}
return Response(message, status=status.HTTP_400_BAD_REQUEST)
item.delete()
message = {
'message': 'Author {author_name} deleted successfully'.format(
author_name=item.last_name,
),
}
return Response(message, status=status.HTTP_200_OK)
class BookView(APIView):
permission_classes = (CustomPermissions,)
def get(self, request, *args, **kwargs):
books = Book.objects.all()
serializer = FullBookSerializer(books, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def post(self, request, *args, **kwargs):
data = request.data
author = data['author_id']
genre = data['genre_id']
serializer = FullBookSerializer(data=data)
if serializer.is_valid():
serializer.save(author_id=author, genre_id=genre)
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def put(self, request, *args, **kwargs):
book = request.GET.get('book', None)
if not book:
return Response({'errors': 'No book provided'}, status=status.HTTP_400_BAD_REQUEST)
try:
item = get_object_or_404(Book, pk=book)
except (TypeError, ValueError):
return Response({'errors': 'Argument must be int'}, status=status.HTTP_400_BAD_REQUEST)
data = request.data
author = data['author_id']
genre = data['genre_id']
serializer = FullBookSerializer(item, data=data)
if serializer.is_valid():
serializer.save(author_id=author, genre_id=genre)
return Response(serializer.data, status=status.HTTP_200_OK)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, *args, **kwargs):
book = request.GET.get('book', None)
if not book:
return Response({'errors': 'No book provided'}, status=status.HTTP_400_BAD_REQUEST)
try:
item = get_object_or_404(Book, pk=book)
except (TypeError, ValueError):
return Response({'errors': 'Argument must be int'}, status=status.HTTP_400_BAD_REQUEST)
item.delete()
message = {
'message': 'Book {title} deleted successfully'.format(
title=item.title,
),
}
return Response(message, status=status.HTTP_200_OK)
| 37.457778
| 99
| 0.638111
| 930
| 8,428
| 5.616129
| 0.127957
| 0.085774
| 0.098028
| 0.072755
| 0.780203
| 0.748803
| 0.748803
| 0.748803
| 0.713192
| 0.709554
| 0
| 0.018744
| 0.259374
| 8,428
| 224
| 100
| 37.625
| 0.818007
| 0
| 0
| 0.689474
| 0
| 0
| 0.072378
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068421
| false
| 0
| 0.052632
| 0
| 0.405263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cdcc34041ba0a1e5c326ac634efb4b9fdc8defc0
| 115
|
py
|
Python
|
earth/functions.py
|
Stuff-for-CodeCool/basic-flask-blueprint-example
|
9e4c51f07296d856810e476d1e15d06d72e8f1cb
|
[
"MIT"
] | null | null | null |
earth/functions.py
|
Stuff-for-CodeCool/basic-flask-blueprint-example
|
9e4c51f07296d856810e476d1e15d06d72e8f1cb
|
[
"MIT"
] | null | null | null |
earth/functions.py
|
Stuff-for-CodeCool/basic-flask-blueprint-example
|
9e4c51f07296d856810e476d1e15d06d72e8f1cb
|
[
"MIT"
] | null | null | null |
from database import execute_query
def crack_stone():
execute_query("", {})
return "the stone has cracked"
| 23
| 34
| 0.713043
| 15
| 115
| 5.266667
| 0.8
| 0.303797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182609
| 115
| 5
| 35
| 23
| 0.840426
| 0
| 0
| 0
| 0
| 0
| 0.181034
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a8011728894f6515beef010cc4c7fe0f46746f6e
| 12
|
py
|
Python
|
tfcone/util/numerical.py
|
ma0ho/Deep-Learning-Cone-Beam-CT
|
9a4fb82f15672ba46249d947df987de5b1e57c2f
|
[
"Apache-2.0"
] | 24
|
2018-06-17T06:41:22.000Z
|
2022-03-30T06:02:01.000Z
|
tfcone/util/numerical.py
|
SShiYuU/Deep-Learning-Cone-Beam-CT
|
223a24d3db2edbf681594782a8c0e664c0510ac5
|
[
"Apache-2.0"
] | 2
|
2018-10-26T18:57:42.000Z
|
2019-03-28T13:57:23.000Z
|
tfcone/util/numerical.py
|
SShiYuU/Deep-Learning-Cone-Beam-CT
|
223a24d3db2edbf681594782a8c0e664c0510ac5
|
[
"Apache-2.0"
] | 7
|
2018-09-06T09:06:12.000Z
|
2020-10-21T08:47:00.000Z
|
eps = 10e-7
| 6
| 11
| 0.583333
| 3
| 12
| 2.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.25
| 12
| 1
| 12
| 12
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b570d7d337c55170486d86b9f26bee0a43c1af00
| 21,703
|
py
|
Python
|
infra/services/gnumbd/test/gnumbd_test_definitions.py
|
eunchong/infra
|
ce3728559112bfb3e8b32137eada517aec6d22f9
|
[
"BSD-3-Clause"
] | null | null | null |
infra/services/gnumbd/test/gnumbd_test_definitions.py
|
eunchong/infra
|
ce3728559112bfb3e8b32137eada517aec6d22f9
|
[
"BSD-3-Clause"
] | null | null | null |
infra/services/gnumbd/test/gnumbd_test_definitions.py
|
eunchong/infra
|
ce3728559112bfb3e8b32137eada517aec6d22f9
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
from infra.services.gnumbd import gnumbd
content_of = gnumbd.content_of
REAL = 'refs/heads/master'
PEND = 'refs/pending/heads/master'
PEND_TAG = 'refs/pending-tags/heads/master'
BRANCH = 'refs/branch-heads/cool_branch'
BRANCH_PEND = 'refs/pending/branch-heads/cool_branch'
BRANCH_TAG = 'refs/pending-tags/branch-heads/cool_branch'
GNUMBD_TESTS = {}
def gnumbd_test(f):
GNUMBD_TESTS[f.__name__] = f
return f
def svn_footers(num):
return {
gnumbd.GIT_SVN_ID: [
'svn://repo/path@%s 0039d316-1c4b-4281-b951-d872f2087c98' % num]
}
def gnumbd_footers(ref, num):
return {
gnumbd.COMMIT_POSITION: [gnumbd.FMT_COMMIT_POSITION(ref, num)]
}
# Error cases
@gnumbd_test
def no_real_ref(origin, _local, _config_ref, RUN, CHECKPOINT):
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('One commit in origin')
RUN()
CHECKPOINT('Origin should not have changed')
@gnumbd_test
def no_pending_tag(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
origin[PEND].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('Two commits in origin')
RUN()
CHECKPOINT('Origin should not have changed')
@gnumbd_test
def bad_position_footer(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers={gnumbd.COMMIT_POSITION: ['BlobbyGumpus!']})
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('Bad master commit footer')
RUN()
CHECKPOINT('Should be the same')
assert origin[REAL].commit == base_commit
@gnumbd_test
def bad_svn_footer(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers={gnumbd.GIT_SVN_ID: ['BlobbyGumpus!']})
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('Bad master commit footer')
RUN()
CHECKPOINT('Should be the same')
assert origin[REAL].commit == base_commit
@gnumbd_test
def no_position_footer(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers={'Sup': ['Not a footer']})
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('Master has no position footer')
RUN()
CHECKPOINT('Should be the same')
assert origin[REAL].commit == base_commit
@gnumbd_test
def merge_commits_fail(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
o_commit = origin['refs/heads/other'].make_full_tree_commit(
'Incoming merge!', footers=gnumbd_footers(origin['refs/heads/other'], 20))
m_commit = base_commit.alter(
parents=(base_commit.hsh, o_commit.hsh),
message_lines=['Two for one!'],
footers={k: None for k in base_commit.data.footers}
)
origin[PEND].fast_forward(m_commit)
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('The setup.')
RUN()
CHECKPOINT('Should be the same')
@gnumbd_test
def manual_merge_commits_ok(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
o_commit = origin['refs/heads/other'].make_full_tree_commit(
'Incoming merge!', footers=gnumbd_footers(origin['refs/heads/other'], 20))
footers = {k: None for k in base_commit.data.footers}
footers[gnumbd.COMMIT_POSITION] = ['refs/heads/master@{#101}']
m_commit = base_commit.alter(
parents=(base_commit.hsh, o_commit.hsh),
message_lines=['Two for one!'],
footers=footers
)
origin[REAL].fast_forward(m_commit)
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(m_commit)
origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('The setup.')
RUN()
CHECKPOINT('Hello world landed w/o a hitch')
@gnumbd_test
def no_number_on_parent(origin, local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit('Base without number')
user_commit = origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('One commit in origin')
RUN()
CHECKPOINT('Should still only have 1 commit')
assert local[PEND].commit == user_commit
assert local[REAL].commit == base_commit
# Normal cases
@gnumbd_test
def incoming_svn_id_drops(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=svn_footers(100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
user_commit = origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('Two commits in origin')
RUN()
CHECKPOINT('Hello world should be 101')
assert content_of(origin[REAL].commit) == content_of(user_commit)
assert origin[REAL].commit.parent == base_commit
# pending > master == tag
@gnumbd_test
def normal_update(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
user_commit = origin[PEND].make_full_tree_commit('Hello world')
CHECKPOINT('Two commits')
RUN()
CHECKPOINT('Hello world should be 101')
assert content_of(origin[REAL].commit) == content_of(user_commit)
assert origin[REAL].commit.parent == base_commit
# master == pending == tag
@gnumbd_test
def steady_state(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
user_commit = origin[PEND].make_full_tree_commit('Hello world')
RUN(include_log=False)
CHECKPOINT('Hello world should be 101')
RUN()
CHECKPOINT('Hello world should still be 101')
assert content_of(origin[REAL].commit) == content_of(user_commit)
assert origin[REAL].commit.parent == base_commit
# master == pending > tag
@gnumbd_test
def tag_lagging_no_actual(origin, _local, _config_ref, RUN, CHECKPOINT):
origin[REAL].make_full_tree_commit(
'Root commit', footers=gnumbd_footers(origin[REAL], 99))
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
user_commit = origin[PEND].make_full_tree_commit('Hello world')
RUN(include_log=False)
origin[PEND_TAG].update_to(origin[PEND_TAG].commit.parent.parent)
CHECKPOINT('Tag on root (2 behind pend)')
RUN()
CHECKPOINT('Tag caught up')
assert content_of(origin[REAL].commit) == content_of(user_commit)
assert origin[REAL].commit.parent == base_commit
assert origin[PEND_TAG].commit == origin[PEND].commit
# pending > master > tag
@gnumbd_test
def tag_lagging(origin, _local, _config_ref, RUN, CHECKPOINT):
origin[REAL].make_full_tree_commit(
'Root commit', footers=gnumbd_footers(origin[REAL], 99))
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
RUN(include_log=False)
landed_commit = origin[REAL].commit
origin[PEND_TAG].update_to(origin[PEND_TAG].commit.parent.parent)
user_commit = origin[PEND].make_full_tree_commit('New commit')
CHECKPOINT('Tag on root (3 behind pend). Real 1 behind pend')
RUN()
CHECKPOINT('Tag + pending caught up')
assert content_of(origin[REAL].commit) == content_of(user_commit)
assert origin[REAL].commit.parent == landed_commit
assert origin[PEND_TAG].commit == origin[PEND].commit
@gnumbd_test
def multi_pending(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
user_commit1 = origin[PEND].make_full_tree_commit('Hello world')
user_commit2 = origin[PEND].make_full_tree_commit('Cat food')
CHECKPOINT('Two pending commits')
RUN()
CHECKPOINT('And now they\'re on master')
assert content_of(origin[REAL].commit.parent) == content_of(user_commit1)
assert content_of(origin[REAL].commit) == content_of(user_commit2)
assert origin[REAL].commit.parent.parent == base_commit
# Inconsistency
# tag > pending
# Implicitly covers:
# * master > tag > pending
# * tag > pending > master
# * tag > master > pending
@gnumbd_test
def master_tag_ahead_pending(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
RUN(include_log=False)
origin[PEND].update_to(base_commit)
CHECKPOINT('Master and tag ahead of pending')
RUN()
CHECKPOINT('Should see errors and no change')
# tag == pending > master
@gnumbd_test
def tag_ahead_master_pending(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
RUN(include_log=False)
origin[REAL].update_to(base_commit)
CHECKPOINT('Tag+pending ahead of master')
RUN()
CHECKPOINT('Should see errors and no change')
# pending > tag > master
@gnumbd_test
def normal_with_master_lag(origin, _local, _config_ref, RUN, CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[PEND].make_full_tree_commit('Hello world')
RUN(include_log=False)
# master moves back
origin[REAL].update_to(base_commit)
# pending gets a new commit
origin[PEND].make_full_tree_commit('New pending')
CHECKPOINT('Master is behind, pending is ahead of tag')
RUN()
CHECKPOINT('Should see errors and no change')
# fix by rewinding tag
origin[PEND_TAG].update_to(origin[PEND_TAG].commit.parent)
CHECKPOINT('Fix by rewinding tag')
RUN()
CHECKPOINT('All better')
@gnumbd_test
def master_ahead_tag_ahead_pending(origin, _local, _config_ref, RUN,
CHECKPOINT):
base_commit = origin[REAL].make_full_tree_commit(
'Base commit', footers=gnumbd_footers(origin[REAL], 100))
for ref in (PEND, PEND_TAG):
origin[ref].fast_forward(base_commit)
origin[REAL].make_full_tree_commit('Directly landed commit!')
origin[PEND_TAG].make_full_tree_commit('Tag ahead of pending')
CHECKPOINT('Master and tag have diverged, pend lags')
RUN()
CHECKPOINT('Should have errored and nothing changed')
# master > pending == tag
@gnumbd_test
def master_ahead(origin, _local, _config_ref, RUN, CHECKPOINT):
  """master > pending == tag: a directly-landed commit must not be touched."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  base_commit = origin[REAL].make_full_tree_commit('Directly landed commit!')
  CHECKPOINT('Master contains a commit whose content isn\'t in pending')
  RUN()
  CHECKPOINT('Should have errored and nothing changed')
  assert origin[REAL].commit == base_commit
# pending == tag > master
@gnumbd_test
def master_behind(origin, _local, _config_ref, RUN, CHECKPOINT):
  """pending == tag > master: master lagging both should be an error."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  user_commit = origin[PEND].make_full_tree_commit('Hello world')
  origin[PEND_TAG].fast_forward(user_commit)
  CHECKPOINT('Master should have new commit but does not')
  RUN()
  CHECKPOINT('Error and no change')
# master > pending > tag
@gnumbd_test
def master_mismatch_and_pend(origin, _local, _config_ref, RUN, CHECKPOINT):
  """master > pending > tag: divergent master blocks numbering of pending."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  origin[PEND].make_full_tree_commit('Hello world')
  base_commit = origin[REAL].make_full_tree_commit('Directly landed commit!')
  CHECKPOINT('Master contains a commit whose content isn\'t in pending')
  RUN()
  CHECKPOINT('Should have errored and nothing changed')
  assert origin[REAL].commit == base_commit
# Branching
@gnumbd_test
def branch(origin, _local, config_ref, RUN, CHECKPOINT):
  """New branch-heads ref is numbered and gets a Branched-From footer."""
  # Enable numbering for branch-heads in addition to the default refglobs.
  new_globs = config_ref['enabled_refglobs'] + ['refs/branch-heads/*']
  config_ref.update(enabled_refglobs=new_globs)
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  origin[PEND].make_full_tree_commit('Hello world')
  CHECKPOINT('Pending commit', include_config=True)
  RUN()
  CHECKPOINT('And now it\'s on master', include_config=True)
  # Build a new branch
  for ref in (BRANCH, BRANCH_TAG, BRANCH_PEND):
    origin[ref].fast_forward(origin[REAL].commit)
  origin[BRANCH_PEND].make_full_tree_commit('Branch commit!')
  CHECKPOINT('New branch with pending', include_config=True)
  RUN()
  CHECKPOINT('Pending commit now on branch', include_config=True)
  origin[BRANCH_PEND].make_full_tree_commit('Another branch commit')
  CHECKPOINT('New pending commit for branch', include_config=True)
  RUN()
  CHECKPOINT('Second pending commit now on branch', include_config=True)
  # The branch must record the master commit (hash-position) it forked from.
  assert origin[BRANCH].commit.data.footers[gnumbd.BRANCHED_FROM] == (
    '%s-%s' % (
      origin[REAL].commit.hsh,
      origin[REAL].commit.data.footers[gnumbd.COMMIT_POSITION][0]
    ),
  )
@gnumbd_test
def branch_from_branch(origin, _local, config_ref, RUN, CHECKPOINT):
  """A branch of a branch accumulates the full Branched-From chain."""
  new_globs = config_ref['enabled_refglobs'] + ['refs/branch-heads/*']
  config_ref.update(enabled_refglobs=new_globs)
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  origin[PEND].make_full_tree_commit('Hello world')
  RUN(include_log=False)
  # Build a new branch
  for ref in (BRANCH, BRANCH_TAG, BRANCH_PEND):
    origin[ref].fast_forward(origin[REAL].commit)
  origin[BRANCH_PEND].make_full_tree_commit('Branch commit!')
  RUN(include_log=False)
  CHECKPOINT('Branch 1 in place', include_config=True)
  # Second-level branch forked off of the first branch.
  yo_branch = BRANCH+'_yo'
  yo_branch_tag = BRANCH_TAG+'_yo'
  yo_branch_pend = BRANCH_PEND+'_yo'
  for ref in (yo_branch, yo_branch_tag, yo_branch_pend):
    origin[ref].fast_forward(origin[BRANCH].commit)
  origin[yo_branch_pend].make_full_tree_commit('Super branchey commit')
  CHECKPOINT('New pending commit for branch', include_config=True)
  RUN()
  CHECKPOINT('Second pending commit now on branch', include_config=True)
  # Footer lists both ancestors: the first branch, then master.
  assert origin[yo_branch].commit.data.footers[gnumbd.BRANCHED_FROM] == (
    '%s-%s' % (
      origin[BRANCH].commit.hsh,
      origin[BRANCH].commit.data.footers[gnumbd.COMMIT_POSITION][0]
    ),
    '%s-%s' % (
      origin[REAL].commit.hsh,
      origin[REAL].commit.data.footers[gnumbd.COMMIT_POSITION][0]
    ),
  )
# Extra footers
@gnumbd_test
def extra_user_footer(origin, _local, _config_ref, RUN, CHECKPOINT):
  """User-supplied footers survive numbering; content is otherwise kept."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  user_commit = origin[PEND].make_full_tree_commit(
      'Hello world', footers=collections.OrderedDict([
          ('Change-Id', ['Icafebabe1cec6eadfeba']),
          ('Reviewed-by', [
              'Cool Dudette 64 <cd64@example.com>',
              'Epic Sky Troll <est@example.com>',
          ]),
          ('Tested-by', ['Lol JK <lol_jk@example.com>'])
      ]))
  CHECKPOINT('The setup...')
  RUN()
  CHECKPOINT('The new footers should appear after the current ones')
  assert content_of(origin[REAL].commit) == content_of(user_commit)
  assert origin[REAL].commit.parent == base_commit
@gnumbd_test
def extra_user_footer_bad(origin, _local, _config_ref, RUN, CHECKPOINT):
  """Daemon-reserved footers from users are renamed/stripped, not trusted."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  user_commit = origin[PEND].make_full_tree_commit(
      'Hello world', footers=collections.OrderedDict([
          ('Cr-Double-Secret', ['I can impersonate the daemon!']),
          ('git-svn-id', ['Well... this should never happen'])
      ]))
  CHECKPOINT('Two commits')
  RUN()
  CHECKPOINT('The bogus footers should be gone')
  assert content_of(origin[REAL].commit) == content_of(user_commit)
  # Reserved 'Cr-' footer is prefixed with 'Cr-Original-'; git-svn-id dropped.
  assert origin[REAL].commit.data.footers == {
      'Cr-Original-Double-Secret': ('I can impersonate the daemon!',),
      gnumbd.COMMIT_POSITION: (gnumbd.FMT_COMMIT_POSITION(origin[REAL], 101),),
  }
@gnumbd_test
def enforce_commit_timestamps(origin, _local, _config_ref, RUN, CHECKPOINT):
  """A pending commit older than its parent gets its timestamp bumped."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=gnumbd_footers(origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  # cheat and rewind the TestClock
  origin._clock._time -= 100 # pylint: disable=W0212
  user_commit = origin[PEND].make_full_tree_commit('Hello world')
  # Sanity-check the setup: the user commit really is in the past.
  assert (
      user_commit.data.committer.timestamp.secs <
      base_commit.data.committer.timestamp.secs
  )
  CHECKPOINT('%r has a timestamp behind %r' % (
      user_commit.hsh, base_commit.hsh), include_committer=True)
  RUN()
  CHECKPOINT('Presto! Timestamp is fixed', include_committer=True)
  assert content_of(origin[REAL].commit) == content_of(user_commit)
  assert origin[REAL].commit.parent == base_commit
  # The landed commit must be strictly newer than its parent.
  assert (
      origin[REAL].commit.data.committer.timestamp.secs >
      origin[REAL].commit.parent.data.committer.timestamp.secs
  )
# git_svn_mode test.
@gnumbd_test
def svn_mode_uses_svn_rev(origin, _local, config_ref, RUN, CHECKPOINT):
  """In git_svn_mode the commit number comes from the svn footer (200)."""
  config_ref.update(git_svn_mode=True)
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=svn_footers(100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  user_commit = origin[PEND].make_full_tree_commit(
      'Hello world', footers=svn_footers(200))
  CHECKPOINT('Two commits in origin')
  RUN()
  CHECKPOINT('Hello world should be 200')
  assert content_of(origin[REAL].commit) == content_of(user_commit)
  assert origin[REAL].commit.parent == base_commit
@gnumbd_test
def push_extra(origin, _local, config_ref, RUN, CHECKPOINT):
  """push_synth_extra mirrors the numbered master commit onto an extra ref."""
  config_ref.update(
    git_svn_mode=True,
    push_synth_extra={
      'refs/heads/master': ['refs/heads/crazy-times']
    }
  )
  base_commit = origin[REAL].make_full_tree_commit(
      'Base commit', footers=svn_footers(100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  user_commit = origin[PEND].make_full_tree_commit(
      'Hello world', footers=svn_footers(200))
  CHECKPOINT('Two commits in origin')
  RUN()
  CHECKPOINT('Should have crazy-times')
  assert content_of(origin[REAL].commit) == content_of(user_commit)
  assert origin[REAL].commit.parent == base_commit
@gnumbd_test
def cherry_pick_regression(origin, _local, _config_ref, RUN, CHECKPOINT):
  """A cherry-picked commit's stale Cr-Commit-Position gets overwritten."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Numbered commit', footers=gnumbd_footers(
          origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  # Simulate a cherry-pick: it carries a position footer from another branch.
  origin[PEND].make_full_tree_commit(
      'cherry pick', footers=gnumbd_footers(origin['refs/other/branch'], 200))
  origin[PEND].make_full_tree_commit('normal commit')
  CHECKPOINT('OK commit with cherrypick (including cr-commit-pos)')
  RUN()
  CHECKPOINT('Cherry pick\'s number should be overwritten')
@gnumbd_test
def cherry_pick_x_regression(origin, _local, _config_ref, RUN, CHECKPOINT):
  """`git cherry-pick -x`-style messages are renumbered; double-picks stack
  Cr-Original footers."""
  base_commit = origin[REAL].make_full_tree_commit(
      'Numbered commit', footers=gnumbd_footers(
          origin[REAL], 100))
  for ref in (PEND, PEND_TAG):
    origin[ref].fast_forward(base_commit)
  # The footer block here mimics what `git cherry-pick -x` produces.
  origin[PEND].make_full_tree_commit(
      'cherry pick\n'
      '\n'
      'Cr-Commit-Position: refs/other/branch@{#200}\n'
      '(cherry picked from commit 42a2ff8eb8b7167353587c52f4e06e67f87d4b60)'
  )
  origin[PEND].make_full_tree_commit('normal commit')
  CHECKPOINT('OK commit with cherrypick (including cr-commit-pos)')
  RUN()
  CHECKPOINT('Cherry pick\'s number should be overwritten')
  # Now cherry-pick an already-numbered commit: a second level of picking.
  cp = str(origin[REAL].commit.parent.data).rstrip()
  cp += '\n(cherry picked from commit %s)' % origin[REAL].commit.parent.hsh
  origin[PEND].make_full_tree_commit(cp)
  origin[PEND].make_full_tree_commit('another normal commit')
  CHECKPOINT('Now we see a double-cherry-pick')
  RUN()
  CHECKPOINT('Should see two Cr-Original footers')
| 33.134351
| 79
| 0.734
| 3,071
| 21,703
| 4.936177
| 0.087919
| 0.072564
| 0.060162
| 0.090243
| 0.8019
| 0.759153
| 0.732898
| 0.70473
| 0.683554
| 0.675638
| 0
| 0.010581
| 0.1421
| 21,703
| 654
| 80
| 33.185015
| 0.803588
| 0.033221
| 0
| 0.667355
| 0
| 0
| 0.177605
| 0.017087
| 0
| 0
| 0
| 0
| 0.07438
| 1
| 0.068182
| false
| 0
| 0.004132
| 0.004132
| 0.078512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a91a40191cb62c12ce462a446f1aac2af082def6
| 368
|
py
|
Python
|
tests/functional/pages/main.py
|
mikepolo-JQ/inter
|
64f5da344c69e7885f4d76778016e9723c8bc7a6
|
[
"MIT"
] | 1
|
2020-10-18T19:04:48.000Z
|
2020-10-18T19:04:48.000Z
|
tests/functional/pages/main.py
|
mikepolo-JQ/inter
|
64f5da344c69e7885f4d76778016e9723c8bc7a6
|
[
"MIT"
] | 6
|
2021-04-06T18:19:10.000Z
|
2021-09-22T19:44:06.000Z
|
tests/functional/pages/main.py
|
mikepolo-JQ/TMS
|
a043a04740645b0fab672366bc0d944f25e6efff
|
[
"MIT"
] | null | null | null |
from selenium.webdriver.common.by import By
from .abstract import PageElement
from .abstract import PageObject
class MainPage(PageObject):
    """Page object for the site's main page.

    Exposes the h1/h2 headings as locatable elements for functional tests.
    """
    h1 = PageElement(By.CSS_SELECTOR, "h1")
    h2 = PageElement(By.CSS_SELECTOR, "h2")
    # p = PageElement(By.CSS_SELECTOR, "p")
    # a = PageElement(By.CSS_SELECTOR, "a")
    # # nav = PageElement(By.CSS_SELECTOR, "nav")
| 28.307692
| 49
| 0.706522
| 48
| 368
| 5.3125
| 0.375
| 0.254902
| 0.313725
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013029
| 0.165761
| 368
| 12
| 50
| 30.666667
| 0.81759
| 0.320652
| 0
| 0
| 0
| 0
| 0.016327
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a9295513371949fd55699402ccc5b25fb41d542c
| 142
|
py
|
Python
|
rest_utils/permissions.py
|
itkpi/events-storage
|
69dfdd11ce7ce28abb0df1d5707db65ecc5ad4f8
|
[
"Apache-2.0"
] | 1
|
2016-01-02T21:18:56.000Z
|
2016-01-02T21:18:56.000Z
|
rest_utils/permissions.py
|
itkpi/events-storage
|
69dfdd11ce7ce28abb0df1d5707db65ecc5ad4f8
|
[
"Apache-2.0"
] | null | null | null |
rest_utils/permissions.py
|
itkpi/events-storage
|
69dfdd11ce7ce28abb0df1d5707db65ecc5ad4f8
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class Permission(metaclass=ABCMeta):
    """Abstract base class for permission checks on incoming requests."""
    @abstractmethod
    def check(self, request):
        """Decide whether *request* is allowed; implemented by subclasses."""
        pass
| 17.75
| 39
| 0.71831
| 15
| 142
| 6.8
| 0.866667
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211268
| 142
| 7
| 40
| 20.285714
| 0.910714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
a94731fdac4d8403ce8c870ce598f373b8e48bb8
| 228
|
py
|
Python
|
api/core/access_control.py
|
iashraful/survey-api
|
67de151f2938c24db96186117eed60b7909146d2
|
[
"MIT"
] | null | null | null |
api/core/access_control.py
|
iashraful/survey-api
|
67de151f2938c24db96186117eed60b7909146d2
|
[
"MIT"
] | 3
|
2021-09-22T13:55:12.000Z
|
2021-09-28T14:27:34.000Z
|
api/core/access_control.py
|
iashraful/survey-api
|
67de151f2938c24db96186117eed60b7909146d2
|
[
"MIT"
] | 1
|
2021-09-10T06:55:17.000Z
|
2021-09-10T06:55:17.000Z
|
from sqlalchemy.orm import Session
from starlette.requests import Request
class DataAccessQueryModelManager:
    """Mixin giving model classes a default query entry point."""
    @classmethod
    def objects(cls, session: Session, request: Request = None):
        """Return a base query over *cls* using *session*.

        NOTE(review): *request* is currently unused here -- presumably a hook
        for per-request access control in subclasses; confirm before removal.
        """
        return session.query(cls)
| 22.8
| 64
| 0.758772
| 25
| 228
| 6.92
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 228
| 9
| 65
| 25.333333
| 0.920213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
a952cccdd2e156f8c00556eeccf7993c5219829f
| 112
|
py
|
Python
|
src/modules/util/exception.py
|
frodo821/WebNovelFormatOfflineConverter
|
9dbc2ffccc7f56752fcbb80d515d4e5e99972632
|
[
"BSD-2-Clause"
] | null | null | null |
src/modules/util/exception.py
|
frodo821/WebNovelFormatOfflineConverter
|
9dbc2ffccc7f56752fcbb80d515d4e5e99972632
|
[
"BSD-2-Clause"
] | null | null | null |
src/modules/util/exception.py
|
frodo821/WebNovelFormatOfflineConverter
|
9dbc2ffccc7f56752fcbb80d515d4e5e99972632
|
[
"BSD-2-Clause"
] | null | null | null |
#-*- coding:utf-8 -*-
class ParseException(Exception):
    """Raised when document parsing fails."""
class ThroughLoop(Exception):
    """Signal exception -- apparently used by callers as loop control (verify)."""
| 14
| 33
| 0.633929
| 11
| 112
| 6.454545
| 0.727273
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0.232143
| 112
| 7
| 34
| 16
| 0.813953
| 0.178571
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
a95b208f0b60eb7d7a8acd5c44cb2ba8fdb02d15
| 58
|
py
|
Python
|
Verkefni 1/RemoveEmptyStrings.py
|
Kallehz/Python
|
7bc08d4ebed6f97d6cfa8efc585a566388eeec8f
|
[
"Apache-2.0"
] | 1
|
2019-12-14T04:34:50.000Z
|
2019-12-14T04:34:50.000Z
|
Verkefni 1/RemoveEmptyStrings.py
|
Kallehz/Python
|
7bc08d4ebed6f97d6cfa8efc585a566388eeec8f
|
[
"Apache-2.0"
] | null | null | null |
Verkefni 1/RemoveEmptyStrings.py
|
Kallehz/Python
|
7bc08d4ebed6f97d6cfa8efc585a566388eeec8f
|
[
"Apache-2.0"
] | null | null | null |
def remove_empty(l):
    """Return a new list containing every element of *l* except empty strings.

    Non-string falsy values (0, None, ...) are kept; only '' is dropped.
    """
    kept = []
    for item in l:
        if item != '':
            kept.append(item)
    return kept
| 19.333333
| 36
| 0.568966
| 12
| 58
| 2.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.275862
| 58
| 2
| 37
| 29
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
a961a8bafa73f436a1c5f08b2435bbde1854e3d5
| 179
|
py
|
Python
|
modules/logging/logmanager.py
|
predragf/symc
|
f1ed239af7591bcf32648b4031708e74ce88fe2c
|
[
"MIT"
] | 1
|
2022-03-18T05:50:56.000Z
|
2022-03-18T05:50:56.000Z
|
modules/logging/logmanager.py
|
predragf/symc
|
f1ed239af7591bcf32648b4031708e74ce88fe2c
|
[
"MIT"
] | null | null | null |
modules/logging/logmanager.py
|
predragf/symc
|
f1ed239af7591bcf32648b4031708e74ce88fe2c
|
[
"MIT"
] | null | null | null |
class LogManager:
    """Logger accessor.

    NOTE(review): this class has several latent defects -- see the per-line
    notes below; also this view of the file may be truncated (no return
    statement is visible).
    """
    def __init__():
        # NOTE(review): missing `self` parameter -- `LogManager()` would raise
        # TypeError. Confirm whether this class is ever instantiated.
        pass
    def getLogger(loggerName):
        # NOTE(review): missing `self`; `log` and `logging` are not imported
        # in this view -- verify they exist at module level.
        logger = log.setup_custom_logger(loggerName)
        # NOTE(review): immediately overwrites the custom logger above, so
        # setup_custom_logger's result is discarded -- likely a bug.
        logger = logging.getLogger('root')
| 22.375
| 52
| 0.648045
| 18
| 179
| 6.111111
| 0.722222
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.256983
| 179
| 7
| 53
| 25.571429
| 0.827068
| 0
| 0
| 0
| 0
| 0
| 0.022346
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
a985fb0858d4a3370c7d66281c80a9922d7effe1
| 114
|
py
|
Python
|
kaishi/image/labelers/__init__.py
|
kungfuai/kaishi
|
e20360170ccac2111cab61fcd71b81be3c2a7468
|
[
"MIT"
] | 10
|
2020-04-01T16:46:25.000Z
|
2021-02-09T15:56:42.000Z
|
kaishi/image/labelers/__init__.py
|
kungfuai/kaishi
|
e20360170ccac2111cab61fcd71b81be3c2a7468
|
[
"MIT"
] | 14
|
2020-03-23T13:32:35.000Z
|
2021-12-07T19:30:23.000Z
|
kaishi/image/labelers/__init__.py
|
kungfuai/kaishi
|
e20360170ccac2111cab61fcd71b81be3c2a7468
|
[
"MIT"
] | 2
|
2020-08-14T07:23:06.000Z
|
2021-12-06T18:20:42.000Z
|
"""Pipeline components that label data points based on user-defined criteria, specifically for image datasets."""
| 57
| 113
| 0.798246
| 15
| 114
| 6.066667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 114
| 1
| 114
| 114
| 0.91
| 0.938596
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8d37a14eddf17b2abe0f2759fd9c12466c578452
| 112
|
py
|
Python
|
pyclustering/tests/__main__.py
|
JosephChataignon/pyclustering
|
bf4f51a472622292627ec8c294eb205585e50f52
|
[
"BSD-3-Clause"
] | 1,013
|
2015-01-26T19:50:14.000Z
|
2022-03-31T07:38:48.000Z
|
pyclustering/tests/__main__.py
|
peterlau0626/pyclustering
|
bf4f51a472622292627ec8c294eb205585e50f52
|
[
"BSD-3-Clause"
] | 542
|
2015-01-20T16:44:32.000Z
|
2022-01-29T14:57:20.000Z
|
pyclustering/tests/__main__.py
|
peterlau0626/pyclustering
|
bf4f51a472622292627ec8c294eb205585e50f52
|
[
"BSD-3-Clause"
] | 262
|
2015-03-19T07:28:12.000Z
|
2022-03-30T07:28:24.000Z
|
from pyclustering.tests.tests_runner import tests_runner
# Entry point: run the full pyclustering test suite when executed as a module
# (`python -m pyclustering.tests`).
if __name__ == "__main__":
    tests_runner.run()
| 22.4
| 57
| 0.75
| 14
| 112
| 5.214286
| 0.642857
| 0.452055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 112
| 4
| 58
| 28
| 0.776596
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8d489fe0535e0ba3d683ef881e7bd7a94e3c698d
| 73
|
py
|
Python
|
plotutils/axpositioning/__init__.py
|
johndamen/plotutils
|
60bc043314c823bbcb27740b22393f0fd2b58476
|
[
"MIT"
] | null | null | null |
plotutils/axpositioning/__init__.py
|
johndamen/plotutils
|
60bc043314c823bbcb27740b22393f0fd2b58476
|
[
"MIT"
] | null | null | null |
plotutils/axpositioning/__init__.py
|
johndamen/plotutils
|
60bc043314c823bbcb27740b22393f0fd2b58476
|
[
"MIT"
] | null | null | null |
from .axpositioning import PositioningAxes
from .gui import adjust_axes
| 18.25
| 42
| 0.849315
| 9
| 73
| 6.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123288
| 73
| 3
| 43
| 24.333333
| 0.953125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8d6251e8213ca79e67b340346adb4097ecf73ee3
| 88
|
py
|
Python
|
obqa/models/__init__.py
|
dianags/OpenBookQA
|
186dcd4d90633f13706966925995ff0f135ffd11
|
[
"Apache-2.0"
] | 66
|
2018-10-23T03:55:03.000Z
|
2021-07-29T07:38:17.000Z
|
obqa/models/__init__.py
|
dianags/OpenBookQA
|
186dcd4d90633f13706966925995ff0f135ffd11
|
[
"Apache-2.0"
] | 4
|
2019-02-19T05:46:16.000Z
|
2021-06-04T01:17:38.000Z
|
obqa/models/__init__.py
|
dianags/OpenBookQA
|
186dcd4d90633f13706966925995ff0f135ffd11
|
[
"Apache-2.0"
] | 23
|
2018-10-23T22:30:11.000Z
|
2022-03-11T00:35:48.000Z
|
from obqa.models.entailment.stacked_nn_aggregate_custom import StackedNNAggregateCustom
| 44
| 87
| 0.920455
| 10
| 88
| 7.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 88
| 1
| 88
| 88
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8d68edc2a267dabfef9dd38c07f3f82fb103e73e
| 323
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowRunningConfigNtp/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowRunningConfigNtp/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowRunningConfigNtp/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Expected parse of `show running-config ntp`: the NTP server 10.4.1.1 is
# configured in both the default VRF and the management VRF.
expected_output = {
    'vrf': {
        'default': {
            'address': {'10.4.1.1': {'type': 'server'}},
        },
        'management': {
            'address': {'10.4.1.1': {'type': 'server'}},
        },
    },
}
| 17
| 37
| 0.244582
| 19
| 323
| 4.105263
| 0.578947
| 0.230769
| 0.25641
| 0.282051
| 0.564103
| 0.564103
| 0.564103
| 0
| 0
| 0
| 0
| 0.074627
| 0.585139
| 323
| 18
| 38
| 17.944444
| 0.507463
| 0
| 0
| 0.375
| 0
| 0
| 0.218069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a5d94233b442ee576e9b6c0fe44c63cd3e4a8946
| 532
|
py
|
Python
|
DQM/SiStripMonitorClient/test/sistrip_gres_layout.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/SiStripMonitorClient/test/sistrip_gres_layout.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/SiStripMonitorClient/test/sistrip_gres_layout.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
# Register a DQM layout named `p` (built from one or more rows of histogram
# paths) into the layout dict `i` under the "Layouts/SiStrip Layouts/" prefix.
def dtlayout(i, p, *rows): i["Layouts/SiStrip Layouts/" + p] = DQMItem(layout=rows)
# SiStrip digi summary: one row per partition (TIB, TOB), then paired
# MINUS/PLUS rows for the TID and TEC endcaps.
dtlayout(dqmitems, "SiStrip_Digi_Summary",
  ["SiStrip/MechanicalView/TIB/Summary_NumberOfDigis_in_TIB"],
  ["SiStrip/MechanicalView/TOB/Summary_NumberOfDigis_in_TOB"],
  ["SiStrip/MechanicalView/TID/MINUS/Summary_NumberOfDigis_in_MINUS",
   "SiStrip/MechanicalView/TID/PLUS/Summary_NumberOfDigis_in_PLUS"],
  ["SiStrip/MechanicalView/TEC/MINUS/Summary_NumberOfDigis_in_MINUS",
   "SiStrip/MechanicalView/TEC/PLUS/Summary_NumberOfDigis_in_PLUS"])
| 53.2
| 83
| 0.806391
| 64
| 532
| 6.390625
| 0.328125
| 0.308068
| 0.322738
| 0.132029
| 0.405868
| 0.259169
| 0.259169
| 0
| 0
| 0
| 0
| 0
| 0.06203
| 532
| 9
| 84
| 59.111111
| 0.819639
| 0
| 0
| 0
| 0
| 0
| 0.755639
| 0.672932
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
93d340551c953f4fc8f968a1cb9b5f79aaf59bd2
| 924
|
py
|
Python
|
fintools/__init__.py
|
scubamut/fintools
|
ba05c6b6c9e506f55910987e9d04c4babdcc6370
|
[
"MIT"
] | 2
|
2020-11-29T11:12:19.000Z
|
2021-11-14T01:08:49.000Z
|
fintools/__init__.py
|
scubamut/fintools
|
ba05c6b6c9e506f55910987e9d04c4babdcc6370
|
[
"MIT"
] | null | null | null |
fintools/__init__.py
|
scubamut/fintools
|
ba05c6b6c9e506f55910987e9d04c4babdcc6370
|
[
"MIT"
] | 1
|
2019-07-29T23:12:11.000Z
|
2019-07-29T23:12:11.000Z
|
import cvxopt
from fintools.backtest import backtest
from fintools.allocation_helper_functions import *
from fintools.cla import *
from fintools.compute_weights_PMA import *
from fintools.compute_weights_RS_DM import *
from fintools.endpoints import *
from fintools.finhelpers3 import *
from fintools.get_yahoo_prices import *
from fintools.mlhelpers3 import *
from fintools.monthly_return_table import *
from fintools.Parameters import *
from fintools.portfolio_helper_functions import *
from fintools.set_start_end import set_start_end
from fintools.show_return_table import show_return_table
from fintools.show_annual_returns import show_annual_returns
from fintools.get_DataArray import get_DataArray
from fintools.get_Dataset import get_Dataset
from fintools.make_pipeline_engine import make_pipeline_engine
from fintools.get_tiingo_prices import get_tiingo_prices
from fintools.pipeline_engine import pipeline_engine
| 40.173913
| 62
| 0.876623
| 130
| 924
| 5.930769
| 0.284615
| 0.311284
| 0.256809
| 0.064851
| 0.168612
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002378
| 0.089827
| 924
| 22
| 63
| 42
| 0.914388
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
93da1d6727a133197d14b46cff7ba8b52f7ae6a8
| 226
|
py
|
Python
|
psono/fileserver/views/__init__.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 48
|
2018-04-19T15:50:58.000Z
|
2022-01-23T15:58:11.000Z
|
psono/fileserver/views/__init__.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 9
|
2018-09-13T14:56:18.000Z
|
2020-01-17T16:44:33.000Z
|
psono/fileserver/views/__init__.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 11
|
2019-09-20T11:53:47.000Z
|
2021-07-18T22:41:31.000Z
|
from .alive import AliveView
from .authorize_upload import AuthorizeUploadView
from .authorize_download import AuthorizeDownloadView
from .cleanup_chunks import CleanupChunksView
from .revoke_download import RevokeDownloadView
| 45.2
| 53
| 0.893805
| 24
| 226
| 8.25
| 0.583333
| 0.131313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084071
| 226
| 5
| 54
| 45.2
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9e21bf030eb665d8f78af01347c7be7db76d04b7
| 123
|
py
|
Python
|
Assignment/dsa/__init__.py
|
MC-DeltaT/DSA-Practicals
|
5c77cac1cfee5d756b84722e563813c153486770
|
[
"MIT"
] | null | null | null |
Assignment/dsa/__init__.py
|
MC-DeltaT/DSA-Practicals
|
5c77cac1cfee5d756b84722e563813c153486770
|
[
"MIT"
] | null | null | null |
Assignment/dsa/__init__.py
|
MC-DeltaT/DSA-Practicals
|
5c77cac1cfee5d756b84722e563813c153486770
|
[
"MIT"
] | null | null | null |
from .array import *
from .hash_table import *
from .set import *
from .singly_linked_list import *
from .sorting import *
| 20.5
| 33
| 0.756098
| 18
| 123
| 5
| 0.555556
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162602
| 123
| 5
| 34
| 24.6
| 0.873786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9e25d8ef1ee7cb2f706cb2de14390e81ab1059a0
| 1,778
|
py
|
Python
|
toolchain/disassemble_hexdump.py
|
nielsdos/kwast
|
a2118d0598b35ed1ddb6520ff1caa571a22bd427
|
[
"MIT"
] | 45
|
2020-03-28T20:51:17.000Z
|
2022-03-18T07:15:45.000Z
|
toolchain/disassemble_hexdump.py
|
nielsdos/kwast
|
a2118d0598b35ed1ddb6520ff1caa571a22bd427
|
[
"MIT"
] | 11
|
2020-02-22T17:28:05.000Z
|
2020-08-26T20:39:39.000Z
|
toolchain/disassemble_hexdump.py
|
nielsdos/kwast
|
a2118d0598b35ed1ddb6520ff1caa571a22bd427
|
[
"MIT"
] | 2
|
2021-06-25T17:00:13.000Z
|
2022-03-24T21:22:04.000Z
|
#!/usr/bin/env python3
# Disassemble a hard-coded hexdump of x86-64 machine code with objdump,
# relocated to the (crash) address given via --adjust-vma.
import struct
import os
import tempfile

# Raw opcode bytes captured from a hexdump. Renamed from `list`, which
# shadowed the builtin. NOTE(review): the first assignment below is dead --
# it is immediately overwritten by the second; kept for reference only.
data = [0x55, 0x48, 0x89, 0xe5, 0xb8, 0x1, 0x0, 0x0, 0x0, 0x5d, 0xc3, 0x55, 0x48, 0x89, 0xe5, 0xb8, 0x2, 0x0, 0x0, 0x0, 0x5d, 0xc3, 0x55, 0x48, 0x89, 0xe5, 0xb8, 0x3, 0x0, 0x0, 0x0, 0x5d, 0xc3, 0x55, 0x48, 0x89, 0xe5, 0xb8, 0x4, 0x0, 0x0, 0x0, 0x5d, 0xc3, 0x55, 0x48, 0x89, 0xe5, 0xb8, 0x5, 0x0, 0x0, 0x0, 0x5d, 0xc3, 0x55, 0x48, 0x89, 0xe5, 0x40, 0x89, 0xf0, 0x83, 0xc0, 0xfd, 0x83, 0xf8, 0x7, 0x77, 0x1e, 0x83, 0xf8, 0x8, 0x73, 0x12, 0x89, 0xc0, 0x48, 0x8d, 0xd, 0x2c, 0x0, 0x0, 0x0, 0x48, 0x63, 0x4, 0x81, 0x48, 0x1, 0xc1, 0xff, 0xe1, 0xe8, 0x9e, 0xff, 0xff, 0xff, 0x5d, 0xc3, 0xe8, 0xc3, 0xff, 0xff, 0xff, 0x5d, 0xc3, 0xe8, 0x9b, 0xff, 0xff, 0xff, 0x5d, 0xc3, 0xe8, 0x9f, 0xff, 0xff, 0xff, 0x5d, 0xc3, 0xe8, 0xa3, 0xff, 0xff, 0xff, 0x5d, 0xc3, 0xdd, 0xff, 0xff, 0xff, 0xe4, 0xff, 0xff, 0xff, 0xe4, 0xff, 0xff, 0xff, 0xe4, 0xff, 0xff, 0xff, 0xe4, 0xff, 0xff, 0xff, 0xeb, 0xff, 0xff, 0xff, 0xf2, 0xff, 0xff, 0xff, 0xf9, 0xff, 0xff, 0xff, 0x55, 0x48, 0x89, 0xe5, 0x41, 0x57, 0x48, 0x83, 0xec, 0x8, 0x48, 0x89, 0xbc, 0x24, 0x0, 0x0, 0x0, 0x0, 0xb8, 0x3, 0x0, 0x0, 0x0, 0x49, 0x89, 0xff, 0x4c, 0x89, 0xff, 0x40, 0x89, 0xc6, 0xe8, 0x72, 0xff, 0xff, 0xff, 0x4c, 0x8b, 0xbc, 0x24, 0x0, 0x0, 0x0, 0x0, 0x49, 0x8b, 0x4f, 0x8, 0x4c, 0x89, 0xff, 0x40, 0x89, 0xc6, 0xff, 0xd1, 0x48, 0x83, 0xc4, 0x8, 0x41, 0x5f, 0x5d, 0xc3]
data = [0x5d, 0x41, 0x5c, 0x5b, 0x5d, 0xc3, 0xf, 0xb, 0xbe, 0x0, 0x0, 0x0, 0x0, 0x4c, 0x89, 0xe7, 0xe8, 0x14, 0x2b, 0x2a, 0x0, 0xb9, 0x10, 0x0, 0x0, 0x0, 0x49, 0x89, 0xc4, 0x41, 0xd3, 0xe4]

# Write the bytes to a temp file and let objdump disassemble them in place.
# The context manager guarantees the temp file is closed (and thus removed,
# since delete=True is the default) even if objdump fails.
with tempfile.NamedTemporaryFile(mode='wb') as out:
    out.write(struct.pack(f'{len(data)}B', *data))
    out.flush()
    os.system(f'objdump -b binary -D -m i386:x86-64 {out.name} --adjust-vma 0x7ffffffb0097')
| 118.533333
| 1,314
| 0.663105
| 303
| 1,778
| 3.891089
| 0.333333
| 0.189992
| 0.10687
| 0.094996
| 0.445293
| 0.374046
| 0.217981
| 0.217981
| 0.217981
| 0.189992
| 0
| 0.358871
| 0.163105
| 1,778
| 14
| 1,315
| 127
| 0.433468
| 0.011811
| 0
| 0
| 0
| 0.1
| 0.050114
| 0
| 0
| 0
| 0.563212
| 0
| 0
| 1
| 0
| false
| 0
| 0.3
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f50d7c84d30b7e79f17c34ebd45d1bd7003f604e
| 316
|
py
|
Python
|
app/consumer/settings.py
|
multicatch/django-example-oauth
|
432fa29cec72f4b4c12ddd8fdfaf1c34067716cb
|
[
"MIT"
] | null | null | null |
app/consumer/settings.py
|
multicatch/django-example-oauth
|
432fa29cec72f4b4c12ddd8fdfaf1c34067716cb
|
[
"MIT"
] | 4
|
2021-03-30T13:09:50.000Z
|
2021-09-22T18:55:59.000Z
|
app/consumer/settings.py
|
multicatch/django-example-oauth
|
432fa29cec72f4b4c12ddd8fdfaf1c34067716cb
|
[
"MIT"
] | null | null | null |
# OAuth2 consumer settings for the example app (authorization-code flow
# against a server running on localhost:8000).
# NOTE(review): client ID/secret are hard-coded in source. Acceptable for a
# demo project, but real credentials must come from the environment or a
# secret store, never version control.
OAUTH2_SERVER_URL = 'http://localhost:8000/oauth2'
OAUTH2_CLIENT_ID = 'Qv6vn7hxGGNyGuLxOU7DHtvPAykevYe1fKwy0eEP'
OAUTH2_CLIENT_SECRET = '51SAwsXvhxjMTRUufxQdobMnqs2PrcmK4AEyajWTPfr2LCDt9ML7zeOWPirAhC5rvQW9hKfsjNPoraLV9meCHyLGomvM4H0y7eMXVS2eNFH1H3kJFekePBbbRBUFEsLt'
REDIRECT_URL = 'http://localhost:8000/consumer/'
| 63.2
| 153
| 0.889241
| 21
| 316
| 13.047619
| 0.619048
| 0.051095
| 0.116788
| 0.145985
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108553
| 0.037975
| 316
| 4
| 154
| 79
| 0.792763
| 0
| 0
| 0
| 0
| 0
| 0.718354
| 0.531646
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
19333a18a2685743d721bcb608a01ed93f2dd9c5
| 690
|
py
|
Python
|
agents/foundation/AgentType.py
|
lmarent/network_agents_ver2_python
|
f85811086d2bfe067b44e52c6aff3009d3353586
|
[
"MIT"
] | null | null | null |
agents/foundation/AgentType.py
|
lmarent/network_agents_ver2_python
|
f85811086d2bfe067b44e52c6aff3009d3353586
|
[
"MIT"
] | null | null | null |
agents/foundation/AgentType.py
|
lmarent/network_agents_ver2_python
|
f85811086d2bfe067b44e52c6aff3009d3353586
|
[
"MIT"
] | null | null | null |
class AgentType():
    """Enumerates the kinds of agents and maps each kind to the name of
    the network interface it uses."""

    # Numeric type constants (the external contract of this class).
    CONSUMER_TYPE = 1
    PROVIDER_ISP = 2
    PROVIDER_BACKHAUL = 3
    PRESENTER_TYPE = 4
    PROVIDER_SUP = 5

    def __init__(self, type):
        # Lookup table from type constant to interface name; all provider
        # variants share the 'provider' interface.
        self.intfNames = {
            AgentType.CONSUMER_TYPE: 'consumer',
            AgentType.PROVIDER_ISP: 'provider',
            AgentType.PROVIDER_BACKHAUL: 'provider',
            AgentType.PRESENTER_TYPE: 'presenter',
            AgentType.PROVIDER_SUP: 'provider',
        }
        self.value = type

    def getInterfaceName(self):
        """Return the interface name associated with this agent's type."""
        return self.intfNames[self.value]

    def getType(self):
        """Return the raw numeric type constant passed at construction."""
        return self.value
| 31.363636
| 65
| 0.624638
| 70
| 690
| 5.957143
| 0.314286
| 0.218225
| 0.263789
| 0.215827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.282609
| 690
| 22
| 66
| 31.363636
| 0.832323
| 0
| 0
| 0
| 0
| 0
| 0.061194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0.111111
| 0.611111
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1944abc88a150323576564b80938e8dfd7713cfa
| 52
|
py
|
Python
|
src/utils/runners/__init__.py
|
kryvokhyzha/bert-for-ukranian-ner
|
48da40f09cb216ad51a97c303998157858fbe8bc
|
[
"MIT"
] | null | null | null |
src/utils/runners/__init__.py
|
kryvokhyzha/bert-for-ukranian-ner
|
48da40f09cb216ad51a97c303998157858fbe8bc
|
[
"MIT"
] | null | null | null |
src/utils/runners/__init__.py
|
kryvokhyzha/bert-for-ukranian-ner
|
48da40f09cb216ad51a97c303998157858fbe8bc
|
[
"MIT"
] | null | null | null |
from utils.runners.CustomRunner import CustomRunner
| 26
| 51
| 0.884615
| 6
| 52
| 7.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 52
| 1
| 52
| 52
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
198b2ce9f1702f539431929b94bd3d235dd3535a
| 331
|
py
|
Python
|
tests/test_views.py
|
jorzel/quotes-storage
|
e2b42e07d2fbf2db7c375865f06a277f15a17dd9
|
[
"MIT"
] | null | null | null |
tests/test_views.py
|
jorzel/quotes-storage
|
e2b42e07d2fbf2db7c375865f06a277f15a17dd9
|
[
"MIT"
] | null | null | null |
tests/test_views.py
|
jorzel/quotes-storage
|
e2b42e07d2fbf2db7c375865f06a277f15a17dd9
|
[
"MIT"
] | null | null | null |
def test_index(client):
response = client.get('/')
assert response.status_code == 200
assert response.content_type == 'text/html; charset=utf-8'
def test_book_views_get(client):
response = client.get('/book')
assert response.status_code == 200
assert response.content_type == 'text/html; charset=utf-8'
| 25.461538
| 62
| 0.697885
| 45
| 331
| 4.955556
| 0.422222
| 0.251121
| 0.179372
| 0.206278
| 0.636771
| 0.636771
| 0.636771
| 0.636771
| 0.636771
| 0.636771
| 0
| 0.029091
| 0.169184
| 331
| 12
| 63
| 27.583333
| 0.781818
| 0
| 0
| 0.5
| 0
| 0
| 0.164134
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
19948c95de56e79d17c7a8ac6c62405d4866da55
| 7,695
|
py
|
Python
|
testing/unit_tests/test_completeness_and_consistency_checker.py
|
IPASC/DataConversionTool
|
88f04c4df97f4a060c566da4bfc0c0e4fe246536
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2021-12-16T04:21:37.000Z
|
2022-03-10T03:19:21.000Z
|
testing/unit_tests/test_completeness_and_consistency_checker.py
|
IPASC/DataConversionTool
|
88f04c4df97f4a060c566da4bfc0c0e4fe246536
|
[
"MIT",
"BSD-3-Clause"
] | 12
|
2021-12-20T13:22:45.000Z
|
2022-02-17T19:57:03.000Z
|
testing/unit_tests/test_completeness_and_consistency_checker.py
|
amasmiller/PACFISH
|
f9bc912e701df3261a2b9a94df281c664701701d
|
[
"BSD-3-Clause"
] | 1
|
2022-02-11T21:33:28.000Z
|
2022-02-11T21:33:28.000Z
|
# SPDX-FileCopyrightText: 2021 International Photoacoustics Standardisation Consortium (IPASC)
# SPDX-License-Identifier: BSD 3-Clause License
import unittest
import numpy as np
from unittest.case import TestCase
import pacfish as pf
from testing.unit_tests.utils import create_complete_device_metadata_dictionary, create_complete_acquisition_meta_data_dictionary
import os
class CompletenessAndConsistencyTest(TestCase):
    """Unit tests for the pacfish completeness and consistency checkers.

    Improvements over the original: the repeated try/except/flag boilerplate
    is replaced by ``assertRaises`` context managers, and the four identical
    ``pf.PAData(...)`` constructions are factored into one helper.
    """

    def setUp(self):
        print("setUp")

    def tearDown(self):
        print("tearDown")

    def _make_pa_data(self, acquisition_dict, device_dict):
        """Build a PAData with dummy 256x2048 zero time-series data."""
        return pf.PAData(binary_time_series_data=np.zeros([256, 2048]),
                         meta_data_acquisition=acquisition_dict,
                         meta_data_device=device_dict)

    def test_wrong_input_type(self):
        device_dict = create_complete_device_metadata_dictionary()
        # Deliberately corrupt two device fields with wrongly-typed values;
        # the quality check must then report failure.
        device_dict[pf.MetadataDeviceTags.GENERAL.tag][pf.MetadataDeviceTags.NUMBER_OF_ILLUMINATION_ELEMENTS.tag] = "4"
        device_dict[pf.MetadataDeviceTags.GENERAL.tag][pf.MetadataDeviceTags.FIELD_OF_VIEW.tag] = "Wrong"
        acquisition_dict = create_complete_acquisition_meta_data_dictionary()
        pa_data = self._make_pa_data(acquisition_dict, device_dict)
        self.assertFalse(pf.quality_check_pa_data(pa_data))

    def test_empty_dictionaries(self):
        # Empty metadata must fail both checkers without raising.
        completeness_checker = pf.CompletenessChecker()
        self.assertFalse(completeness_checker.check_device_meta_data(dict()))
        self.assertFalse(completeness_checker.check_acquisition_meta_data(dict()))
        consistency_checker = pf.ConsistencyChecker()
        self.assertFalse(consistency_checker.check_device_meta_data(dict()))
        self.assertFalse(consistency_checker.check_acquisition_meta_data(dict()))

    def test_None_input_to_completeness_checker(self):
        completeness_checker = pf.CompletenessChecker(verbose=True)
        # None input is rejected with ValueError, non-dict input with TypeError.
        with self.assertRaises(ValueError):
            completeness_checker.check_device_meta_data(None)
        with self.assertRaises(ValueError):
            completeness_checker.check_acquisition_meta_data(None)
        with self.assertRaises(TypeError):
            completeness_checker.check_device_meta_data("None")
        with self.assertRaises(TypeError):
            completeness_checker.check_acquisition_meta_data("None")

    def test_None_input_to_consistency_checker(self):
        consistency_checker = pf.ConsistencyChecker(verbose=True)
        # Same contract as the completeness checker: ValueError for None,
        # TypeError for non-dict input.
        with self.assertRaises(ValueError):
            consistency_checker.check_device_meta_data(None)
        with self.assertRaises(ValueError):
            consistency_checker.check_acquisition_meta_data(None)
        with self.assertRaises(TypeError):
            consistency_checker.check_device_meta_data("None")
        with self.assertRaises(TypeError):
            consistency_checker.check_acquisition_meta_data("None")

    def test_log_file_writing(self):
        device_dict = create_complete_device_metadata_dictionary()
        acquisition_dict = create_complete_acquisition_meta_data_dictionary()
        # (The original also built an unused PAData here; removed.)

        # With log_file_path="" each check must create a log file; remove it
        # between checks so every call is verified independently.
        completeness_checker = pf.CompletenessChecker(verbose=False, log_file_path="")
        self.assertFalse(os.path.exists(completeness_checker.save_file_name))
        completeness_checker.check_acquisition_meta_data(acquisition_dict)
        self.assertTrue(os.path.exists(completeness_checker.save_file_name))
        os.remove(completeness_checker.save_file_name)
        completeness_checker.check_device_meta_data(device_dict)
        self.assertTrue(os.path.exists(completeness_checker.save_file_name))
        os.remove(completeness_checker.save_file_name)

        consistency_checker = pf.ConsistencyChecker(verbose=False, log_file_path="")
        self.assertFalse(os.path.exists(consistency_checker.save_file_name))
        consistency_checker.check_acquisition_meta_data(acquisition_dict)
        self.assertTrue(os.path.exists(consistency_checker.save_file_name))
        os.remove(consistency_checker.save_file_name)
        consistency_checker.check_device_meta_data(device_dict)
        self.assertTrue(os.path.exists(consistency_checker.save_file_name))
        os.remove(consistency_checker.save_file_name)

    def test_pa_data_check(self):
        device_dict = create_complete_device_metadata_dictionary()
        acquisition_dict = create_complete_acquisition_meta_data_dictionary()
        pa_data = self._make_pa_data(acquisition_dict, device_dict)
        self.assertTrue(pf.quality_check_pa_data(pa_data))

    def test_check_a_complete_and_consistent_pa_data_instance(self):
        device_dict = create_complete_device_metadata_dictionary()
        acquisition_dict = create_complete_acquisition_meta_data_dictionary()
        pa_data = self._make_pa_data(acquisition_dict, device_dict)
        completeness_checker = pf.CompletenessChecker(verbose=True)
        consistency_checker = pf.ConsistencyChecker(verbose=True)
        assert completeness_checker.check_acquisition_meta_data(pa_data.meta_data_acquisition)
        assert completeness_checker.check_device_meta_data(pa_data.meta_data_device)
        assert consistency_checker.check_binary_data(pa_data.binary_time_series_data)
        assert consistency_checker.check_acquisition_meta_data(pa_data.meta_data_acquisition)
        assert consistency_checker.check_device_meta_data(pa_data.meta_data_device)

    def test_check_a_complete_but_inconsistent_pa_data_instance(self):
        device_dict = create_complete_device_metadata_dictionary()
        # Invalidate every illuminator's pulse width (negative is inconsistent).
        for illuminator_tag in device_dict[pf.MetadataDeviceTags.ILLUMINATORS.tag]:
            device_dict[pf.MetadataDeviceTags.ILLUMINATORS.tag][illuminator_tag]\
                [pf.MetadataDeviceTags.PULSE_WIDTH.tag] = -0.1
        acquisition_dict = create_complete_acquisition_meta_data_dictionary()
        acquisition_dict[pf.MetadataAcquisitionTags.DIMENSIONALITY.tag] = "Wrong string"
        pa_data = self._make_pa_data(acquisition_dict, device_dict)
        completeness_checker = pf.CompletenessChecker(verbose=True)
        consistency_checker = pf.ConsistencyChecker(verbose=True)
        # Metadata is complete ...
        assert completeness_checker.check_acquisition_meta_data(pa_data.meta_data_acquisition)
        assert completeness_checker.check_device_meta_data(pa_data.meta_data_device)
        assert consistency_checker.check_binary_data(pa_data.binary_time_series_data)
        # ... but the corrupted fields must fail the consistency check.
        assert consistency_checker.check_acquisition_meta_data(pa_data.meta_data_acquisition) is False
        assert consistency_checker.check_device_meta_data(pa_data.meta_data_device) is False
| 42.75
| 129
| 0.721507
| 864
| 7,695
| 6.015046
| 0.131944
| 0.073889
| 0.065807
| 0.050799
| 0.860112
| 0.821628
| 0.75101
| 0.75101
| 0.668847
| 0.656148
| 0
| 0.007082
| 0.210916
| 7,695
| 179
| 130
| 42.988827
| 0.848814
| 0.017934
| 0
| 0.615942
| 0
| 0
| 0.006222
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 1
| 0.072464
| false
| 0
| 0.043478
| 0
| 0.123188
| 0.014493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
199e7322f7ac232ab6fe70e49e4da9871823eb8f
| 92
|
py
|
Python
|
dtf/features/admin.py
|
WebPowerLabs/django-trainings
|
97f7a96c0fbeb85a001201c74713f7944cb77236
|
[
"BSD-3-Clause"
] | null | null | null |
dtf/features/admin.py
|
WebPowerLabs/django-trainings
|
97f7a96c0fbeb85a001201c74713f7944cb77236
|
[
"BSD-3-Clause"
] | null | null | null |
dtf/features/admin.py
|
WebPowerLabs/django-trainings
|
97f7a96c0fbeb85a001201c74713f7944cb77236
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Feature
# Expose the Feature model in the Django admin with default ModelAdmin options.
admin.site.register(Feature)
| 15.333333
| 32
| 0.815217
| 13
| 92
| 5.769231
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119565
| 92
| 6
| 33
| 15.333333
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
19a7c5e0c78fd6810ca9e272fdf163b94b9fe73b
| 99
|
py
|
Python
|
view/base.py
|
54bp6cl6/MiaAndMax
|
1f56987b68d847c3fe2136a97a404480b7537714
|
[
"MIT"
] | null | null | null |
view/base.py
|
54bp6cl6/MiaAndMax
|
1f56987b68d847c3fe2136a97a404480b7537714
|
[
"MIT"
] | null | null | null |
view/base.py
|
54bp6cl6/MiaAndMax
|
1f56987b68d847c3fe2136a97a404480b7537714
|
[
"MIT"
] | null | null | null |
from linebot.models import TextSendMessage
def TextMessage(text):
    """Wrap *text* in a line-bot ``TextSendMessage`` payload and return it."""
    message = TextSendMessage(text)
    return message
| 24.75
| 42
| 0.818182
| 11
| 99
| 7.363636
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 99
| 4
| 43
| 24.75
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
5fd85c8be3fabbbfecd2b42d850c6a9056e67046
| 256
|
py
|
Python
|
corehq/form_processor/utils/general.py
|
akashkj/commcare-hq
|
b00a62336ec26cea1477dfb8c048c548cc462831
|
[
"BSD-3-Clause"
] | 471
|
2015-01-10T02:55:01.000Z
|
2022-03-29T18:07:18.000Z
|
corehq/form_processor/utils/general.py
|
akashkj/commcare-hq
|
b00a62336ec26cea1477dfb8c048c548cc462831
|
[
"BSD-3-Clause"
] | 14,354
|
2015-01-01T07:38:23.000Z
|
2022-03-31T20:55:14.000Z
|
corehq/form_processor/utils/general.py
|
akashkj/commcare-hq
|
b00a62336ec26cea1477dfb8c048c548cc462831
|
[
"BSD-3-Clause"
] | 175
|
2015-01-06T07:16:47.000Z
|
2022-03-29T13:27:01.000Z
|
def use_sqlite_backend(domain_name):
    """Always report the SQL backend as enabled.

    The *domain_name* argument is currently ignored; the parameter is kept
    for call-site compatibility.
    """
    return True
def is_commcarecase(obj):
    # True when obj is either a couch CommCareCase or a CommCareCaseSQL.
    # Imports are function-local, presumably to avoid import cycles at
    # module load time -- TODO confirm.
    from casexml.apps.case.models import CommCareCase
    from corehq.form_processor.models import CommCareCaseSQL
    return isinstance(obj, (CommCareCase, CommCareCaseSQL))
| 28.444444
| 60
| 0.792969
| 31
| 256
| 6.387097
| 0.709677
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140625
| 256
| 8
| 61
| 32
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.166667
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
5ffe326babf953b848694ddab3392ecda818189d
| 36
|
py
|
Python
|
build/lib/src/ttsRogue/__init__.py
|
AlexBacho/ezyt
|
5d9526d7901270ed5d97adf73ed0df9b21bf0387
|
[
"MIT"
] | 1
|
2020-10-21T09:28:56.000Z
|
2020-10-21T09:28:56.000Z
|
build/lib/src/ttsRogue/__init__.py
|
AlexBacho/ezyt
|
5d9526d7901270ed5d97adf73ed0df9b21bf0387
|
[
"MIT"
] | null | null | null |
build/lib/src/ttsRogue/__init__.py
|
AlexBacho/ezyt
|
5d9526d7901270ed5d97adf73ed0df9b21bf0387
|
[
"MIT"
] | null | null | null |
from .redditThief import RedditThief
| 36
| 36
| 0.888889
| 4
| 36
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
270588e7f5d006e36163f42b0b07b2d5540b9db7
| 23
|
py
|
Python
|
__init__.py
|
unhideschool/uhql
|
b2194388ee73d7bb4d08ef73ced44c73ab598e6a
|
[
"MIT"
] | null | null | null |
__init__.py
|
unhideschool/uhql
|
b2194388ee73d7bb4d08ef73ced44c73ab598e6a
|
[
"MIT"
] | null | null | null |
__init__.py
|
unhideschool/uhql
|
b2194388ee73d7bb4d08ef73ced44c73ab598e6a
|
[
"MIT"
] | null | null | null |
from .main import UHQL
| 11.5
| 22
| 0.782609
| 4
| 23
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2724643156c88feb9a1ae170b29408a8844f7b96
| 1,110
|
py
|
Python
|
devel/getReqs.py
|
arrismo/tripods-testing
|
4ab9a7059df84cfd1f8984960900a3cf80b5c13c
|
[
"ECL-2.0"
] | 2
|
2019-09-27T20:17:46.000Z
|
2020-06-24T22:15:52.000Z
|
devel/getReqs.py
|
arrismo/tripods-testing
|
4ab9a7059df84cfd1f8984960900a3cf80b5c13c
|
[
"ECL-2.0"
] | 3
|
2021-03-31T19:53:37.000Z
|
2021-12-13T20:44:25.000Z
|
devel/getReqs.py
|
arrismo/tripods-testing
|
4ab9a7059df84cfd1f8984960900a3cf80b5c13c
|
[
"ECL-2.0"
] | 1
|
2019-05-22T18:55:39.000Z
|
2019-05-22T18:55:39.000Z
|
#!/usr/bin/env python3
"""Install project requirements plus the platform-specific 'enchant' library."""
import os
from sys import platform

# Kept byte-identical to the original divider so the output is unchanged.
DIVIDER = "--------------------------------------------------------------------------"


def _warn(message):
    """Print *message* framed by divider lines so it stands out in the log."""
    print(DIVIDER)
    print(message)
    print(DIVIDER)


os.system("pip install -r requirements.txt")
os.system("python -m spacy download en")

# Bug fix: os.system() never raises on a failing command -- it returns the
# command's exit status -- so the original try/except could never trigger.
# Check the return code explicitly instead.
if platform == "linux":
    if os.system("apt install enchant") != 0:
        _warn("Use your package manager to install 'enchant'")
elif platform == "darwin":
    if os.system("brew install enchant") != 0:
        _warn("brew is not installed; not able to install 'enchant'")
elif platform == "win32":
    _warn("You need to install 'enchant' for the wordninja parsing to work correctly")
| 46.25
| 91
| 0.37027
| 84
| 1,110
| 4.892857
| 0.571429
| 0.170316
| 0.116788
| 0.121655
| 0.306569
| 0.160584
| 0
| 0
| 0
| 0
| 0
| 0.003175
| 0.148649
| 1,110
| 23
| 92
| 48.26087
| 0.431746
| 0.018919
| 0
| 0.454545
| 0
| 0
| 0.668199
| 0.408088
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 0.409091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
2743225e81bc4a6f2fc8b69ad506bbde64e9ac42
| 72
|
py
|
Python
|
pylinux/common/default_settings.py
|
ruiruige/pylinux
|
a0a85e8928d7847c0596b21c9213bc7863037297
|
[
"MIT"
] | null | null | null |
pylinux/common/default_settings.py
|
ruiruige/pylinux
|
a0a85e8928d7847c0596b21c9213bc7863037297
|
[
"MIT"
] | null | null | null |
pylinux/common/default_settings.py
|
ruiruige/pylinux
|
a0a85e8928d7847c0596b21c9213bc7863037297
|
[
"MIT"
] | null | null | null |
# /usr/bin/env python
# coding=utf-8
def disable_iptables():
    """Stub -- does nothing yet.

    The name suggests it should disable the iptables firewall service;
    TODO: implement or remove.
    """
    pass
| 10.285714
| 23
| 0.666667
| 11
| 72
| 4.272727
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.194444
| 72
| 6
| 24
| 12
| 0.793103
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
27510287083bc4d0727004136817af534a9834b7
| 60
|
py
|
Python
|
client/__init__.py
|
siroossarmadi/client-python
|
55f80c5461c679f96820eb5ab678c7ad102fd40d
|
[
"MIT"
] | 1
|
2020-06-22T07:28:04.000Z
|
2020-06-22T07:28:04.000Z
|
client/__init__.py
|
siroossarmadi/client-python
|
55f80c5461c679f96820eb5ab678c7ad102fd40d
|
[
"MIT"
] | null | null | null |
client/__init__.py
|
siroossarmadi/client-python
|
55f80c5461c679f96820eb5ab678c7ad102fd40d
|
[
"MIT"
] | null | null | null |
from .client import Client
from .objects import Key, Object
| 20
| 32
| 0.8
| 9
| 60
| 5.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 60
| 2
| 33
| 30
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
276f4a20a68897edca3f4bc68d21505d3fb0401a
| 293
|
py
|
Python
|
src/setting.py
|
Parham-sagharchi/rabbitmqCeleryDocker
|
262744ff3390e475774ee33b6576b65c6909e35a
|
[
"MIT"
] | null | null | null |
src/setting.py
|
Parham-sagharchi/rabbitmqCeleryDocker
|
262744ff3390e475774ee33b6576b65c6909e35a
|
[
"MIT"
] | null | null | null |
src/setting.py
|
Parham-sagharchi/rabbitmqCeleryDocker
|
262744ff3390e475774ee33b6576b65c6909e35a
|
[
"MIT"
] | null | null | null |
import os
from dotenv import load_dotenv, find_dotenv
# Load variables from the nearest .env file into os.environ.
load_dotenv(find_dotenv())
# RabbitMQ credentials and target configuration. Each value is None when the
# corresponding variable is absent from the environment / .env file.
RABBITMQ_DEFAULT_USER = os.environ.get("RABBITMQ_DEFAULT_USER")
RABBITMQ_DEFAULT_PASS = os.environ.get("RABBITMQ_DEFAULT_PASS")
TARGET_FILE = os.environ.get("TARGET_FILE")
LOCAL_IP = os.environ.get("LOCAL_IP")
| 29.3
| 63
| 0.812287
| 45
| 293
| 4.933333
| 0.355556
| 0.27027
| 0.216216
| 0.18018
| 0.243243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075085
| 293
| 9
| 64
| 32.555556
| 0.819188
| 0
| 0
| 0
| 0
| 0
| 0.208191
| 0.143345
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.142857
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
279e223c0171a55ced9bd19491ac93ce64e75e2a
| 130
|
py
|
Python
|
nose2/tests/functional/support/scenario/tests_in_package/setup.py
|
deeplow/nose2
|
eb0394160e24afe760e984d93dbece8351dbae7a
|
[
"BSD-2-Clause"
] | 637
|
2015-01-12T02:02:53.000Z
|
2022-03-30T19:47:48.000Z
|
nose2/tests/functional/support/scenario/tests_in_package/setup.py
|
deeplow/nose2
|
eb0394160e24afe760e984d93dbece8351dbae7a
|
[
"BSD-2-Clause"
] | 276
|
2015-01-02T19:14:06.000Z
|
2022-03-18T04:03:08.000Z
|
nose2/tests/functional/support/scenario/tests_in_package/setup.py
|
deeplow/nose2
|
eb0394160e24afe760e984d93dbece8351dbae7a
|
[
"BSD-2-Clause"
] | 127
|
2015-01-08T12:02:10.000Z
|
2022-01-10T20:52:29.000Z
|
from setuptools import find_packages, setup
setup(name="pkg1", packages=find_packages(), test_suite="nose2.collector.collector")
| 32.5
| 84
| 0.807692
| 17
| 130
| 6
| 0.705882
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016529
| 0.069231
| 130
| 3
| 85
| 43.333333
| 0.826446
| 0
| 0
| 0
| 0
| 0
| 0.223077
| 0.192308
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fd7230cf3156ba49da3cb29b6a47965b06eec934
| 292
|
py
|
Python
|
app/shared/exc.py
|
neurothrone/project-dot
|
20889075611bed645689a76a30257f96e4b55988
|
[
"MIT"
] | null | null | null |
app/shared/exc.py
|
neurothrone/project-dot
|
20889075611bed645689a76a30257f96e4b55988
|
[
"MIT"
] | null | null | null |
app/shared/exc.py
|
neurothrone/project-dot
|
20889075611bed645689a76a30257f96e4b55988
|
[
"MIT"
] | null | null | null |
class UserRepositoryError(Exception):
    """Base class for user-repository failures.

    Bug fix: the original ``__init__`` executed ``raise Exception(message)``,
    so constructing (and therefore raising) this class or any subclass
    actually raised a plain ``Exception`` -- handlers written as
    ``except UserDoesNotExist:`` could never match. Forward the message to
    ``Exception.__init__`` instead so the intended exception type is raised.
    """

    def __init__(self, message: str):
        super().__init__(message)
class UserDoesNotExist(UserRepositoryError):
    """Raised when no user matches the requested identity."""
class UsernameAlreadyTakenError(UserRepositoryError):
    """Raised when the requested username is already in use."""
class EmailAlreadyTakenError(UserRepositoryError):
    """Raised when the requested email address is already in use."""
| 18.25
| 53
| 0.773973
| 23
| 292
| 9.652174
| 0.565217
| 0.310811
| 0.252252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160959
| 292
| 15
| 54
| 19.466667
| 0.906122
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0.333333
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
fd9c073ae04681aa75bdbf4e44df9a9972672b10
| 146
|
py
|
Python
|
indexed_img.py
|
albert-tomanek/gan-indexed
|
f0c0c2ee003979ca51a6225576f243ed09294578
|
[
"MIT"
] | null | null | null |
indexed_img.py
|
albert-tomanek/gan-indexed
|
f0c0c2ee003979ca51a6225576f243ed09294578
|
[
"MIT"
] | null | null | null |
indexed_img.py
|
albert-tomanek/gan-indexed
|
f0c0c2ee003979ca51a6225576f243ed09294578
|
[
"MIT"
] | null | null | null |
import numpy as np
def onehot_to_indexed(imgs):
    """Collapse one-hot encoded images to integer class indices.

    The argmax is taken over the last axis, so the trailing channel
    dimension is removed.
    """
    return np.argmax(imgs, axis=-1)
def idx_to_rgb(imgs_idx, palette):
    """Map indexed images back to color by looking each index up in *palette*.

    Equivalent to ``palette[imgs_idx]``: every integer in *imgs_idx* selects
    a row of *palette*, so the output gains palette's trailing channel axis.
    """
    return np.take(palette, imgs_idx, axis=0)
| 18.25
| 34
| 0.746575
| 25
| 146
| 4.12
| 0.64
| 0.135922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.157534
| 146
| 7
| 35
| 20.857143
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fdb44700835c9ba73321e1036b07884b2f7a0762
| 197
|
py
|
Python
|
users/admin.py
|
nhy17-thu/ImageProcessingWebsite
|
d90eaf115c870dd359bf5315f7b56f042270f31f
|
[
"MIT"
] | 1
|
2021-03-29T12:40:39.000Z
|
2021-03-29T12:40:39.000Z
|
users/admin.py
|
nhy17-thu/FinalProject_Upgrade
|
d90eaf115c870dd359bf5315f7b56f042270f31f
|
[
"MIT"
] | null | null | null |
users/admin.py
|
nhy17-thu/FinalProject_Upgrade
|
d90eaf115c870dd359bf5315f7b56f042270f31f
|
[
"MIT"
] | 1
|
2021-03-29T12:40:42.000Z
|
2021-03-29T12:40:42.000Z
|
from django.contrib import admin

# Fix: the admin import appeared twice in the original; keep it once.
from .models import Pic, User

# Register both models so they can be managed through the Django admin site.
admin.site.register(User)
admin.site.register(Pic)
| 19.7
| 32
| 0.80203
| 30
| 197
| 5.266667
| 0.4
| 0.126582
| 0.21519
| 0.291139
| 0.35443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126904
| 197
| 10
| 33
| 19.7
| 0.918605
| 0.13198
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fdb759723eef36c09f6b84853130622fc8b04c24
| 71
|
py
|
Python
|
frictionless/step.py
|
kant/frictionless-py
|
09cc98e1966d6f97f4eecb47757f45f8a946c5e7
|
[
"MIT"
] | null | null | null |
frictionless/step.py
|
kant/frictionless-py
|
09cc98e1966d6f97f4eecb47757f45f8a946c5e7
|
[
"MIT"
] | null | null | null |
frictionless/step.py
|
kant/frictionless-py
|
09cc98e1966d6f97f4eecb47757f45f8a946c5e7
|
[
"MIT"
] | null | null | null |
class Step:
    """Placeholder for a pipeline step.

    TODO: rebase on API to support props validation.
    """
| 17.75
| 49
| 0.732394
| 11
| 71
| 4.727273
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225352
| 71
| 3
| 50
| 23.666667
| 0.945455
| 0.661972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
fdbdf9ed0108be7b6ebfbc13dac64bd19cd8aa81
| 24,219
|
py
|
Python
|
src/align.py
|
lionking0000/recYnH
|
da1e4898c4148b399ed0f0a0848f5748679b7b98
|
[
"MIT"
] | 2
|
2020-03-02T11:09:31.000Z
|
2020-03-12T13:48:40.000Z
|
src/align.py
|
lionking0000/recYnH
|
da1e4898c4148b399ed0f0a0848f5748679b7b98
|
[
"MIT"
] | null | null | null |
src/align.py
|
lionking0000/recYnH
|
da1e4898c4148b399ed0f0a0848f5748679b7b98
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import os
import threading
import fasta
import subprocess
# Module-wide tunables for the alignment pipeline.
VERBOSE = False # True   -- set True to echo each shell command before running it
NUM_THREADS = 1 #           threads passed to blastn via -num_threads
BLASTN_E_VALUE = "1e-8"   # e-value cutoff passed to blastn via -evalue
def getCommandOutput(command, exedir = None):
    """Launch *command* through the shell and return the Popen handle.

    If *exedir* is given, the working directory is changed first. NOTE:
    os.chdir affects the whole interpreter process, not just the child.
    The caller is responsible for consuming task.stdout / waiting on task.
    """
    if exedir is not None:  # fix: identity comparison instead of '!= None'
        os.chdir( exedir )
    # shell=True means *command* is interpreted by the shell -- it must
    # never be built from untrusted input.
    task = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
    return task
def run_cmd( cmd ):
    # Run *cmd* through the shell; echo it first when VERBOSE is set.
    # NOTE: Python 2 print statement -- this module is Python 2 code.
    # The exit status of os.system is ignored.
    if VERBOSE: print cmd
    os.system( cmd )
def FastqToFasta( fastq, fasta ):
    '''
    Convert the FASTQ file *fastq* into FASTA format written to *fasta*.
    Each 4-line FASTQ record (header / sequence / '+' / quality) becomes a
    '>'-prefixed header line plus the sequence line; the '+' and quality
    lines are discarded. NOTE: the *fasta* parameter shadows the module-level
    `fasta` import inside this function. Example input record:

    @M03766:53:000000000-B63MG:1:1101:12798:1849 1:N:0:4
    AACGTAAAATGATATAAATATCAATATATTAAATTATATTTTGCATAAAAAACAGTCTACATAATACTGTAAATCACAACATATCCTGTCACT
    +
    AFGFHHHFHHFHH5FDGBGEGGBD55B4FG444BFG444FFF434B43433///?43BBD4B4?F44FGH44B443B33?/B1B?22?B1?12
    '''
    f = open( fastq )
    fout = open( fasta, "w" )
    # Consume records in strides of four lines (Python 2: xreadlines/next).
    for line in f.xreadlines():
        print >> fout, ">%s" % line[:-1]   # header line, '>' prepended
        print >> fout, f.next()[:-1]       # sequence line
        f.next()                           # '+' separator -- skipped
        f.next()                           # quality line -- skipped
    fout.close()
    f.close()
def GenerateLastNts( fasta_file, length = 150 ):
    # Write the last *length* nucleotides (upper-cased) of every sequence in
    # *fasta_file* to a new FASTA file named '<fasta_file>.-<length>'.
    # Returns the path of the file written.
    output_file = fasta_file
    output_file += ".-%d" % length
    fa = fasta.read_fasta( fasta_file )
    # Reverse lookup (suffix -> id); built but never read in this function.
    lastXnt_dic = {}
    fout = open( output_file, "w" )
    for id in fa:
        lastXnt = fa[id][-length:].upper()
        print >> fout, ">%s\n%s" % ( id, lastXnt ) #, lastXnt in lastXnt_dic
        lastXnt_dic[ lastXnt ] = id
    fout.close()
    return output_file
def CheckBlastnDB( fasta_file, lastnt_length ):
    # Return True when all three blastn database index files (.nhr, .nin,
    # .nsq) already exist for '<fasta_file>.-<lastnt_length>'.
    print "[ Checking BlastN Database ]", fasta_file, lastnt_length
    bChecked = True
    if os.path.exists( fasta_file + ".-%d.nhr" % lastnt_length ) == False:
        bChecked = False
    if os.path.exists( fasta_file + ".-%d.nin" % lastnt_length ) == False:
        bChecked = False
    if os.path.exists( fasta_file + ".-%d.nsq" % lastnt_length ) == False:
        bChecked = False
    return bChecked
def MakeBlastnDB( fasta_file, lastnt_length ):
    # Build a nucleotide blastn database from '<fasta_file>.-<lastnt_length>'
    # by shelling out to makeblastdb (must be on PATH).
    print "[ Making BlastN Database ]", fasta_file, lastnt_length
    command = "makeblastdb -in %s.-%d -dbtype nucl" % ( fasta_file, lastnt_length )
    run_cmd( command )
def BLASTN_NEW( fasta_file, lastnt_length, filepath1, filepath2, output_file ):
    #print ( fasta_file, filepath1, filepath2, output_file )
    '''
    Parse two blastn tabular (-outfmt 6) output files -- one per read of a
    paired-end run -- and write a protein-protein interaction (PPI) count
    matrix to *output_file*. Per-target read counts are also written next to
    each input as '<filepath>.cnt.txt'. Read-pair targets are joined on the
    query name (QNAME); only the first hit per query passing the filters is
    kept. NOTE: Python 2 code (print >>, xreadlines, dict.keys().sort()).
    '''
    ppi_cnt_dic = {}
    read1_cnt_dic = {}
    read2_cnt_dic = {}
    read1_dic = {}
    read2_dic = {}
    '''
    ['M03766:33:000000000-AT3T3:1:1101:21081:6509', '113', 'NMI', '880', '36', '54S41M', 'IGF2', '452', '0', 'ATTTTGATCATATGACTGCTCTGTTTCATTTTTTTCAATAAACCCTTTACAATTAAGTGTTCTCTAGGTCAACCTCACATAGCATACTTTGAAGA', 'HHFFHHHHFDHHHGHHHHHHHHEHHHHHGGHFHGBHGHHGHHEG4GHHHHHHHHHHHHHFFFG3GEBGBFHHHHGHHHHHGHHFHFHGHHGHHHH', 'AS:i:82', 'XN:i:0', 'XM:i:0', 'XO:i:0', 'XG:i:0', 'NM:i:0', 'MD:Z:41', 'YS:i:174', 'YT:Z:DP']
    ['M03766:33:000000000-AT3T3:1:1101:21081:6509', '177', 'IGF2', '452', '36', '5S87M', 'NMI', '880', '0', 'TCTCTAGGCCAAACGTCACCGTCCCCTGATTGCTCTACCCACCCAAGACCCCGCCCACGGGGGCGCCCCCCCAGAGATGGCCAGCAATCGGA', '/BBB/BBBFFFEFFFEEFAFB?FFFFBFFFFFFFEB;@-DFFFFFFD@FFFFEFFFFAFFFFDAFGCGGHGGHHHHHHHFFHHHGFEGFHHH', 'AS:i:174', 'XN:i:0', 'XM:i:0', 'XO:i:0', 'XG:i:0', 'NM:i:0', 'MD:Z:87', 'YS:i:82', 'YT:Z:DP']^C
    '''
    #if len( sys.argv ) < 2:
    #    print "python SAM.py ../data/roth2016_control_set_plus_control.fa output/2016-12-22_MiSeq/Friedrich/17543_S1.sam"
    #    sys.exit(0)
    total_cnt = 0
    #fa = fasta.read_fasta_file( sys.argv[2] )
    #filepath1 = sys.argv[3]
    #filepath2 = sys.argv[4]
    # Reference ids come from the suffix FASTA produced by GenerateLastNts.
    fa = fasta.read_fasta_file( "%s.-%d" % ( fasta_file, lastnt_length ) )
    #filepath1 = sys.argv[3]
    #filepath2 = sys.argv[4]
    # --- Pass 1: first accepted hit per query in read-1 blastn output ---
    PREV_QNAME = ""
    f = open( filepath1 )
    read_cnt = 0
    for line in f.xreadlines():
        #if read_cnt % 10000 == 0: print read_cnt
        read_cnt += 1
        ## READ 1
        # @M03766:53:000000000-B63MG:1:1101:13982:1738 cask_p142 98.969 97 1 0 1 97 99 3 3.06e-50 184
        [ QNAME, TARGET, PERCENT, LENGTH, MISMATCH, GAPOPEN, QSTART, QEND, SSTART, SEND, EVALUE, BITSCORE ] = line[:-1].split("\t")
        if QNAME == PREV_QNAME: continue
        if int( SEND ) > int ( SSTART ): continue # DEFAULT CONDITION. ==> 1968532 for 2016-12-22_MiSeq/Blastn/17543_S1.ppi.txt
        if int( QSTART ) > 10 or int( SSTART ) < 90: continue ## NEW CONDITION. 2017-10-13 => 1878423
        #if int( QSTART ) >= 5 or int( SSTART ) <= 95: continue ## NEW3 CONDITION. 2017-10-21
        #if int( QSTART ) + int( SSTART ) < 90: continue # NEW4
        read1_dic[ QNAME ] = TARGET
        read1_cnt_dic[ TARGET ] = read1_cnt_dic.get( TARGET, 0 ) + 1
        PREV_QNAME = QNAME
    f.close()
    # --- Pass 2: same filtering for the read-2 blastn output ---
    PREV_QNAME = ""
    read_cnt = 0
    f = open( filepath2 )
    for line in f.xreadlines():
        #if read_cnt % 10000 == 0: print read_cnt
        read_cnt += 1
        ## READ 2
        # @M03766:53:000000000-B63MG:1:1101:13982:1738 cask_p142 98.969 97 1 0 1 97 99 3 3.06e-50 184
        [ QNAME, TARGET, PERCENT, LENGTH, MISMATCH, GAPOPEN, QSTART, QEND, SSTART, SEND, EVALUE, BITSCORE ] = line[:-1].split("\t")
        if QNAME == PREV_QNAME: continue
        if int( SEND ) > int ( SSTART ): continue # DEFAULT CONDITION. ==> 1968532 for 2016-12-22_MiSeq/Blastn/17543_S1.ppi.txt
        if int( QSTART ) > 10 or int( SSTART ) < 90: continue ## NEW CONDITION. 2017-10-13
        #if int( QSTART ) >= 5 or int( SSTART ) <= 95: continue ## NEW3 CONDITION. 2017-10-21
        #if int( QSTART ) + int( SSTART ) < 90: continue # NEW4
        read2_dic[ QNAME ] = TARGET
        read2_cnt_dic[ TARGET ] = read2_cnt_dic.get( TARGET, 0 ) + 1
        PREV_QNAME = QNAME
    f.close()
    # --- Join the two passes on QNAME to count (target1, target2) pairs ---
    for QNAME in read1_dic:
        TARGET2 = read2_dic.get( QNAME, "" )
        if TARGET2 == "": continue
        TARGET1 = read1_dic[ QNAME ]
        ppi_cnt_dic[ ( TARGET1, TARGET2 ) ] = ppi_cnt_dic.get( ( TARGET1, TARGET2 ), 0 ) + 1
        total_cnt += 1
    f.close()
    # --- Emit the PPI matrix, rows = read-1 targets, cols = read-2 targets ---
    id_list = fa.keys()
    id_list.sort()
    fout = open( output_file, "w" )
    print >> fout, "# This file is generated by BLASTN_NEW"
    print >> fout, "DB(Read 1) \ AD(Read 2)\t"+"\t".join( id_list )
    for id1 in id_list:
        output = id1
        for id2 in id_list:
            cnt = ppi_cnt_dic.get( (id1, id2 ), 0 )
            output += "\t%d" % cnt
        print >> fout, output
    fout.close()
    # print total_cnt # only for debug
    # output/$1/Blastn/$2.blastn
    # --- Per-target read counts written alongside each blastn input ---
    fout1 = open( "%s.cnt.txt" % filepath1, "w" )
    fout2 = open( "%s.cnt.txt" % filepath2, "w" )
    for id in id_list:
        print >> fout1, "%s\t%d" % ( id, read1_cnt_dic.get( id, 0 ) )
        print >> fout2, "%s\t%d" % ( id, read2_cnt_dic.get( id, 0 ) )
    fout1.close()
    fout2.close()
def align_subprocess( original_fasta, lastnt_length, fasta_file, fastq_file ):
    """Convert one fastq file to fasta and align it with blastn-short.

    The reads are converted to ``fasta_file`` and searched against the
    pre-built "<original_fasta>.-<lastnt_length>" blastn database; the
    tabular (outfmt 6) hits are written to "<fasta_file>.blastn".
    """
    # blastn consumes fasta, so convert the raw reads first.
    FastqToFasta( fastq_file, fasta_file )
    # Short-read search; thread count and e-value cutoff come from module globals.
    blastn_cmd = ( "blastn -num_threads %d -db %s.-%d -query %s -task blastn-short"
                   " -outfmt 6 -max_target_seqs 5 -evalue %s > %s.blastn" )
    run_cmd( blastn_cmd % ( NUM_THREADS, original_fasta, lastnt_length, fasta_file, BLASTN_E_VALUE, fasta_file ) )
def run_Y2H( args ):
    """Run the yeast two-hybrid (Y2H) paired-read mapping pipeline.

    Steps: (1) ensure last-nt blastn databases exist for both reference
    fastas, (2) trim 5' adaptors from both fastq files with cutadapt,
    (3) blastn-map read 1 and read 2 in parallel threads, (4) build the
    bait x prey pair-count matrix via BLASTN_NEW().

    args is an argparse-style namespace; expected attributes:
    fasta1/fasta2, lastnt1/lastnt2, fastq1/fastq2, output, name, relaxed.
    """
    if args.fasta2 == None:
        # A single fasta may serve as both bait and prey reference.
        args.fasta2 = args.fasta1
    try:
        args.lastnt1 = int( args.lastnt1 )
        args.lastnt2 = int( args.lastnt2 )
    except:
        # NOTE(review): non-numeric last-nt arguments silently terminate the
        # program here; an error message would make this easier to debug.
        exit(0)
    # Blastn check: (re)build the trimmed reference + blastn DB if missing.
    if CheckBlastnDB( args.fasta1, args.lastnt1 ) == False:
        GenerateLastNts( args.fasta1, args.lastnt1 )
        MakeBlastnDB( args.fasta1, args.lastnt1 )
    if CheckBlastnDB( args.fasta2, args.lastnt2 ) == False:
        GenerateLastNts( args.fasta2, args.lastnt2 )
        MakeBlastnDB( args.fasta2, args.lastnt2 )
    [ dirname1, fq1 ] = os.path.split( args.fastq1 )
    [ dirname2, fq2 ] = os.path.split( args.fastq2 )
    if args.output == None:
        # Default the output folder to the directory of the first fastq.
        args.output = dirname1
    #if os.path.exists( args.output ) == None:
    #	sys.exit(0)
    #	#args.output = dirname1
    # Drop the ".gz" suffix so the trimmed copies get plain fastq names.
    if fq1[-3:] == ".gz":
        fq1 = fq1[:-3]
    if fq2[-3:] == ".gz":
        fq2 = fq2[:-3]
    # make output folder
    if os.path.exists( args.output ) == False:
        os.makedirs( args.output )
    temp_dir = os.path.join( args.output, "tmp" )
    if os.path.exists( temp_dir ) == False:
        os.makedirs( temp_dir )
    # From here on fq1/fq2 are the trimmed fastq paths inside tmp/.
    fq1 = os.path.join( temp_dir, fq1 )
    fq2 = os.path.join( temp_dir, fq2 )
    fa1 = fq1 + ".fa"
    fa2 = fq2 + ".fa"
    # remove 5' end adaptor sequences in both fastq file; remove reads if any of read does not have an adaptor sequence ( Temporarily now using fastq files in Friedrich folder which are already trimmed )
    # -m 15 : Discard trimmed reads that are shorter than LENGTH.
    # --discard-untrimmed : Discard reads that do not contain the adapter.
    #
    # bait --CGCTGCAGGTCGACGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT
    # prey GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT
    # *** ** **** ****************************************
    #
    #cmd = "cutadapt -g CGCTGCAGGTCGACGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -G GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -o output/$1/$6/$2.fastq -p output/$1/$6/$3.fastq -m 15 --discard-untrimmed ./fastq/$2.fastq.gz ./fastq/$3.fastq.gz"
    print "[ Removing adaptor sequences ]", fq1, fq2
    cmd = "cutadapt --quiet -g CGCTGCAGGTCGACGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -G GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -o %s -p %s -m 15 --discard-untrimmed %s %s" % ( fq1, fq2, args.fastq1, args.fastq2 )
    run_cmd( cmd )
    '''
    # convert fastq to fasta ( Temporarily now using fastq file generated in Friedrich folder; since it is the same fastq. But We need to change it to Blastn for general cases )
    cmd = "python main.py fastq_to_fasta %s > %s" % ( fq1, fa1 )
    run_cmd( cmd )
    cmd = "python main.py fastq_to_fasta %s > %s" % ( fq2, fa2 )
    run_cmd( cmd )
    # blastn-short search
    cmd = "blastn -db %s -query %s -task blastn-short -outfmt 6 -max_target_seqs 20 -evalue 1e-8 > %s.blastn" % ( args.fasta1, fa1, fa1 )
    run_cmd( cmd )
    cmd = "blastn -db %s -query %s -task blastn-short -outfmt 6 -max_target_seqs 20 -evalue 1e-8 > %s.blastn" % ( args.fasta2, fa2, fa2 )
    run_cmd( cmd )
    '''
    # multi-threading: map both reads concurrently (blastn is the slow step).
    print "[ Mapping sequences into reference baits and preys sequences ]", fq1, fq2
    th1 = threading.Thread(target=align_subprocess, args = ( args.fasta1, args.lastnt1, fa1, fq1 ) )
    th1.start()
    th2 = threading.Thread(target=align_subprocess, args = ( args.fasta2, args.lastnt2, fa2, fq2 ) )
    th2.start()
    th1.join()
    th2.join()
    # python parse blastn output and make ppi map
    # currently stringent case, no restriction for position cases
    # maybe ignoring orientation could be added in the future
    print "[ Making a pair matrix ]", os.path.join( args.output, args.name )
    if args.relaxed == True:
        # no restriction for aligned position: delegate to the external script.
        cmd = "main.py BLASTN_RELAXED %s.-%d %s.blastn %s.blastn > %s/%s" % ( args.fasta1, args.lastnt1, fa1, fa2, args.output, args.name )
        run_cmd( cmd )
    else:
        # very stringent case
        #cmd = "main.py BLASTN_NEW %s %s.blastn %s.blastn > %s/%s" % ( args.fasta1, fa1, fa2, args.output, args.name )
        #run_cmd( cmd )
        BLASTN_NEW( args.fasta1, args.lastnt1, "%s.blastn" % fa1, "%s.blastn" % fa2, "%s/%s" % ( args.output, args.name ) )
def ReadSequenceFile( filepath ):
    """Open a (possibly gzipped) sequence file for streaming.

    Spawns `zcat` for ".gz" files, plain `cat` otherwise, via
    getCommandOutput(); callers read the lines from the returned
    process handle's stdout.
    """
    reader = "zcat" if filepath[-3:] == ".gz" else "cat"
    return getCommandOutput( "%s %s" % ( reader, filepath ) )
def ReverseComplement(seq):
    """Return the reverse complement of an upper-case DNA string.

    Any character outside A/T/G/C raises KeyError, matching the original
    strict lookup behaviour.
    """
    complement = {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'}
    # Complementing then reversing is equivalent to reversing then complementing.
    return "".join(complement[base] for base in seq)[::-1]
def align_RNA_subprocess( original_fasta, lastnt_length, fasta_file, fastq_file ):
    """Map RNA reads by exact motif matching (no blastn step).

    Writes the exact-match table to "<fasta_file>.exact".  The
    lastnt_length parameter is unused here; it is kept so the signature
    mirrors align_subprocess() for the threading call sites.
    """
    RNAMapping( fastq_file, original_fasta, fasta_file + ".exact" )
def BLASTN_RNA( fasta1, fasta2, exact_file1, blastn_file2, output_file ):
    '''
    Parse read-1 exact matches and read-2 blastn hits into an
    RNA x protein pair-count matrix.

    fasta1       : RNA reference fasta (matrix rows)
    fasta2       : protein reference fasta (matrix columns)
    exact_file1  : tabular file from RNAMapping() for read 1 (bait / RNA side)
    blastn_file2 : blastn -outfmt 6 file for read 2 (prey / protein side)
    output_file  : tab-separated count matrix written here
    '''
    ppi_cnt_dic = {}
    read1_dic = {}
    read2_dic = {}
    '''
    Example paired SAM records for one read pair (QNAME M03766:33:...:6509),
    trimmed from the original notes: read 1 aligned to NMI, read 2 to IGF2.
    '''
    #if len( sys.argv ) < 2:
    #	print "python SAM.py ../data/roth2016_control_set_plus_control.fa output/2016-12-22_MiSeq/Friedrich/17543_S1.sam"
    #	sys.exit(0)
    total_cnt = 0
    RNA_fa = fasta.read_fasta( fasta1 )
    fa = fasta.read_fasta( fasta2 )
    filepath1 = exact_file1 # sys.argv[3] # read1 = bait
    filepath2 = blastn_file2 #sys.argv[4] # read2 = prey
    # Read1 = bait = RNA part
    PREV_QNAME = ""
    f = open( filepath1 )
    read_cnt = 0
    for line in f.xreadlines():
        #if read_cnt % 10000 == 0: print read_cnt
        read_cnt += 1
        ## READ 1
        # @M03766:53:000000000-B63MG:1:1101:13982:1738 cask_p142 98.969 97 1 0 1 97 99 3 3.06e-50 184
        # Standard blastn outfmt-6 columns.
        [ QNAME, TARGET, PERCENT, LENGTH, MISMATCH, GAPOPEN, QSTART, QEND, SSTART, SEND, EVALUE, BITSCORE ] = line[:-1].split("\t")
        # Skip further hits right after a read was accepted (file is
        # presumably grouped by QNAME — TODO confirm).
        if QNAME == PREV_QNAME: continue
        if int( SEND ) > int ( SSTART ): continue # don't allow both direction for RNA
        read1_dic[ QNAME ] = TARGET
        PREV_QNAME = QNAME
    f.close()
    # Read2 = prey = Protein part
    PREV_QNAME = ""
    read_cnt = 0
    f = open( filepath2 )
    for line in f.xreadlines():
        #if read_cnt % 10000 == 0: print read_cnt
        read_cnt += 1
        ## READ 2
        # @M03766:53:000000000-B63MG:1:1101:13982:1738 cask_p142 98.969 97 1 0 1 97 99 3 3.06e-50 184
        [ QNAME, TARGET, PERCENT, LENGTH, MISMATCH, GAPOPEN, QSTART, QEND, SSTART, SEND, EVALUE, BITSCORE ] = line[:-1].split("\t")
        if QNAME == PREV_QNAME: continue
        if int( SEND ) > int ( SSTART ): continue
        read2_dic[ QNAME ] = TARGET
        PREV_QNAME = QNAME
    f.close()
    # Count (RNA, protein) pairs for reads that mapped on both sides.
    for QNAME in read1_dic:
        TARGET2 = read2_dic.get( QNAME, "" )
        if TARGET2 == "": continue
        TARGET1 = read1_dic[ QNAME ]
        ppi_cnt_dic[ ( TARGET1, TARGET2 ) ] = ppi_cnt_dic.get( ( TARGET1, TARGET2 ), 0 ) + 1
        total_cnt += 1
    f.close()
    # Emit the matrix: RNA ids as rows, protein ids as columns, both sorted.
    RNA_id_list = RNA_fa.keys()
    RNA_id_list.sort()
    id_list = fa.keys()
    id_list.sort()
    fo = open( output_file, "w" )
    print >> fo, "# This file is generated by BLASTN_RNA"
    print >> fo, "DB(Read 1) \ AD(Read 2)\t"+"\t".join( id_list )
    for RNA_id1 in RNA_id_list:
        output = RNA_id1
        for id2 in id_list:
            cnt = ppi_cnt_dic.get( (RNA_id1, id2 ), 0 )
            output += "\t%d" % cnt
        print >> fo, output
    fo.close()
def RNAMapping( fastq_file, fasta_file, output_file ):
    '''
    Exact-match RNA reads against reference motifs.

    Input format: fastq.gz (reads) and a fasta file (RNA references)
    Output format: blastn outfmt-6-like tabular rows, one per matched read
    python library_checker.py RNA_mapping ../2017-10-19_MiSeq/S1_W_R1.fastq.gz ../data/KRP_set.fa > ./output/2017-10-19_MiSeq/Blastn/S1_W_R1.exact
    python library_checker.py RNA_mapping ../2017-10-19_MiSeq/S2_WH_R1.fastq.gz ../data/KRP_set.fa > ./output/2017-10-19_MiSeq/Blastn/S2_WH_R1.exact
    '''
    #===============================
    # Read Fasta file (RNA) and make motifs for exact search
    #===============================
    # GCAGGCATGCAAGCTGCC
    #ggcagcttgcatgcctg
    #gctagaactagtggatccc
    # GCAGGCATGCAAGCTGCC TCCTCGTTCATGGGGAATAATTGCAATCCCCGATCCCCAT GGGATCCACTAGTTCTAGCCGG "
    # A read is expected to start with this fixed prefix followed by the
    # reverse complement of the reference RNA (TODO confirm against the
    # library construct).
    prefix = "GCAGGCATGCAAGCTGCCCGGG"
    motifs = {}  # motif string -> reference RNA id
    fa = fasta.read_fasta( fasta_file ) # RNA fasta file
    for id in fa:
        seq = fa[id].upper()
        rc_seq = ReverseComplement( seq )
        motif = prefix + rc_seq[:50] # using only 50 nt for all cases (This can be changed)
        motifs[ motif ] = id
    #===============================
    # Read Fastq file (RNA, Read1) and Searching id with exact matching the pattern (motif)
    #===============================
    fo = open( output_file, "w" )
    cnt_dic = {}  # read id -> number of motif matches (first match only, due to break)
    read1 = ReadSequenceFile( fastq_file )
    # Consume the fastq 4 lines at a time: header, sequence, '+', quality.
    for line in read1.stdout.xreadlines():
        id = line.split()[0]
        seq = read1.stdout.next()[:-1]
        read1.stdout.next()
        read1.stdout.next()
        for motif in motifs:
            # Exact prefix match against the read; no mismatches tolerated.
            if seq[:len(motif)] == motif:
                # Emit a blastn-outfmt-6-shaped row so downstream parsers
                # (BLASTN_RNA) can treat it like a blastn hit; alignment
                # coordinates/e-value/bitscore are fixed placeholder values.
                print >> fo, "%s\t%s\t100.00\t%d\t0\t0\t1\t50\t50\t1\t1e-10\t100.0" % ( id, motifs[motif], len(motif) )
                # @M03766:67:000000000-BGPDM:1:1101:14936:1730 1NYB_A:B 100.00 24 0 0 5 28 24 1 9e-10 48.1
                # @M03766:67:000000000-BGPDM:1:1101:16561:1732 2PJP_A:B 100.00 23 0 0 5 27 23 1 3e-09 46.1
                cnt_dic[ id ] = cnt_dic.get( id, 0 ) + 1
                break
    fo.close()
    read1.stdout.close()
    read1.kill()
def run_Y3H( args ):
    """Run the yeast three-hybrid (Y3H) pipeline: RNA bait x protein prey.

    Like run_Y2H(), but read 1 (the RNA side) is mapped by exact motif
    matching (align_RNA_subprocess) instead of blastn, and the final
    matrix is produced by BLASTN_RNA().
    """
    if args.fasta2 == None: # temporarily allow to have one RNA+Protein combined fasta
        args.fasta2 = args.fasta1
    try:
        args.lastnt1 = int( args.lastnt1 )
        args.lastnt2 = int( args.lastnt2 )
    except:
        # NOTE(review): non-numeric last-nt arguments silently terminate the
        # program here; an error message would make this easier to debug.
        exit(0)
    # Blastn check: only the protein reference (fasta2) needs a blastn DB;
    # the RNA side is matched exactly, so its DB build is disabled below.
    #if CheckBlastnDB( args.fasta1, args.lastnt1 ) == False:
    #	GenerateLastNts( args.fasta1, args.lastnt1 )
    #	MakeBlastnDB( args.fasta1, args.lastnt1 )
    if CheckBlastnDB( args.fasta2, args.lastnt2 ) == False:
        GenerateLastNts( args.fasta2, args.lastnt2 )
        MakeBlastnDB( args.fasta2, args.lastnt2 )
    [ dirname1, fq1 ] = os.path.split( args.fastq1 )
    [ dirname2, fq2 ] = os.path.split( args.fastq2 )
    if args.output == None:
        # Default the output folder to the directory of the first fastq.
        args.output = dirname1
    #if os.path.exists( args.output ) == None:
    #	sys.exit(0)
    #	#args.output = dirname1
    # Drop the ".gz" suffix so the trimmed copies get plain fastq names.
    if fq1[-3:] == ".gz":
        fq1 = fq1[:-3]
    if fq2[-3:] == ".gz":
        fq2 = fq2[:-3]
    # make output folder
    if os.path.exists( args.output ) == False:
        os.makedirs( args.output )
    temp_dir = os.path.join( args.output, "tmp" )
    if os.path.exists( temp_dir ) == False:
        os.makedirs( temp_dir )
    # From here on fq1/fq2 are the trimmed fastq paths inside tmp/.
    fq1 = os.path.join( temp_dir, fq1 )
    fq2 = os.path.join( temp_dir, fq2 )
    fa1 = fq1 + ".fa"
    fa2 = fq2 + ".fa"
    # remove 5' end adaptor sequences in both fastq file; remove reads if any of read does not have an adaptor sequence ( Temporarily now using fastq files in Friedrich folder which are already trimmed )
    # -m 15 : Discard trimmed reads that are shorter than LENGTH.
    # --discard-untrimmed : Discard reads that do not contain the adapter.
    #
    # bait --CGCTGCAGGTCGACGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT
    # prey GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT
    # *** ** **** ****************************************
    #
    #cmd = "cutadapt -g CGCTGCAGGTCGACGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -G GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -o output/$1/$6/$2.fastq -p output/$1/$6/$3.fastq -m 15 --discard-untrimmed ./fastq/$2.fastq.gz ./fastq/$3.fastq.gz"
    print "[ Removing adaptor sequences ]", fq1, fq2
    #cmd = "cutadapt --quiet -g CGCTGCAGGTCGACGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -G GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -o %s -p %s -m 15 --discard-untrimmed %s %s" % ( fq1, fq2, args.fastq1, args.fastq2 )
    cmd = "cutadapt --quiet -g GCAGGCATGCAAGCTGCC -G GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -o %s -p %s -m 15 --discard-untrimmed %s %s" % ( fq1, fq2, args.fastq1, args.fastq2 )
    run_cmd( cmd )
    # multi-threading: map both reads concurrently.
    # NOTE(review): read 1 is mapped from the ORIGINAL fastq (args.fastq1),
    # not the trimmed fq1 — RNAMapping's motif prefix includes the adaptor,
    # so it appears to expect untrimmed reads; confirm this is intended.
    print "[ Mapping sequences into reference baits and preys sequences ]", fq1, fq2
    th1 = threading.Thread(target=align_RNA_subprocess, args = ( args.fasta1, args.lastnt1, fa1, args.fastq1 ) )
    th1.start()
    th2 = threading.Thread(target=align_subprocess, args = ( args.fasta2, args.lastnt2, fa2, fq2 ) )
    th2.start()
    th1.join()
    th2.join()
    # python parse blastn output and make ppi map
    # currently stringent case, no restriction for position cases
    # maybe ignoring orientation could be added in the future
    print "[ Making a pair matrix ]", os.path.join( args.output, args.name )
    exact_file1 = "%s.exact" % fa1
    blastn_file2 = "%s.blastn" % fa2
    output_file = os.path.join( args.output, args.name ) # "%s/%s.exact.rpi.txt" % ( args.output, args.name )
    BLASTN_RNA( args.fasta1, args.fasta2, exact_file1, blastn_file2, output_file )
    #print cmd
    #run_cmd( cmd )
    #if args.relaxed == True:
    #	# no restriction for aligned position
    #	cmd = "main.py BLASTN_RELAXED %s.-%d %s.exact %s.blastn > %s/%s" % ( args.fasta1, args.lastnt1, fq1, fa2, args.output, args.name )
    #	run_cmd( cmd )
    #else:
    #	# very stringent case
    #	#cmd = "main.py BLASTN_NEW %s %s.blastn %s.blastn > %s/%s" % ( args.fasta1, fa1, fa2, args.output, args.name )
    #	#run_cmd( cmd )
    #	BLASTN_NEW( args.fasta1, args.lastnt1, "%s.exact" % fq1, "%s.blastn" % fa2, "%s/%s" % ( args.output, args.name ) )
'''
mkdir output/2017-08-24_MiSeq
mkdir output/2017-08-24_MiSeq/Blastn
cutadapt -g GCAGGCATGCAAGCTGCC -G GCAGCTCGAGCTCGATGGATCTTAGTTACTTACCACTTTGTACAAGAAAGCTGGGT -o output/2017-08-24_MiSeq/Blastn/S1_W_R1.fastq -p output/2017-08-24_MiSeq/Blastn/S1_W_R2.fastq -m 15 --discard-untrimmed ../2017-08-24_MiSeq/S1_W_R1.fastq.gz ../2017-08-24_MiSeq/S1_W_R2.fastq.gz
python main.py fastq_to_fasta output/2017-08-24_MiSeq/Blastn/S1_W_R1.fastq > output/2017-08-24_MiSeq/Blastn/S1_W_R1.fa
python main.py fastq_to_fasta output/2017-08-24_MiSeq/Blastn/S1_W_R2.fastq > output/2017-08-24_MiSeq/Blastn/S1_W_R2.fa
blastn -db ../data/1st_set.-100.fa -query output/2017-08-24_MiSeq/Blastn/S1_W_R1.fa -task blastn-short -outfmt 6 -max_target_seqs 20 -evalue 1e-8 > output/2017-08-24_MiSeq/Blastn/S1_W_R1.blastn
blastn -db ../data/1st_set.-100.fa -query output/2017-08-24_MiSeq/Blastn/S1_W_R2.fa -task blastn-short -outfmt 6 -max_target_seqs 20 -evalue 1e-8 > output/2017-08-24_MiSeq/Blastn/S1_W_R2.blastn
python main.py BLASTN ../data/1st_set.-100.fa output/2017-08-24_MiSeq/Blastn/S1_W_R1.blastn output/2017-08-24_MiSeq/Blastn/S1_W_R2.blastn > output/2017-08-24_MiSeq/Blastn/S1_W.rpi.txt
python library_checker.py RNA_mapping ../2017-08-24_MiSeq/S1_W_R1.fastq.gz ../data/2nd_RNA_set.fa > ./output/2017-08-24_MiSeq/Blastn/S1_W_R1.exact
python main.py BLASTN ../data/1st_set.-100.fa output/2017-08-24_MiSeq/Blastn/S1_W_R1.exact output/2017-08-24_MiSeq/Blastn/S1_W_R2.blastn > output/2017-08-24_MiSeq/Blastn/S1_W.exact.rpi.txt
'''
def run( args ):
    """Dispatch to the Y2H or Y3H pipeline based on args.program."""
    program = args.program
    if program == "Y2H":
        run_Y2H( args )
    elif program == "Y3H":
        run_Y3H( args )
| 43.559353
| 401
| 0.623147
| 3,285
| 24,219
| 4.478539
| 0.130289
| 0.017129
| 0.011963
| 0.01944
| 0.788744
| 0.750816
| 0.728725
| 0.709353
| 0.693855
| 0.67428
| 0
| 0.081646
| 0.235352
| 24,219
| 555
| 402
| 43.637838
| 0.712781
| 0.276353
| 0
| 0.588629
| 0
| 0.016722
| 0.096238
| 0.018809
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013378
| null | null | 0.070234
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fdc764c9262b9309391b35a77297cf75c73b6b04
| 10
|
py
|
Python
|
t.py
|
guninara/test
|
610bb861ce904731aff8e51df39ee87dab10a693
|
[
"MIT"
] | null | null | null |
t.py
|
guninara/test
|
610bb861ce904731aff8e51df39ee87dab10a693
|
[
"MIT"
] | null | null | null |
t.py
|
guninara/test
|
610bb861ce904731aff8e51df39ee87dab10a693
|
[
"MIT"
] | null | null | null |
print("A")
| 10
| 10
| 0.6
| 2
| 10
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 1
| 10
| 10
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fdd0a2914fcffa437ac9c8412837f6437b1328d0
| 82
|
py
|
Python
|
wire/__init__.py
|
cheetahbyte/wire
|
f34a41804770968313a8a16a1d9794b7a0abc7dc
|
[
"MIT"
] | 5
|
2021-09-01T17:34:03.000Z
|
2021-12-26T09:41:18.000Z
|
wire/__init__.py
|
cheetahbyte/wire
|
f34a41804770968313a8a16a1d9794b7a0abc7dc
|
[
"MIT"
] | 4
|
2021-12-26T14:53:09.000Z
|
2022-01-02T12:50:43.000Z
|
wire/__init__.py
|
cheetahbyte/wire
|
f34a41804770968313a8a16a1d9794b7a0abc7dc
|
[
"MIT"
] | null | null | null |
__version__ = '0.3.1.1'
from wire.app import Wire
from wire.request import Request
| 27.333333
| 32
| 0.780488
| 15
| 82
| 4
| 0.6
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.121951
| 82
| 3
| 32
| 27.333333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fdf64934d035a3f28df76c7f79657d781ba140e2
| 39
|
py
|
Python
|
python_scrappers/scrapper_1.py
|
TareqMonwer/BookList
|
66b9d0881e90ed2b2303e87a72e7a3a09bc703c2
|
[
"MIT"
] | null | null | null |
python_scrappers/scrapper_1.py
|
TareqMonwer/BookList
|
66b9d0881e90ed2b2303e87a72e7a3a09bc703c2
|
[
"MIT"
] | null | null | null |
python_scrappers/scrapper_1.py
|
TareqMonwer/BookList
|
66b9d0881e90ed2b2303e87a72e7a3a09bc703c2
|
[
"MIT"
] | null | null | null |
# python3.8
# A program to scrap books
| 13
| 26
| 0.717949
| 7
| 39
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0.205128
| 39
| 2
| 27
| 19.5
| 0.83871
| 0.871795
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e3499b7010c51183fef32f47ce4872e0531b3081
| 25
|
py
|
Python
|
hello_world.py
|
nantthet739/profiles-rest-api-new
|
c449108360ed7a39f8ffcefa38b32bca57d732c2
|
[
"MIT"
] | null | null | null |
hello_world.py
|
nantthet739/profiles-rest-api-new
|
c449108360ed7a39f8ffcefa38b32bca57d732c2
|
[
"MIT"
] | null | null | null |
hello_world.py
|
nantthet739/profiles-rest-api-new
|
c449108360ed7a39f8ffcefa38b32bca57d732c2
|
[
"MIT"
] | null | null | null |
print('Hello world 111')
| 12.5
| 24
| 0.72
| 4
| 25
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 0.12
| 25
| 1
| 25
| 25
| 0.681818
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e349b6571945ec6bfa56324d61a3791669b65a6d
| 94
|
py
|
Python
|
app/settings.py
|
4linux/HandsOn-Flask-App
|
f54482a46b41d6a156b232c6cb98399de559e063
|
[
"MIT"
] | 6
|
2021-06-29T20:37:13.000Z
|
2021-06-30T11:16:05.000Z
|
app/settings.py
|
4linux/HandsOn-Flask-App
|
f54482a46b41d6a156b232c6cb98399de559e063
|
[
"MIT"
] | null | null | null |
app/settings.py
|
4linux/HandsOn-Flask-App
|
f54482a46b41d6a156b232c6cb98399de559e063
|
[
"MIT"
] | 9
|
2021-06-29T20:36:53.000Z
|
2022-03-17T12:27:18.000Z
|
import os
MONGO_URI = os.environ.get("MONGO_URI")
SECRET_KEY = os.environ.get("SECRET_KEY")
| 15.666667
| 41
| 0.744681
| 16
| 94
| 4.125
| 0.5
| 0.242424
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 94
| 5
| 42
| 18.8
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.202128
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4735cb8288f5444ca3b56cf5725365a9bd0c705a
| 246
|
py
|
Python
|
test/nose_integration_tests/dummy_first_level_pkg_one_tests/dummy_second_level_pkg_A_tests/dummy_test_b.py
|
Points/teamcity-python
|
58dd0e9d83fea92f9212f2cadaaaeaefd1deb68e
|
[
"Apache-2.0"
] | 1
|
2018-08-14T16:29:57.000Z
|
2018-08-14T16:29:57.000Z
|
test/nose_integration_tests/dummy_first_level_pkg_one_tests/dummy_second_level_pkg_A_tests/dummy_test_b.py
|
denisenkom/teamcity-python
|
25d0008596cfc7b895f367d36de5dd95b6722b4c
|
[
"Apache-2.0"
] | null | null | null |
test/nose_integration_tests/dummy_first_level_pkg_one_tests/dummy_second_level_pkg_A_tests/dummy_test_b.py
|
denisenkom/teamcity-python
|
25d0008596cfc7b895f367d36de5dd95b6722b4c
|
[
"Apache-2.0"
] | null | null | null |
from nose.plugins.attrib import attr
@attr('demo_smoke', 'smoke', 'known_good')
def test_dummy_known_good_with_assertion_pass():
"""
test_dummy_known_good_with_assertion_pass
I'd like to buy the world a coke!
"""
assert True
| 24.6
| 48
| 0.727642
| 38
| 246
| 4.342105
| 0.710526
| 0.163636
| 0.169697
| 0.218182
| 0.424242
| 0.424242
| 0.424242
| 0
| 0
| 0
| 0
| 0
| 0.174797
| 246
| 10
| 49
| 24.6
| 0.812808
| 0.304878
| 0
| 0
| 0
| 0
| 0.164474
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4750ffb501af07361e962595a5fb94c2dc2694b6
| 165
|
py
|
Python
|
src/geneflow/data/migrations/20180727-01.py
|
CDCgov/geneflow
|
a2ef128efa0c60fa96b622be757e0695fc9a68d4
|
[
"Apache-2.0"
] | 7
|
2019-04-11T03:50:51.000Z
|
2020-03-27T15:59:04.000Z
|
src/geneflow/data/migrations/20180727-01.py
|
CDCgov/geneflow
|
a2ef128efa0c60fa96b622be757e0695fc9a68d4
|
[
"Apache-2.0"
] | 1
|
2019-05-06T14:18:42.000Z
|
2019-05-08T22:06:12.000Z
|
src/geneflow/data/migrations/20180727-01.py
|
CDCgov/geneflow
|
a2ef128efa0c60fa96b622be757e0695fc9a68d4
|
[
"Apache-2.0"
] | 6
|
2019-04-10T20:25:27.000Z
|
2021-12-16T15:59:59.000Z
|
# migration 20180727-01
#
# Add notifications field to job table.
#
from yoyo import step
step("ALTER TABLE job ADD COLUMN notifications TEXT NOT NULL DEFAULT ''")
| 20.625
| 73
| 0.757576
| 24
| 165
| 5.208333
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072993
| 0.169697
| 165
| 7
| 74
| 23.571429
| 0.839416
| 0.357576
| 0
| 0
| 0
| 0
| 0.643564
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4768391a7f21da5188bf7a452d527344c3300b34
| 35
|
py
|
Python
|
launch.py
|
fybmain/TiebaAutoSign
|
8390d9e69efff1eb851dd8c7a0f4ff1ce450be53
|
[
"MIT"
] | 2
|
2019-01-31T01:38:20.000Z
|
2019-01-31T01:57:56.000Z
|
launch.py
|
fybmain/TiebaAutoSign
|
8390d9e69efff1eb851dd8c7a0f4ff1ce450be53
|
[
"MIT"
] | null | null | null |
launch.py
|
fybmain/TiebaAutoSign
|
8390d9e69efff1eb851dd8c7a0f4ff1ce450be53
|
[
"MIT"
] | null | null | null |
from src import *
do_auto_task()
| 7
| 17
| 0.714286
| 6
| 35
| 3.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 35
| 4
| 18
| 8.75
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
47b0834498c4827e9805ef63a2fa8e0b661f3e4e
| 165
|
py
|
Python
|
src/compath_resources/__init__.py
|
ComPath/compath-resources
|
e8da7b511c2b558b8fd0bf38888b512008ac1ba3
|
[
"MIT"
] | 3
|
2020-11-24T18:43:59.000Z
|
2021-06-04T20:52:10.000Z
|
src/compath_resources/__init__.py
|
ComPath/compath-resources
|
e8da7b511c2b558b8fd0bf38888b512008ac1ba3
|
[
"MIT"
] | 13
|
2020-03-28T13:36:32.000Z
|
2021-01-19T15:00:07.000Z
|
src/compath_resources/__init__.py
|
ComPath/resources
|
e8da7b511c2b558b8fd0bf38888b512008ac1ba3
|
[
"MIT"
] | 1
|
2021-12-01T09:49:59.000Z
|
2021-12-01T09:49:59.000Z
|
# -*- coding: utf-8 -*-
"""Utilities for ComPath's resources."""
from .exporters import get_bel, get_rdf # noqa: F401
from .resources import get_df # noqa: F401
| 23.571429
| 53
| 0.684848
| 24
| 165
| 4.583333
| 0.708333
| 0.163636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051095
| 0.169697
| 165
| 6
| 54
| 27.5
| 0.751825
| 0.478788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
47c0b2826749260d43d14c91add0fd44a5f40826
| 90
|
py
|
Python
|
dataset_builder/exceptions/exceptions.py
|
statisticsnorway/microdata-dataset-builder
|
c58fe5804f146290e1d523536729f1a5b1ac2c73
|
[
"Apache-2.0"
] | null | null | null |
dataset_builder/exceptions/exceptions.py
|
statisticsnorway/microdata-dataset-builder
|
c58fe5804f146290e1d523536729f1a5b1ac2c73
|
[
"Apache-2.0"
] | 3
|
2022-01-18T15:21:49.000Z
|
2022-03-07T13:49:03.000Z
|
dataset_builder/exceptions/exceptions.py
|
statisticsnorway/microdata-dataset-builder
|
c58fe5804f146290e1d523536729f1a5b1ac2c73
|
[
"Apache-2.0"
] | null | null | null |
class BuilderStepError(Exception):
pass
class HttpAdapterError(Exception):
pass
| 12.857143
| 34
| 0.755556
| 8
| 90
| 8.5
| 0.625
| 0.382353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 90
| 6
| 35
| 15
| 0.918919
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
47ccb925d78b38e647828c16c5a53e2bca0e7c99
| 21
|
py
|
Python
|
tests/tests.py
|
ryderdamen/objectdaddy
|
57f9fdca2b0fd35114474ead1faf736ace092562
|
[
"MIT"
] | 2
|
2021-07-06T21:24:25.000Z
|
2021-07-11T23:14:46.000Z
|
tests/tests.py
|
ryderdamen/objectdaddy
|
57f9fdca2b0fd35114474ead1faf736ace092562
|
[
"MIT"
] | null | null | null |
tests/tests.py
|
ryderdamen/objectdaddy
|
57f9fdca2b0fd35114474ead1faf736ace092562
|
[
"MIT"
] | null | null | null |
# lol there are none
| 10.5
| 20
| 0.714286
| 4
| 21
| 3.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 21
| 1
| 21
| 21
| 0.9375
| 0.857143
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
47d0a46d230375d40c2fc2dbd498bc6f2297ecf6
| 88
|
py
|
Python
|
utils/nn/modules/__init__.py
|
roshanr11/Research-DCST
|
225461e6ffd7ca5a48b9688946eb36b2d98f358e
|
[
"MIT"
] | 5
|
2020-04-29T08:48:53.000Z
|
2020-12-23T10:11:39.000Z
|
utils/nn/modules/__init__.py
|
roshanr11/Research-DCST
|
225461e6ffd7ca5a48b9688946eb36b2d98f358e
|
[
"MIT"
] | 2
|
2020-01-11T08:31:06.000Z
|
2021-06-09T12:41:32.000Z
|
utils/nn/modules/__init__.py
|
roshanr11/Research-DCST
|
225461e6ffd7ca5a48b9688946eb36b2d98f358e
|
[
"MIT"
] | 5
|
2019-11-20T02:49:03.000Z
|
2020-09-17T15:27:34.000Z
|
from .crf import *
from .sparse import *
from .attention import *
from .linear import *
| 17.6
| 24
| 0.727273
| 12
| 88
| 5.333333
| 0.5
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 4
| 25
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
47fe6e2dd0d399fa7a2a3cd24581d614384c310c
| 1,571
|
py
|
Python
|
tests/test_ec2.py
|
fdmsantos/troposphere
|
c372dc300e579989cb44849c5f4dd6cb97f90d78
|
[
"BSD-2-Clause"
] | 1
|
2019-05-27T21:22:51.000Z
|
2019-05-27T21:22:51.000Z
|
tests/test_ec2.py
|
fdmsantos/troposphere
|
c372dc300e579989cb44849c5f4dd6cb97f90d78
|
[
"BSD-2-Clause"
] | 1
|
2021-06-25T15:20:46.000Z
|
2021-06-25T15:20:46.000Z
|
tests/test_ec2.py
|
fdmsantos/troposphere
|
c372dc300e579989cb44849c5f4dd6cb97f90d78
|
[
"BSD-2-Clause"
] | 2
|
2018-05-05T18:40:43.000Z
|
2018-09-19T04:17:05.000Z
|
import unittest
import troposphere.ec2 as ec2
class TestEC2(unittest.TestCase):
def test_securitygroupegress(self):
egress = ec2.SecurityGroupEgress(
'egress',
ToPort='80',
FromPort='80',
IpProtocol="tcp",
GroupId="id",
CidrIp="0.0.0.0/0",
)
egress.to_dict()
egress = ec2.SecurityGroupEgress(
'egress',
ToPort='80',
FromPort='80',
IpProtocol="tcp",
GroupId="id",
DestinationPrefixListId='id',
)
egress.to_dict()
egress = ec2.SecurityGroupEgress(
'egress',
ToPort='80',
FromPort='80',
IpProtocol="tcp",
GroupId="id",
DestinationSecurityGroupId='id',
)
egress.to_dict()
egress = ec2.SecurityGroupEgress(
'egress',
ToPort='80',
FromPort='80',
IpProtocol="tcp",
GroupId="id",
CidrIp="0.0.0.0/0",
DestinationPrefixListId='id',
)
with self.assertRaises(ValueError):
egress.to_dict()
egress = ec2.SecurityGroupEgress(
'egress',
ToPort='80',
FromPort='80',
IpProtocol="tcp",
GroupId="id",
CidrIp="0.0.0.0/0",
DestinationPrefixListId='id',
DestinationSecurityGroupId='id',
)
with self.assertRaises(ValueError):
egress.to_dict()
| 24.936508
| 44
| 0.481859
| 126
| 1,571
| 5.960317
| 0.230159
| 0.031957
| 0.035952
| 0.031957
| 0.770972
| 0.770972
| 0.770972
| 0.770972
| 0.672437
| 0.672437
| 0
| 0.045407
| 0.397199
| 1,571
| 62
| 45
| 25.33871
| 0.747624
| 0
| 0
| 0.833333
| 0
| 0
| 0.071292
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.018519
| false
| 0
| 0.037037
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9a0173065c684bfbe726ed11fe7a7b1eebdd1cfd
| 245
|
py
|
Python
|
core/sdfrenderer/__init__.py
|
hyunynim/DIST-Renderer
|
4717ee8cea77f4f413b61f380a893c6800d0bde5
|
[
"MIT"
] | 176
|
2020-06-11T19:16:33.000Z
|
2022-03-29T01:38:28.000Z
|
core/sdfrenderer/__init__.py
|
hyunynim/DIST-Renderer
|
4717ee8cea77f4f413b61f380a893c6800d0bde5
|
[
"MIT"
] | 6
|
2020-06-26T05:26:56.000Z
|
2021-11-10T07:31:21.000Z
|
core/sdfrenderer/__init__.py
|
hyunynim/DIST-Renderer
|
4717ee8cea77f4f413b61f380a893c6800d0bde5
|
[
"MIT"
] | 23
|
2020-06-11T21:43:03.000Z
|
2022-02-18T00:16:16.000Z
|
import os, sys
# Make this package's own directory importable so the bare (non-relative)
# imports below resolve no matter where the caller runs from.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
# Re-export the renderer variants as the package's public API.
from renderer import SDFRenderer
from renderer_rgb import SDFRenderer_color
from renderer_warp import SDFRenderer_warp
from renderer_deepsdf import SDFRenderer_deepsdf
| 30.625
| 59
| 0.857143
| 35
| 245
| 5.714286
| 0.428571
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089796
| 245
| 7
| 60
| 35
| 0.896861
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7beb4f1b0eabd08673d0d2c931a63947bf8de3a2
| 34
|
py
|
Python
|
pystocket/__init__.py
|
EvilTeliportist/pystocket
|
d63e998109b26f1c31ba4232f2e9343f0bc8c6fb
|
[
"MIT"
] | null | null | null |
pystocket/__init__.py
|
EvilTeliportist/pystocket
|
d63e998109b26f1c31ba4232f2e9343f0bc8c6fb
|
[
"MIT"
] | null | null | null |
pystocket/__init__.py
|
EvilTeliportist/pystocket
|
d63e998109b26f1c31ba4232f2e9343f0bc8c6fb
|
[
"MIT"
] | null | null | null |
from pystocket.pystocket import *
| 17
| 33
| 0.823529
| 4
| 34
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7bfaadb388651e5073785fd639390c28f78aca7f
| 5,889
|
py
|
Python
|
log.py
|
Iangecko/arbys
|
5b2e4b4e511d5721d6a1cc447b8fbf9be43fa909
|
[
"MIT"
] | 9
|
2018-10-01T05:57:17.000Z
|
2019-11-10T22:46:26.000Z
|
log.py
|
Iangecko/arbys
|
5b2e4b4e511d5721d6a1cc447b8fbf9be43fa909
|
[
"MIT"
] | 7
|
2018-09-25T14:30:24.000Z
|
2019-07-26T02:30:15.000Z
|
log.py
|
Iangecko/arbys
|
5b2e4b4e511d5721d6a1cc447b8fbf9be43fa909
|
[
"MIT"
] | 8
|
2018-09-25T21:06:26.000Z
|
2019-07-24T17:15:56.000Z
|
# Custom logging module for the bot because I don't like Python's built in logging module.
# Why I don't like it is not for a comment line in a file, though, so ask me if you want to know why.
import traceback
import time
import sys
import config
import os
from datetime import datetime
# Monthly rotating log file, e.g. "logs/2020-06-<configured name>".
logfile = "logs/" + time.strftime("%Y-%m-") + config.logfile
logfile_encoding = config.logfile_encoding
# Ensure the log directory exists; exist_ok replaces the try/except-FileExistsError dance.
os.makedirs("logs/", exist_ok=True)
def now():
    """Return the current UTC time formatted as a string."""
    return str(datetime.utcnow())
def debug(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at DEBUG level (6) to the logfile and/or terminal.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 6:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    lf.write(f"[{timestamp}] [D] {message}\n")
            # Was a bare `except:` (catches KeyboardInterrupt/SystemExit too);
            # narrowed so only real errors are swallowed, best-effort preserved.
            except Exception:
                pass
        if config.terminal_loglevel >= 6:
            sys.stdout.write(f"[{timestamp}] [D] {message}\n")
            sys.stdout.flush()
    except Exception:
        # Deliberately swallow logging failures (e.g. broken stdio pipes):
        # if the log system itself is broken there is no way to report it,
        # so dropping the message is the least-bad option.
        pass
def msg(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at MESSAGE level (5) to the logfile and/or terminal.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 5:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    lf.write(f"[{timestamp}] [M] {message}\n")
            # Narrowed from a bare `except:`; logfile errors stay best-effort.
            except Exception:
                pass
        if config.terminal_loglevel >= 5:
            sys.stdout.write(f"[{timestamp}] [M] {message}\n")
            sys.stdout.flush()
    except Exception:
        # See comment in debug() function here
        pass
def info(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at INFO level (4) to the logfile and/or terminal.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 4:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    lf.write(f"[{timestamp}] [I] {message}\n")
            # Narrowed from a bare `except:`; logfile errors stay best-effort.
            except Exception:
                pass
        if config.terminal_loglevel >= 4:
            sys.stdout.write(f"[{timestamp}] [I] {message}\n")
            sys.stdout.flush()
    except Exception:
        # See comment in debug() function here
        pass
def warn(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at WARNING level (3).

    Terminal output goes to stderr when config.exc_to_stderr is set,
    otherwise to stdout.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 3:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    lf.write(f"[{timestamp}] [W] {message}\n")
            # Narrowed from a bare `except:`; logfile errors stay best-effort.
            except Exception:
                pass
        if config.terminal_loglevel >= 3:
            if config.exc_to_stderr:
                sys.stderr.write(f"[{timestamp}] [W] {message}\n")
                sys.stderr.flush()
            else:
                sys.stdout.write(f"[{timestamp}] [W] {message}\n")
                sys.stdout.flush()
    except Exception:
        # See comment in debug() function here
        pass
# Convenience alias matching the stdlib logging method name.
warning = warn
def error(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at ERROR level (2).

    Terminal output goes to stderr when config.exc_to_stderr is set,
    otherwise to stdout.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 2:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    # Fixed: was "[[{timestamp}]]" (double brackets), inconsistent
                    # with every other level and with this function's own stderr branch.
                    lf.write(f"[{timestamp}] [E] {message}\n")
            # Narrowed from a bare `except:`; logfile errors stay best-effort.
            except Exception:
                pass
        if config.terminal_loglevel >= 2:
            if config.exc_to_stderr:
                sys.stderr.write(f"[{timestamp}] [E] {message}\n")
                sys.stderr.flush()
            else:
                # Fixed: double brackets here as well.
                sys.stdout.write(f"[{timestamp}] [E] {message}\n")
                sys.stdout.flush()
    except Exception:
        # See comment in debug() function here
        pass
def critical(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at CRITICAL level (1).

    Terminal output goes to stderr when config.exc_to_stderr is set,
    otherwise to stdout.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 1:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    lf.write(f"[{timestamp}] [C] {message}\n")
            # Narrowed from a bare `except:`; logfile errors stay best-effort.
            except Exception:
                pass
        if config.terminal_loglevel >= 1:
            if config.exc_to_stderr:
                sys.stderr.write(f"[{timestamp}] [C] {message}\n")
                sys.stderr.flush()
            else:
                sys.stdout.write(f"[{timestamp}] [C] {message}\n")
                sys.stdout.flush()
    except Exception:
        # See comment in debug() function here
        pass
# Short alias.
crit = critical
def fatal(message: str, ts: datetime = None, include_exception: bool = False) -> None:
    """Log *message* at FATAL level (0) — always emitted.

    Terminal output goes to stderr when config.exc_to_stderr is set,
    otherwise to stdout.

    :param message: text to log
    :param ts: timestamp to use; defaults to the current UTC time
    :param include_exception: append the active traceback, if any
    """
    try:
        timestamp = now() if ts is None else ts.__str__()
        if include_exception and (sys.exc_info()[2] is not None):
            message += "\n" + traceback.format_exc()
        if config.file_loglevel >= 0:
            try:
                with open(logfile, "a", encoding=logfile_encoding) as lf:
                    lf.write(f"[{timestamp}] [F] {message}\n")
            # Narrowed from a bare `except:`; logfile errors stay best-effort.
            except Exception:
                pass
        if config.terminal_loglevel >= 0:
            if config.exc_to_stderr:
                sys.stderr.write(f"[{timestamp}] [F] {message}\n")
                sys.stderr.flush()
            else:
                sys.stdout.write(f"[{timestamp}] [F] {message}\n")
                sys.stdout.flush()
    except Exception:
        # See comment in debug() function here
        pass
| 32.005435
| 113
| 0.674478
| 897
| 5,889
| 4.326644
| 0.19621
| 0.051533
| 0.06957
| 0.036073
| 0.747745
| 0.740788
| 0.726617
| 0.724556
| 0.648802
| 0.638495
| 0
| 0.004403
| 0.190015
| 5,889
| 183
| 114
| 32.180328
| 0.809224
| 0.182883
| 0
| 0.631944
| 0
| 0
| 0.117389
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.104167
| 0.041667
| 0.006944
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7bfdec8c6c8621b998decffda1c965fe9890792a
| 16,192
|
py
|
Python
|
src/utils/utils_load_data.py
|
disiji/fc_differentiable
|
65d6cbb3e0e41862744366c045fa06b6dc2d99a5
|
[
"MIT"
] | 2
|
2019-11-23T03:14:33.000Z
|
2022-01-18T20:00:27.000Z
|
src/utils/utils_load_data.py
|
disiji/fc_differentiable
|
65d6cbb3e0e41862744366c045fa06b6dc2d99a5
|
[
"MIT"
] | null | null | null |
src/utils/utils_load_data.py
|
disiji/fc_differentiable
|
65d6cbb3e0e41862744366c045fa06b6dc2d99a5
|
[
"MIT"
] | 2
|
2019-04-09T00:18:44.000Z
|
2022-01-01T10:43:25.000Z
|
from __future__ import division
import os
import pickle
import numpy as np
import pandas as pd
import fcsparser
def load_cll_data_1p(diagnosis_filename, cytometry_dir, features):
    """Load single-panel CLL samples and their binary diagnosis labels.

    :param diagnosis_filename: tab-separated file with SampleID and Diagnosis columns
    :param cytometry_dir: directory of tab-separated per-sample measurement files
    :param features: columns to keep from each sample file
    :return: (list of per-sample numpy arrays, list of 0/1 labels)
    """
    diagnosis_df = pd.read_csv(diagnosis_filename, sep='\t')
    known_ids = diagnosis_df['SampleID'].values
    X, y = [], []
    for fname in sorted(os.listdir(cytometry_dir)):
        sample_id = int(fname.split('_')[3])
        # Skip PB1 samples we have no diagnosis information for.
        if sample_id not in known_ids:
            continue
        sample_df = pd.read_csv(os.path.join(cytometry_dir, fname), sep='\t')
        X.append(sample_df[features].values)
        y.append(diagnosis_df.loc[diagnosis_df['SampleID'] == sample_id]['Diagnosis'].values[0])
    label_map = {'no': 0, 'yes': 1}
    return X, [label_map[label] for label in y]
def load_cll_data_1p_fcs(diagnosis_filename, cytometry_dir, features):
    """Load single-panel CLL samples from FCS files plus binary labels.

    :param diagnosis_filename: tab-separated file with FileName and Diagnosis columns
    :param cytometry_dir: directory of FCS files (subdirectories are ignored)
    :param features: columns to keep from each parsed FCS frame
    :return: (list of per-sample numpy arrays, list of 0/1 labels)
    """
    diagnosis_df = pd.read_csv(diagnosis_filename, sep='\t')
    known_files = diagnosis_df['FileName'].values
    X, y = [], []
    for fname in sorted(os.listdir(cytometry_dir)):
        file_path = os.path.join(cytometry_dir, fname)
        if os.path.isdir(file_path):
            continue
        # Skip PB1 samples we have no diagnosis information for.
        if fname not in known_files:
            continue
        meta_data, file_df = fcsparser.parse(file_path, meta_data_only=False, reformat_meta=True)
        print(list(file_df))
        X.append(file_df[features].values)
        y.append(diagnosis_df.loc[diagnosis_df['FileName'] == fname]['Diagnosis'].values[0])
    label_map = {'no': 0, 'yes': 1}
    y = [label_map[label] for label in y]
    print(y)
    return X, y
def load_cll_data_2p(diagnosis_filename, cytometry_dir_pb1, cytometry_dir_pb2, features_pb1, features_pb2):
    """Load paired-panel (PB1 + PB2) CLL samples and binary labels.

    Only samples present in BOTH panel directories AND the diagnosis file
    are returned.

    :return: (list of [pb1_array, pb2_array] pairs, list of 0/1 labels)
    """
    diagnosis_df = pd.read_csv(diagnosis_filename, sep='\t')
    pb2_files = sorted(os.listdir(cytometry_dir_pb2))
    # Map each PB2 sample id (4th underscore-separated token) to its filename.
    id2filename_pb2 = {int(f.split('_')[3]): f for f in pb2_files}
    X, y = [], []
    for filename_pb1 in sorted(os.listdir(cytometry_dir_pb1)):
        sample_id = int(filename_pb1.split('_')[3])
        if sample_id not in diagnosis_df['SampleID'].values or sample_id not in id2filename_pb2:
            continue
        filename_pb2 = id2filename_pb2[sample_id]
        x_pb1 = pd.read_csv(os.path.join(cytometry_dir_pb1, filename_pb1), sep='\t')[features_pb1].values
        x_pb2 = pd.read_csv(os.path.join(cytometry_dir_pb2, filename_pb2), sep='\t')[features_pb2].values
        X.append([x_pb1, x_pb2])
        y.append(diagnosis_df.loc[diagnosis_df['SampleID'] == sample_id]['Diagnosis'].values[0])
    label_map = {'no': 0, 'yes': 1}
    return X, [label_map[label] for label in y]
def load_cll_data_2p_fcs(diagnosis_filename, cytometry_dir_pb1, cytometry_dir_pb2, features_pb1, features_pb2):
    """Load paired-panel (PB1 + PB2) CLL samples from FCS files plus labels.

    Samples are matched by full filename (PB1 and PB2 filenames are assumed
    to be identical for the same sample).

    :return: (list of [pb1_array, pb2_array] pairs, list of 0/1 labels)
    """
    diagnosis_df = pd.read_csv(diagnosis_filename, sep='\t')
    pb2_files = sorted(os.listdir(cytometry_dir_pb2))
    # Identity map: PB2 samples are keyed by their own filename.
    id2filename_pb2 = dict(zip(pb2_files, pb2_files))
    X, y = [], []
    for filename_pb1 in sorted(os.listdir(cytometry_dir_pb1)):
        sample_id = filename_pb1
        if os.path.isdir(os.path.join(cytometry_dir_pb1, filename_pb1)):
            continue
        if sample_id not in diagnosis_df['FileName'].values or sample_id not in id2filename_pb2:
            continue
        filename_pb2 = id2filename_pb2[sample_id]
        filepath_pb1 = os.path.join(cytometry_dir_pb1, filename_pb1)
        filepath_pb2 = os.path.join(cytometry_dir_pb2, filename_pb2)
        meta_data, file_df_pb1 = fcsparser.parse(filepath_pb1, meta_data_only=False, reformat_meta=True)
        meta_data, file_df_pb2 = fcsparser.parse(filepath_pb2, meta_data_only=False, reformat_meta=True)
        print(list(file_df_pb2))
        X.append([file_df_pb1[features_pb1].values, file_df_pb2[features_pb2].values])
        y.append(diagnosis_df.loc[diagnosis_df['FileName'] == sample_id]['Diagnosis'].values[0])
    label_map = {'no': 0, 'yes': 1}
    return X, [label_map[label] for label in y]
def get_reference_tree(file):
    """
    Load a gating hierarchy from a pickle file into a nested list.

    :param file: filename of the pickle file
    :return: the unpickled nested-list hierarchy
    """
    # Use a context manager so the handle is closed even if pickle.load
    # raises; the original opened the file and never closed it.
    with open(file, 'rb') as fh:
        return pickle.load(fh)
def filter_slope(data, dim1, dim2, x1, x2, y1, y2):
    """
    Return the rows of ``data`` inside the V-shaped wedge bounded by the lines
    through (0,0)-(x1,y1) (upper slope) and (0,0)-(x2,y2) (lower slope).

    :param data: np.array (n_datapoints, n_features)
    :param dim1: int, column used as the x axis
    :param dim2: int, column used as the y axis
    :param x1: float
    :param x2: float
    :param y1: float
    :param y2: float
    :return: (n_filtered_datapoints, n_features)
    """
    if min(x1, x2, y1, y2) < 0:
        raise ValueError("x1 or x2 or y1 or y2 is negative.")
    if not (0 <= dim1 <= data.shape[1]) or not (0 <= dim2 <= data.shape[1]):
        raise ValueError("dim1 and dim2 should be an int between 0 and data.shape[0].")
    if y1 / x1 < y2 / x2:
        raise ValueError("Slope of [(0,0), (x1, y1)] should be greater than the slope of [(0,0), (x2, y2)].")
    slope = data[:, dim2] / data[:, dim1]
    keep = (slope < y1 / x1) & (slope > y2 / x2)
    return data[keep]
def filter_rectangle(data, dim1, dim2, x1, x2, y1, y2, return_idx=False):
    """
    Return the rows of ``data`` strictly inside the axis-aligned rectangle
    with opposite corners (x1, y1) and (x2, y2) in the (dim1, dim2) plane.

    :param data: np.array (n_datapoints, n_features)
    :param dim1: int, column used as the x axis
    :param dim2: int, column used as the y axis
    :param x1: float
    :param x2: float
    :param y1: float
    :param y2: float
    :param return_idx: when True, return the boolean mask instead of the rows
    :return: (n_filtered_datapoints, n_features), or a boolean mask
    """
    if min(x1, x2, y1, y2) < 0:
        raise ValueError("x1 or x2 or y1 or y2 is negative.")
    if not (0 <= dim1 <= data.shape[1]) or not (0 <= dim2 <= data.shape[1]):
        raise ValueError("dim1 and dim2 should be an int between 0 and data.shape[0].")
    if x1 > x2 or y1 > y2:
        raise ValueError("x2 should be greater than x1, y2 should be greater than y1.")
    xs = data[:, dim1]
    ys = data[:, dim2]
    mask = (xs > x1) & (xs < x2) & (ys > y1) & (ys < y2)
    return mask if return_idx else data[mask]
def filter_cll_4d_pb1(x_list):
    """Apply the three-step PB1 gating chain, then keep feature columns 5:9.

    :param x_list: list of per-sample numpy arrays
    :return: list of filtered (n, 4) numpy arrays
    """
    report_idx = 3  # sample whose remaining cell count is printed after each gate
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 2, 3, 102, 921, 2048, 3891) for x in gated]
    print('After second gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 0, 4, 921, 2150, 102, 921) for x in gated]
    print('After third gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    return [x[:, 5:9] for x in gated]
def filter_cll_8d_pb1(x_list):
    """Apply the PB1 slope gate only, then keep the 8 columns [0, 2..8].

    :param x_list: list of per-sample numpy arrays
    :return: list of filtered (n, 8) numpy arrays
    """
    report_idx = 3  # sample whose remaining cell count is printed
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    return [x[:, [0, 2, 3, 4, 5, 6, 7, 8]] for x in gated]
def filter_cll_4d_pb2(x_list):
    """Apply the four-step PB2 gating chain, then keep feature columns 7:11.

    :param x_list: list of numpy arrays per sample
    :return: list of filtered numpy arrays per sample
    """
    report_idx = 3  # sample whose remaining cell count is printed after each gate
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 2, 3, 102, 921, 2048, 3891) for x in gated]
    print('After second gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 0, 4, 921, 2150, 102, 921) for x in gated]
    print('After third gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 5, 6, 1638, 3891, 2150, 3891) for x in gated]
    print('After fourth gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    # Loop variable renamed: the original comprehension shadowed the x_list parameter.
    return [x[:, 7:11] for x in gated]
def filter_cll_10d_pb2(x_list):
    """Apply the PB2 slope gate only, then keep the 10 columns [0, 2..10].

    :param x_list: list of numpy arrays per sample
    :return: list of filtered numpy arrays per sample
    """
    report_idx = 3  # sample whose remaining cell count is printed
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    return [x[:, [0, 2, 3, 4, 5, 6, 7, 8, 9, 10]] for x in gated]
def filter_cll_leaf_pb1(x_list):
    """Apply the full five-step PB1 gating chain down to the leaf gate.

    :param x_list: list of per-sample numpy arrays
    :return: list of fully gated numpy arrays (all columns kept)
    """
    report_idx = 3  # sample whose remaining cell count is printed after each gate
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 2, 3, 102, 921, 2048, 3891) for x in gated]
    print('After second gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 0, 4, 921, 2150, 102, 921) for x in gated]
    print('After third gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 5, 6, 1638, 3891, 2150, 3891) for x in gated]
    print('After fourth gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 7, 8, 0, 1228, 0, 1843) for x in gated]
    print('After fifth gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    return list(gated)
def filter_cll_leaf_pb2(x_list):
    """Apply the full five-step PB2 gating chain down to the leaf gate.

    :param x_list: list of numpy arrays per sample
    :return: list of fully gated numpy arrays per sample (all columns kept)
    """
    report_idx = 3  # sample whose remaining cell count is printed after each gate
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 2, 3, 102, 921, 2048, 3891) for x in gated]
    print('After second gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 0, 4, 921, 2150, 102, 921) for x in gated]
    print('After third gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 5, 6, 1638, 3891, 2150, 3891) for x in gated]
    print('After fourth gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    gated = [filter_rectangle(x, 7, 8, 0, 1740, 614, 2252) for x in gated]
    print('After fifth gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    return list(gated)
def filter_cll(x_list):
    """Apply only the first slope gate to every sample.

    :param x_list: list of per-sample numpy arrays
    :return: list of gated numpy arrays
    """
    report_idx = 3  # sample whose remaining cell count is printed
    gated = [filter_slope(x, 0, 1, 2048, 4096, 2048, 2560) for x in x_list]
    print('After first slope gate %d remain in sample %s' % (gated[report_idx].shape[0], report_idx))
    return gated
def normalize_x_list(x_list, offset=None, scale=None):
    """
    Min-max normalize every array in ``x_list`` with one shared offset/scale.

    x_list = normalized_x_list * scale + offset;
    normalized_x_list = (x_list - offset) / scale
    :param x_list: a list of numpy array, each of shape (, n_cell_features)
    :param offset: a numpy array of shape (n_cell_features, )
    :param scale: a numpy array of shape (n_cell_featuers, )
    :return: (normalized list, offset, scale)
    """
    n_features = x_list[0].shape[1]
    if offset is None or scale is None:
        # Per-feature global min/max across all samples; empty samples
        # contribute NaN rows (NOTE(review): NaN propagates through
        # np.min/np.max — confirm this is the intended behavior).
        per_sample_min = [x.min(axis=0) if x.shape[0] > 0 else [np.nan] * n_features for x in x_list]
        per_sample_max = [x.max(axis=0) if x.shape[0] > 0 else [np.nan] * n_features for x in x_list]
        x_min = np.min(np.array(per_sample_min), axis=0)
        x_max = np.max(np.array(per_sample_max), axis=0)
        offset = x_min
        scale = x_max - x_min
    normalized = [(x - offset) / scale for x in x_list]
    return normalized, offset, scale
def normalize_x_list_multiple_panels(x_list, offset=None, scale=None):
    """
    :param x_list: a list of a list of numpy arrays, each numpy array is the fc measurements of one panel for one sample
    :param offset: a list of numpy arrays of shape (n_cell_features, ). The list is of length n_panels
    :param scale: a list of numpy arrays of shape (n_cell_features, ). The list is of length n_panels
    :return:
    """
    n_panels = len(x_list[0])
    # Transpose the nesting: samples-of-panels -> panels-of-samples.
    x_list = list(map(list, zip(*x_list)))
    if offset is None:
        offset = [None] * n_panels
        scale = [None] * n_panels
    normalized_x_list = [None] * n_panels
    for panel_idx in range(n_panels):
        # NOTE(review): normalize_x_list is called WITHOUT the provided
        # offset/scale, so caller-supplied values are overwritten rather than
        # applied — confirm this is intended.
        normalized_x_list[panel_idx], offset[panel_idx], scale[panel_idx] = normalize_x_list(x_list[panel_idx])
    # Shift panel 2 so its first 6 features match panel 1's per-feature means
    # (presumably the 6 features shared between the panels — verify upstream).
    p1_mean_feats_in_both = np.mean(np.concatenate(normalized_x_list[0]), axis=0)[0:6]
    p2_mean_feats_in_both = np.mean(np.concatenate(normalized_x_list[1]), axis=0)[0:6]
    shift = p1_mean_feats_in_both - p2_mean_feats_in_both
    for i, x in enumerate(normalized_x_list[1]):
        # In-place shift of the shared columns; the second assignment writes
        # the same values back through the list reference.
        x[:, 0:6] = x[:, 0:6] + shift
        normalized_x_list[1][i][:, 0:6] = x[:, 0:6]
    # Transpose back: panels-of-samples -> samples-of-panels.
    normalized_x_list = list(map(list, zip(*normalized_x_list)))
    return normalized_x_list, offset, scale
def normalize_nested_tree_both_panels(nested_tree, offset, scale, feature2id):
    """
    normalized_x = (x - offset) / scale
    :param nested_tree: [gate, [child subtrees]]; a gate holds two
        [feature_name, low_cut, high_cut] entries plus a panel tag at index 2
    :param offset: a list of numpy arrays of shape (n_cell_features, ) of length n_panels
    :param scale: a list of numpy array of shape (n_cell_featuers, ) of length n_panels
    :param feature2id: a list of dictionaries that maps feature names to column idx of length n_panels
    :return: the tree with every gate's cut values normalized (gates are
        mutated in place)
    """
    if nested_tree == []:
        return []
    # normalize the root node
    gate = nested_tree[0]
    # gate[2] names the panel; anything other than 'p2' maps to panel 0.
    panel = gate[2]
    panel_id = 1 if panel == 'p2' else 0
    dim1, dim2 = feature2id[panel_id][gate[0][0]], feature2id[panel_id][gate[1][0]]
    # Rescale both cut points of each of the two gate dimensions.
    gate[0][1] = (gate[0][1] - offset[panel_id][dim1]) / scale[panel_id][dim1]
    gate[0][2] = (gate[0][2] - offset[panel_id][dim1]) / scale[panel_id][dim1]
    gate[1][1] = (gate[1][1] - offset[panel_id][dim2]) / scale[panel_id][dim2]
    gate[1][2] = (gate[1][2] - offset[panel_id][dim2]) / scale[panel_id][dim2]
    # Recurse into each child subtree.
    return [gate, [normalize_nested_tree_both_panels(child, offset, scale, feature2id)
                   for child in nested_tree[1]]]
def normalize_nested_tree(nested_tree, offset, scale, feature2id):
    """
    normalized_x = (x - offset) / scale
    :param nested_tree: [gate, [child subtrees]]; a gate holds two
        [feature_name, low_cut, high_cut] entries
    :param offset: a numpy array of shape (n_cell_features, )
    :param scale: a numpy array of shape (n_cell_featuers, )
    :param feature2id: a dictionary that maps feature names to column idx
    :return: the tree with every gate's cut values normalized (gates are
        mutated in place)
    """
    if nested_tree == []:
        return []
    # normalize the root node
    gate = nested_tree[0]
    dim1, dim2 = feature2id[gate[0][0]], feature2id[gate[1][0]]
    # Rescale both cut points of each of the two gate dimensions.
    gate[0][1] = (gate[0][1] - offset[dim1]) / scale[dim1]
    gate[0][2] = (gate[0][2] - offset[dim1]) / scale[dim1]
    gate[1][1] = (gate[1][1] - offset[dim2]) / scale[dim2]
    gate[1][2] = (gate[1][2] - offset[dim2]) / scale[dim2]
    # Recurse into each child subtree.
    return [gate, [normalize_nested_tree(child, offset, scale, feature2id)
                   for child in nested_tree[1]]]
if __name__ == '__main__':
    # Smoke test: two identical samples, each carrying two 2x2-cell panels.
    sample_a = [np.array([[1, 2], [3, 4]]), np.array([[5, 6], [7, 8]])]
    sample_b = [np.array([[1, 2], [3, 4]]), np.array([[5, 6], [7, 8]])]
    x_list = [sample_a, sample_b]
    print(x_list)
    print(normalize_x_list_multiple_panels(x_list))
| 43.880759
| 120
| 0.655077
| 2,638
| 16,192
| 3.797953
| 0.078848
| 0.065376
| 0.093423
| 0.037928
| 0.829324
| 0.786206
| 0.759657
| 0.74199
| 0.690788
| 0.664038
| 0
| 0.058297
| 0.213933
| 16,192
| 368
| 121
| 44
| 0.728865
| 0.163476
| 0
| 0.486364
| 0
| 0.004545
| 0.098996
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081818
| false
| 0
| 0.027273
| 0
| 0.204545
| 0.113636
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d052532ac1d7bcc98c212c36a47a7d010211c655
| 344
|
py
|
Python
|
logical/converter/qiskit/backends/basejob.py
|
malcolmregan/GateCircuit-to-AnnealerEmbedding
|
33a1a4ea2ebd707ade0677e0df468d5120a861db
|
[
"Apache-2.0"
] | null | null | null |
logical/converter/qiskit/backends/basejob.py
|
malcolmregan/GateCircuit-to-AnnealerEmbedding
|
33a1a4ea2ebd707ade0677e0df468d5120a861db
|
[
"Apache-2.0"
] | 1
|
2019-04-09T02:22:38.000Z
|
2019-04-09T02:22:38.000Z
|
logical/converter/qiskit/backends/basejob.py
|
malcolmregan/GateCircuit-to-AnnealerEmbedding
|
33a1a4ea2ebd707ade0677e0df468d5120a861db
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC, abstractmethod
class BaseJob(ABC):
    """Skeleton interface for backend job objects.

    Every hook is a no-op placeholder returning None; concrete backends are
    expected to override them.
    """

    def __init__(self, backend, job_id):
        """Placeholder constructor; ignores its arguments."""

    def job_id(self):
        """Placeholder for the job-identifier accessor."""

    def backend(self):
        """Placeholder for the owning-backend accessor."""

    def submit(self):
        """Placeholder for submitting the job."""

    def result(self):
        """Placeholder for retrieving the job result."""

    def cancel(self):
        """Placeholder for cancelling the job."""

    def status(self):
        """Placeholder for querying the job status."""
| 10.75
| 40
| 0.534884
| 41
| 344
| 4.341463
| 0.414634
| 0.275281
| 0.308989
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.386628
| 344
| 31
| 41
| 11.096774
| 0.843602
| 0
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.411765
| false
| 0.470588
| 0.058824
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
d0be96d14554eb0902b9485926cb2c9f66fffcc2
| 173
|
py
|
Python
|
accounts/admin.py
|
lawalkeyd/Law_Firm
|
02bf019929987e6a1b35b34bc60e5848a4a9ce35
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
lawalkeyd/Law_Firm
|
02bf019929987e6a1b35b34bc60e5848a4a9ce35
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
lawalkeyd/Law_Firm
|
02bf019929987e6a1b35b34bc60e5848a4a9ce35
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import UserAccount, UserProfile
# Register your models here.
# Expose both account models in the Django admin with the default ModelAdmin.
admin.site.register(UserAccount)
admin.site.register(UserProfile)
| 28.833333
| 44
| 0.83237
| 22
| 173
| 6.545455
| 0.545455
| 0.125
| 0.236111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092486
| 173
| 6
| 45
| 28.833333
| 0.917197
| 0.150289
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d0c7b32134af7652bb72d13c50b0ea9aabb0006c
| 22,283
|
py
|
Python
|
grapefruit_test.py
|
xav/Grapefruit
|
b3d88375be727a3a1ec5839fbc462e0e8e0836e4
|
[
"Apache-2.0"
] | 71
|
2015-01-08T10:22:34.000Z
|
2022-03-14T00:57:58.000Z
|
grapefruit_test.py
|
cyanogen/Grapefruit
|
b3d88375be727a3a1ec5839fbc462e0e8e0836e4
|
[
"Apache-2.0"
] | 11
|
2015-02-03T15:55:26.000Z
|
2020-10-30T15:26:57.000Z
|
grapefruit_test.py
|
cyanogen/Grapefruit
|
b3d88375be727a3a1ec5839fbc462e0e8e0836e4
|
[
"Apache-2.0"
] | 17
|
2015-01-31T19:01:05.000Z
|
2022-03-14T00:30:00.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-#
# Copyright (c) 2008-2016, Xavier Basty
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the grapefruit module."""
import grapefruit
from nose.tools import *
def assert_items_almost_equal(first, second, places=3, msg=None, delta=None):
    """Element-wise assert_almost_equal for two equal-length iterables."""
    assert_equal(len(first), len(second))
    for expected, actual in zip(first, second):
        assert_almost_equal(expected, actual, places, msg, delta)
class TestConversion():
"""Test the static color conversion methods."""
def test_rgb_to_hsl_tuple(self):
assert_items_almost_equal((30.0, 1.0, 0.5), grapefruit.rgb_to_hsl((1, 0.5, 0)))
def test_rgb_to_hsl(self):
assert_items_almost_equal((30.0, 1.0, 0.5), grapefruit.rgb_to_hsl(1, 0.5, 0))
assert_items_almost_equal((20.0, 1.0, 0.625), grapefruit.rgb_to_hsl(1, 0.5, 0.25)) #ff8040
assert_items_almost_equal((40.0, 1.0, 0.375), grapefruit.rgb_to_hsl(0.75, 0.5, 0)) #bf8000
def test_hsl_to_rgb_tuple(self):
assert_items_almost_equal((1, 0.5, 0), grapefruit.hsl_to_rgb(((30.0, 1.0, 0.5))))
def test_hsl_to_rgb(self):
assert_items_almost_equal((1, 0.5, 0), grapefruit.hsl_to_rgb(30.0, 1.0, 0.5))
assert_items_almost_equal((1, 0.5, 0.25), grapefruit.hsl_to_rgb(20.0, 1.0, 0.625))
assert_items_almost_equal((0.75, 0.5, 0), grapefruit.hsl_to_rgb(40.0, 1.0, 0.375))
def test_rgb_to_hsv_tuple(self):
assert_equal((30.0, 1.0, 1.0), grapefruit.rgb_to_hsv((1, 0.5, 0)))
def test_rgb_to_hsv(self):
assert_equal((30.0, 1.0, 1.0), grapefruit.rgb_to_hsv(1, 0.5, 0))
assert_equal((1, 0.5, 0), grapefruit.hsv_to_rgb(30.0, 1.0, 1.0))
def test_rgb_to_yiq_tuple(self):
assert_items_almost_equal((0.5923, 0.4589, -0.05), grapefruit.rgb_to_yiq((1, 0.5, 0)))
def test_rgb_to_yiq(self):
assert_items_almost_equal((0.5923, 0.4589, -0.05), grapefruit.rgb_to_yiq(1, 0.5, 0))
assert_items_almost_equal((1, 0.5, 0), grapefruit.yiq_to_rgb(0.5923, 0.4589, -0.05))
def test_rgb_to_yuv_tuple(self):
assert_items_almost_equal((0.5925, -0.2916, 0.3575), grapefruit.rgb_to_yuv((1, 0.5, 0)))
def test_rgb_to_yuv(self):
assert_items_almost_equal((0.5925, -0.2916, 0.3575), grapefruit.rgb_to_yuv(1, 0.5, 0))
assert_items_almost_equal((1, 0.5, 0), grapefruit.yuv_to_rgb(0.5925, -0.2916, 0.3575))
def test_rgb_to_xyz_tuple(self):
assert_items_almost_equal((0.4890, 0.3657, 0.04485), grapefruit.rgb_to_xyz((1, 0.5, 0)))
def test_rgb_to_xyz(self):
assert_items_almost_equal((0.4890, 0.3657, 0.04485), grapefruit.rgb_to_xyz(1, 0.5, 0))
assert_items_almost_equal((1, 0.5, 0), grapefruit.xyz_to_rgb(0.488941, 0.365682, 0.0448137))
def test_xyz_tolab_tuple(self):
assert_items_almost_equal((66.9518, 0.4308, 0.7397), grapefruit.xyz_to_lab((0.488941, 0.365682, 0.0448137)))
def test_xyz_tolab(self):
assert_items_almost_equal((66.9518, 0.4308, 0.7397), grapefruit.xyz_to_lab(0.488941, 0.365682, 0.0448137))
assert_items_almost_equal((66.9518, 0.4117, 0.6728), grapefruit.xyz_to_lab(0.488941, 0.365682, 0.0448137, grapefruit.WHITE_REFERENCE["std_D50"]))
def test_xyz_to_lab_tuple(self):
assert_items_almost_equal((0.4890, 0.3657, 0.0449), grapefruit.lab_to_xyz((66.9518, 0.4308, 0.7397)))
def test_xyz_to_lab(self):
assert_items_almost_equal((0.4890, 0.3657, 0.0449), grapefruit.lab_to_xyz(66.9518, 0.4308, 0.7397))
assert_items_almost_equal((0.4890, 0.3657, 0.0449), grapefruit.lab_to_xyz(66.9518, 0.4117, 0.6728, grapefruit.WHITE_REFERENCE["std_D50"]))
def test_cmy_to_cmyk_tuple(self):
assert_items_almost_equal((1, 0.32, 0, 0.5), grapefruit.cmy_to_cmyk((1.0, 0.66, 0.5)))
def test_cmy_to_cmyk(self):
assert_items_almost_equal((1, 0.32, 0, 0.5), grapefruit.cmy_to_cmyk(1.0, 0.66, 0.5))
def test_cmyk_to_cmy_tuple(self):
assert_items_almost_equal((1.0, 0.66, 0.5), grapefruit.cmyk_to_cmy((1, 0.32, 0, 0.5)))
def test_cmyk_to_cmy(self):
assert_items_almost_equal((1.0, 0.66, 0.5), grapefruit.cmyk_to_cmy(1, 0.32, 0, 0.5))
def test_rgb_to_cmy_tuple(self):
assert_equal((0, 0.5, 1), grapefruit.rgb_to_cmy((1, 0.5, 0)))
def test_rgb_to_cmy(self):
assert_equal((0, 0.5, 1), grapefruit.rgb_to_cmy(1, 0.5, 0))
assert_equal((1, 0.5, 0), grapefruit.cmy_to_rgb(0, 0.5, 1))
def test_rgb_to_html(self):
assert_equal("#ff8000", grapefruit.rgb_to_html(1, 0.5, 0))
assert_items_almost_equal((1.0, 0.5020, 0.0), grapefruit.html_to_rgb("#ff8000"))
assert_items_almost_equal((1.0, 0.5020, 0.0), grapefruit.html_to_rgb("ff8000"))
assert_items_almost_equal((1.0, 0.4, 0.0), grapefruit.html_to_rgb("#f60"))
assert_items_almost_equal((1.0, 0.4, 0.0), grapefruit.html_to_rgb("f60"))
assert_items_almost_equal((1.000000, 0.980392, 0.803922), grapefruit.html_to_rgb("lemonchiffon"))
def test_rgb_to_pil_tuple(self):
assert_almost_equal(0x0080ff, grapefruit.rgb_to_pil((1, 0.5, 0)))
def test_rgb_to_pil(self):
assert_almost_equal(0x0080ff, grapefruit.rgb_to_pil(1, 0.5, 0))
assert_items_almost_equal((1.0, 0.5020, 0), grapefruit.pil_to_rgb(0x0080ff))
def test_websafe_component(self):
    # Web-safe components snap to multiples of 0.2; the second argument
    # selects the alternative component used for dithering.
    cases = [
        (0.2, 0.2, False),
        (0.2, 0.25, False),
        (0.4, 0.3, False),
        (0.4, 0.25, True),
        (0.2, 0.2, True),
        (0.2, 0.3, True),
    ]
    for expected, value, alt in cases:
        assert_equal(expected, grapefruit._websafe_component(value, alt))
def test_rgb_to_to_websafe_tuple(self):
    # NOTE(review): method name has a doubled "to" — kept for discovery
    # compatibility. rgb_to_websafe also accepts a single RGB triple.
    result = grapefruit.rgb_to_websafe((1, 0.55, 0.0))
    assert_equal((1.0, 0.6, 0.0), result)
def test_rgb_to_to_websafe(self):
    # NOTE(review): method name has a doubled "to" — kept for discovery
    # compatibility. The trailing flag requests the alternative (dither)
    # component.
    assert_equal((1.0, 0.6, 0.0), grapefruit.rgb_to_websafe(1, 0.55, 0.0))
    assert_equal((1.0, 0.4, 0.0), grapefruit.rgb_to_websafe(1, 0.55, 0.0, True))
    assert_equal((1.0, 0.4, 0.0), grapefruit.rgb_to_websafe(1, 0.5, 0.0, True))
def test_rgb_to_greyscale_tuple(self):
    # rgb_to_greyscale also accepts a single RGB triple.
    result = grapefruit.rgb_to_greyscale((1, 0.8, 0))
    assert_equal((0.6, 0.6, 0.6), result)
def test_rgb_to_greyscale(self):
    # Scalar-argument form; all three output channels carry the same value.
    result = grapefruit.rgb_to_greyscale(1, 0.8, 0)
    assert_equal((0.6, 0.6, 0.6), result)
class TestNewFrom():
    """One test per Color.from_* alternate constructor, with and without
    an explicit alpha (alpha defaults to 1.0)."""

    def test_from_rgb(self):
        assert_equal(grapefruit.Color.from_rgb(1.0, 0.5, 0.0), (1.0, 0.5, 0.0, 1.0))
        assert_equal(grapefruit.Color.from_rgb(1.0, 0.5, 0.0, 0.5), (1.0, 0.5, 0.0, 0.5))

    def test_from_hsl(self):
        assert_equal(grapefruit.Color.from_hsl(30, 1, 0.5), (1.0, 0.5, 0.0, 1.0))
        assert_equal(grapefruit.Color.from_hsl(30, 1, 0.5, 0.5), (1.0, 0.5, 0.0, 0.5))

    def test_from_hsv(self):
        assert_equal(grapefruit.Color.from_hsv(30, 1, 1), (1.0, 0.5, 0.0, 1.0))
        assert_equal(grapefruit.Color.from_hsv(30, 1, 1, 0.5), (1.0, 0.5, 0.0, 0.5))

    def test_from_yiq(self):
        assert_items_almost_equal(grapefruit.Color.from_yiq(0.5923, 0.4589, -0.0499818), (1, 0.5, 0, 1))
        assert_items_almost_equal(grapefruit.Color.from_yiq(0.5923, 0.4589, -0.05, 0.5), (1, 0.5, 0, 0.5))

    def test_from_yuv(self):
        assert_items_almost_equal(grapefruit.Color.from_yuv(0.5925, -0.2916, 0.3575), (1, 0.5, 0, 1))
        assert_items_almost_equal(grapefruit.Color.from_yuv(0.5925, -0.2916, 0.3575, 0.5), (1, 0.5, 0, 0.5))

    def test_from_xyz(self):
        assert_items_almost_equal(grapefruit.Color.from_xyz(0.488941, 0.365682, 0.0448137), (1, 0.5, 0, 1))
        assert_items_almost_equal(grapefruit.Color.from_xyz(0.488941, 0.365682, 0.0448137, 0.5), (1, 0.5, 0, 0.5))

    def test_from_lab(self):
        d50 = grapefruit.WHITE_REFERENCE["std_D50"]
        # Default white reference (D65).
        assert_items_almost_equal(grapefruit.Color.from_lab(66.9518, 0.43084, 0.739692), (1, 0.5, 0, 1))
        # Same L*a*b* values interpreted against D50 give different RGB.
        assert_items_almost_equal(grapefruit.Color.from_lab(66.9518, 0.43084, 0.739692, wref=d50), (1.0123754, 0.492012, -0.143110, 1))
        assert_items_almost_equal(grapefruit.Color.from_lab(66.9518, 0.43084, 0.739692, 0.5), (1, 0.5, 0, 0.5))
        assert_items_almost_equal(grapefruit.Color.from_lab(66.9518, 0.43084, 0.739692, 0.5, d50), (1.0123754, 0.492012, -0.143110, 0.5))

    def test_from_labInteger(self):
        # Lightness may be given as a plain int, not only a float.
        coords = (60, 0.3, 0.3)
        colour = grapefruit.Color.from_lab(*coords)
        assert_items_almost_equal(colour.lab, coords)
        assert_true(colour.is_legal)

    def test_from_cmy(self):
        assert_equal(grapefruit.Color.from_cmy(0, 0.5, 1), (1, 0.5, 0, 1.0))
        assert_equal(grapefruit.Color.from_cmy(0, 0.5, 1, 0.5), (1, 0.5, 0, 0.5))

    def test_from_cmyk(self):
        assert_items_almost_equal(grapefruit.Color.from_cmyk(1, 0.32, 0, 0.5), (0, 0.34, 0.5, 1))
        assert_items_almost_equal(grapefruit.Color.from_cmyk(1, 0.32, 0, 0.5, 0.5), (0, 0.34, 0.5, 0.5))

    def test_from_html(self):
        # Long and short hex forms, with and without the leading '#'.
        for spec in ("#ff8000", "ff8000"):
            assert_items_almost_equal(grapefruit.Color.from_html(spec), (1, 0.5020, 0, 1))
        for spec in ("#f60", "f60"):
            assert_items_almost_equal(grapefruit.Color.from_html(spec), (1, 0.4, 0, 1))
        # Named colours are accepted too.
        assert_items_almost_equal(grapefruit.Color.from_html("lemonchiffon"), (1, 0.9804, 0.8039, 1))
        assert_items_almost_equal(grapefruit.Color.from_html("#ff8000", 0.5), (1, 0.5020, 0, 0.5))

    def test_from_pil(self):
        assert_items_almost_equal(grapefruit.Color.from_pil(0x0080ff), (1, 0.5020, 0, 1))
        assert_items_almost_equal(grapefruit.Color.from_pil(0x0080ff, 0.5), (1, 0.5020, 0, 0.5))
class TestColorProperties():
    """Getter/setter coverage for Color's colour-space properties.

    Each setter test verifies that the derived representations (rgb/hsl)
    stay consistent after mutation.
    """

    def test_get_alpha(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.alpha, 1.0)

    def test_set_alpha(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.alpha = 0.5
        assert_equal(c.alpha, 0.5)

    def test_get_white_ref(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        # D65 is the default white reference.
        assert_equal(c.white_ref, grapefruit.WHITE_REFERENCE['std_D65'])

    def test_set_white_ref(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.white_ref = grapefruit.WHITE_REFERENCE['std_A']
        assert_equal(grapefruit.WHITE_REFERENCE['std_A'], c.white_ref)

    def test_get_rgb(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.rgb, (1, 0.5, 0))

    def test_set_rgb(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.rgb = (0.1, 0.2, 0.3)
        assert_equal(c.rgb, (0.1, 0.2, 0.3))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.1, 0.2, 0.3))

    def test_get_rgba(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.rgba, (1, 0.5, 0, 1))

    def test_set_rgba(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.rgba = (0.1, 0.2, 0.3, 0.5)
        assert_equal(c.rgb, (0.1, 0.2, 0.3))
        assert_equal(c.alpha, 0.5)
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.1, 0.2, 0.3))

    def test_get_red(self):
        c = grapefruit.Color.from_rgb(0.1, 0.2, 0.3)
        assert_equal(c.red, 0.1)

    def test_set_red(self):
        c = grapefruit.Color.from_rgb(0.1, 0.2, 0.3)
        c.red = 0.9
        assert_equal(c.rgb, (0.9, 0.2, 0.3))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.9, 0.2, 0.3))

    def test_get_green(self):
        c = grapefruit.Color.from_rgb(0.1, 0.2, 0.3)
        assert_equal(c.green, 0.2)

    def test_set_green(self):
        c = grapefruit.Color.from_rgb(0.1, 0.2, 0.3)
        c.green = 0.9
        assert_equal(c.rgb, (0.1, 0.9, 0.3))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.1, 0.9, 0.3))

    def test_get_blue(self):
        c = grapefruit.Color.from_rgb(0.1, 0.2, 0.3)
        assert_equal(c.blue, 0.3)

    def test_set_blue(self):
        c = grapefruit.Color.from_rgb(0.1, 0.2, 0.3)
        c.blue = 0.9
        assert_equal(c.rgb, (0.1, 0.2, 0.9))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.1, 0.2, 0.9))

    def test_get_hsl(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.hsl, (30, 1, 0.5))

    def test_set_hsl(self):
        c = grapefruit.Color.from_hsl(30, 1, 0.5)
        c.hsl = (40, 0.5, 0.7)
        assert_equal(c.hsl, (40, 0.5, 0.7))
        assert_equal(c.rgb, grapefruit.hsl_to_rgb(40, 0.5, 0.7))

    def test_get_hsl_hue(self):
        c = grapefruit.Color.from_hsl(30, 1, 0.5)
        assert_equal(c.hsl_hue, 30)

    def test_set_hsl_hue(self):
        c = grapefruit.Color.from_hsl(30, 1, 0.5)
        c.hsl_hue = 40
        assert_equal(c.hsl, (40, 1, 0.5))
        assert_equal(c.rgb, grapefruit.hsl_to_rgb(40, 1, 0.5))

    def test_get_hsv(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.hsv, (30, 1, 1))

    def test_set_hsv(self):
        c = grapefruit.Color.from_hsv(30, 1, 1)
        c.hsv = (40, 0.5, 0.2)
        assert_items_almost_equal(c.hsv, (40, 0.5, 0.2))
        assert_equal(c.rgb, grapefruit.hsv_to_rgb(40, 0.5, 0.2))
        assert_almost_equal(c.hsl, grapefruit.rgb_to_hsl(grapefruit.hsv_to_rgb(40, 0.5, 0.2)))

    def test_get_yiq(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_items_almost_equal(c.yiq, (0.5923, 0.4589, -0.05))

    def test_set_yiq(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.yiq = (0.1, 0.2, 0.3)
        assert_items_almost_equal(c.yiq, (0.1, 0.2, 0.3))
        assert_equal(c.rgb, grapefruit.yiq_to_rgb(0.1, 0.2, 0.3))
        assert_almost_equal(c.hsl, grapefruit.rgb_to_hsl(grapefruit.yiq_to_rgb(0.1, 0.2, 0.3)))

    def test_get_yuv(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_items_almost_equal(c.yuv, (0.5925, -0.2916, 0.3575))

    def test_set_yuv(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.yuv = (0.1, 0.2, 0.3)
        assert_items_almost_equal(c.yuv, (0.1, 0.2, 0.3))
        assert_equal(c.rgb, grapefruit.yuv_to_rgb(0.1, 0.2, 0.3))
        assert_almost_equal(c.hsl, grapefruit.rgb_to_hsl(grapefruit.yuv_to_rgb(0.1, 0.2, 0.3)))

    def test_get_xyz(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_items_almost_equal(c.xyz, (0.4890, 0.3657, 0.04485))

    def test_set_xyz(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.xyz = (0.1, 0.2, 0.3)
        assert_items_almost_equal(c.xyz, (0.1, 0.2, 0.3))
        assert_equal(c.rgb, grapefruit.xyz_to_rgb(0.1, 0.2, 0.3))
        assert_almost_equal(c.hsl, grapefruit.rgb_to_hsl(grapefruit.xyz_to_rgb(0.1, 0.2, 0.3)))

    def test_get_lab(self):
        c = grapefruit.Color.from_rgb(1, 0.5, 0, wref=grapefruit.WHITE_REFERENCE['std_D50'])
        assert_items_almost_equal(c.lab, (66.9518, 0.4117, 0.6728))

    def test_get_cmy(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.cmy, (0, 0.5, 1))

    def test_set_cmy(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.cmy = (0.1, 0.2, 0.3)
        assert_items_almost_equal(c.cmy, (0.1, 0.2, 0.3))
        assert_equal(c.rgb, grapefruit.cmy_to_rgb(0.1, 0.2, 0.3))
        assert_almost_equal(c.hsl, grapefruit.rgb_to_hsl(grapefruit.cmy_to_rgb(0.1, 0.2, 0.3)))

    def test_get_cmyk(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.cmyk, (0, 0.5, 1, 0))

    def test_set_cmyk(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.cmyk = (0, 0.111, 0.222, 0.46)
        assert_items_almost_equal(c.cmyk, (0, 0.111, 0.222, 0.46))
        assert_equal(c.rgb, grapefruit.cmy_to_rgb(grapefruit.cmyk_to_cmy(0, 0.111, 0.222, 0.46)))
        assert_almost_equal(c.hsl, grapefruit.rgb_to_hsl(grapefruit.cmy_to_rgb(grapefruit.cmyk_to_cmy(0, 0.111, 0.222, 0.46))))

    def test_get_ints(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.ints, (255, 128, 0))

    def test_set_ints(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.ints = (0, 128, 255)
        assert_equal(c.rgb, (0.0, 128.0/255.0, 1.0))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.0, 128.0/255.0, 1.0))

    def test_get_html(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.html, '#ff8000')

    def test_set_html(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.html = '#0080ff'
        assert_equal(c.rgb, (0.0, 128.0/255.0, 1.0))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.0, 128.0/255.0, 1.0))

    def test_get_pil(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.pil, 0x0080ff)

    def test_set_pil(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        c.pil = 0xff8000
        assert_equal(c.rgb, (0.0, 128.0/255.0, 1.0))
        assert_equal(c.hsl, grapefruit.rgb_to_hsl(0.0, 128.0/255.0, 1.0))

    def test_get_websafe(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.websafe, (1, 0.6, 0))

    def test_get_greyscale(self):
        c = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        assert_equal(c.greyscale, (0.5, 0.5, 0.5))
class TestColorMethods():
    """Tests for Color's dunder protocol and scheme/blend helper methods."""

    @classmethod
    def setup_class(cls):
        # Shared fixtures; all three describe the same orange hue.
        cls.rgb_col = grapefruit.Color.from_rgb(1.0, 0.5, 0.0)
        cls.hsl_col = grapefruit.Color.from_hsl(30, 1, 0.5)
        cls.hsl_col2 = grapefruit.Color.from_hsl(30, 0.5, 0.5)

    def test_Init(self):
        assert_equal(grapefruit.Color((1.0, 0.5, 0.0)), (1.0, 0.5, 0.0, 1.0))
        assert_equal(grapefruit.Color((1.0, 0.5, 0.0), mode='rgb'), (1.0, 0.5, 0.0, 1.0))
        assert_equal(grapefruit.Color((30, 1, 0.5), mode='hsl'), (1.0, 0.5, 0.0, 1.0))
        # An unsupported mode must raise.
        assert_raises(ValueError, grapefruit.Color, (30, 1, 0.5), 'hsv')

    def test_Eq(self):
        assert_equal(self.rgb_col, self.hsl_col)
        assert_equal(self.rgb_col, (1.0, 0.5, 0.0, 1.0))
        assert_equal(self.rgb_col, [1.0, 0.5, 0.0, 1.0])
        assert_equal([1.0, 0.5, 0.0, 1.0], self.rgb_col)
        # A string never compares equal, even when it "looks" right.
        assert_not_equal(self.rgb_col, '(1.0, 0.5, 0.0, 1.0)')

    def test_Repr(self):
        for col in (self.rgb_col, self.hsl_col):
            assert_equal(repr(col), 'Color(1.0, 0.5, 0.0, 1.0)')

    def test_Str(self):
        for col in (self.rgb_col, self.hsl_col):
            assert_equal(str(col), '(1.0, 0.5, 0.0, 1.0)')

    def test_Iter(self):
        assert_equal([1, 0.5, 0, 1], list(iter(self.rgb_col)))

    def test_with_alpha(self):
        assert_equal(self.rgb_col.with_alpha(0.5), (1, 0.5, 0, 0.5))

    def test_with_white_ref(self):
        ref = (0.1, 0.2, 0.3)
        assert_equal(self.hsl_col.with_white_ref(ref).white_ref, ref)

    def test_with_hue(self):
        shifted = self.hsl_col.with_hue(60)
        assert_equal(shifted, (1.0, 1.0, 0.0, 1.0))
        assert_equal(shifted.hsl, (60, 1, 0.5))

    def test_with_saturation(self):
        desat = self.hsl_col.with_saturation(0.5)
        assert_equal(desat, (0.75, 0.5, 0.25, 1.0))
        assert_equal(desat.hsl, (30, 0.5, 0.5))

    def test_with_lightness(self):
        light = self.hsl_col.with_lightness(1)
        assert_equal(light, (1.0, 1.0, 1.0, 1.0))
        assert_equal(light.hsl, (30, 1.0, 1.0))

    def test_darker(self):
        darker = self.hsl_col.darker(0.2)
        assert_items_almost_equal(darker, (0.6, 0.3, 0.0, 1.0))
        assert_items_almost_equal(darker.hsl, (30, 1, 0.3))

    def test_lighter(self):
        lighter = self.hsl_col.lighter(0.2)
        assert_items_almost_equal(lighter, (1.0, 0.7, 0.4, 1.0))
        assert_items_almost_equal(lighter.hsl, (30, 1, 0.7))

    def test_saturate(self):
        saturated = self.hsl_col2.saturate(0.25)
        assert_items_almost_equal(saturated, (0.875, 0.5, 0.125, 1.0))
        assert_items_almost_equal(saturated.hsl, (30, 0.75, 0.5))

    def test_desaturate(self):
        desaturated = self.hsl_col2.desaturate(0.25)
        assert_items_almost_equal(desaturated, (0.625, 0.5, 0.375, 1.0))
        assert_items_almost_equal(desaturated.hsl, (30, 0.25, 0.5))

    def test_websafe_dither(self):
        expected = (
            (1.0, 0.6, 0.0, 1.0),
            (1.0, 0.4, 0.0, 1.0))
        assert_equal(self.rgb_col.websafe_dither(), expected)

    def test_make_gradient(self):
        expected = [
            (0.75, 0.25, 0.0, 1.0),
            (0.5, 0.5, 0.0, 1.0),
            (0.25, 0.75, 0.0, 1.0)]
        start = grapefruit.Color.from_rgb(1.0, 0.0, 0.0)
        end = grapefruit.Color.from_rgb(0.0, 1.0, 0.0)
        assert_equal(expected, start.make_gradient(end, 3))

    def test_complementary_color(self):
        # Complement of a 30-degree hue sits at 210 degrees.
        assert_equal(self.hsl_col.complementary_color(mode='rgb').hsl, (210, 1, 0.5))

    def test_make_monochrome_scheme(self):
        expected = (
            (0.94, 0.8, 0.66, 1.0),  # hsl(30, 0.7, 0.8)
            (0.6, 0.3, 0.0, 1.0),    # hsl(30, 1, 0.3)
            (0.88, 0.6, 0.32, 1.0),  # hsl(30, 0.7, 0.6)
            (1.0, 0.8, 0.6, 1.0))    # hsl(30, 1, 0.8)
        scheme = self.rgb_col.make_monochrome_scheme()
        for got, want in zip(scheme, expected):
            assert_items_almost_equal(got, want)

    def test_make_triadic_scheme(self):
        expected = (
            (0.0, 1.0, 0.5, 1.0),
            (0.5, 0.0, 1.0, 1.0))
        assert_equal(self.rgb_col.make_triadic_scheme(mode='rgb'), expected)

    def test_make_tetradic_scheme(self):
        expected = (
            (0.5, 1.0, 0.0, 1.0),
            (0.0, 0.5, 1.0, 1.0),
            (0.5, 0.0, 1.0, 1.0))
        assert_equal(self.rgb_col.make_tetradic_scheme(mode='rgb'), expected)

    def test_make_analogous_scheme(self):
        expected = (
            (1.0, 0.0, 0.0, 1.0),
            (1.0, 1.0, 0.0, 1.0))
        assert_equal(self.rgb_col.make_analogous_scheme(mode='rgb'), expected)

    def test_alpha_blend(self):
        base = grapefruit.Color.from_rgb(1, 0.5, 0, alpha=0.2)
        overlay = grapefruit.Color.from_rgb(1, 1, 1, alpha=0.8)
        assert_items_almost_equal(base.alpha_blend(overlay), (1, 0.875, 0.75, 0.84))

    def test_blend(self):
        base = grapefruit.Color.from_rgb(1, 0.5, 0, alpha=0.2)
        other = grapefruit.Color.from_rgb(1, 1, 1, alpha=0.6)
        assert_equal(base.blend(other), (1, 0.75, 0.5, 0.4))

    def test_nearest_legal(self):
        # Components outside [0, 1] are clamped by nearest_legal().
        c = grapefruit.Color.from_rgb(1.1, -0.1, 0.5, alpha=1.1)
        assert_false(c.is_legal)
        assert_items_almost_equal(c.nearest_legal().rgb, (1.0, 0.0, 0.5))
        assert_almost_equal(c.nearest_legal().alpha, 1.0)
| 42.043396
| 149
| 0.668267
| 4,374
| 22,283
| 3.189986
| 0.057156
| 0.037985
| 0.025156
| 0.119831
| 0.81108
| 0.755393
| 0.716262
| 0.678349
| 0.628324
| 0.588117
| 0
| 0.138231
| 0.153301
| 22,283
| 529
| 150
| 42.122873
| 0.601314
| 0.037248
| 0
| 0.173709
| 0
| 0
| 0.012841
| 0
| 0
| 0
| 0.002615
| 0
| 0.448357
| 1
| 0.251174
| false
| 0
| 0.004695
| 0
| 0.265258
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d0d539bd6ae37bdbeae29f0eb5bf874a548b0cbd
| 1,021
|
py
|
Python
|
src/python/led_matrix_client/Util.py
|
janvdb-eu/rpi-rgb-led-matrix-server
|
71d6352a2172ec9dba5472262e6070d391e6a39c
|
[
"Apache-2.0"
] | 7
|
2018-04-30T21:05:23.000Z
|
2022-02-23T05:57:10.000Z
|
src/python/led_matrix_client/Util.py
|
janvdb-eu/rpi-rgb-led-matrix-server
|
71d6352a2172ec9dba5472262e6070d391e6a39c
|
[
"Apache-2.0"
] | 1
|
2019-02-27T09:23:55.000Z
|
2019-02-27T09:55:21.000Z
|
src/python/led_matrix_client/Util.py
|
janvdbergh/rpi-rgb-led-matrix-server
|
71d6352a2172ec9dba5472262e6070d391e6a39c
|
[
"Apache-2.0"
] | null | null | null |
def to8BitUnsigned(value):
    """Encode *value* as a single unsigned byte."""
    return value.to_bytes(1, 'little', signed=False)
def to16BitUnsigned(value):
    """Encode *value* as two little-endian unsigned bytes."""
    return value.to_bytes(2, 'little', signed=False)
def to16BitSigned(value):
    """Encode *value* as two little-endian two's-complement bytes."""
    return value.to_bytes(2, 'little', signed=True)
def to32BitUnsigned(value):
    """Encode *value* as four little-endian unsigned bytes."""
    return value.to_bytes(4, 'little', signed=False)
def toString(text):
    """Encode *text* as a 16-bit little-endian length prefix followed by
    its ASCII bytes. Raises on non-ASCII input."""
    payload = bytearray(text, encoding='ascii')
    return len(text).to_bytes(2, 'little', signed=False) + payload
def from16BitUnsigned(buf, offset):
    """Decode two little-endian unsigned bytes from *buf* at *offset*.

    Fix: the local variable previously shadowed the builtin ``bytes``.
    """
    data = buf[offset:offset + 2]
    return int.from_bytes(data, byteorder='little', signed=False)
def from16BitSigned(buf, offset):
    """Decode two little-endian two's-complement bytes from *buf* at *offset*.

    Fix: the local variable previously shadowed the builtin ``bytes``.
    """
    data = buf[offset:offset + 2]
    return int.from_bytes(data, byteorder='little', signed=True)
def from32BitUnsigned(buf, offset):
    """Decode four little-endian unsigned bytes from *buf* at *offset*.

    Fix: the local variable previously shadowed the builtin ``bytes``.
    """
    data = buf[offset:offset + 4]
    return int.from_bytes(data, byteorder='little', signed=False)
def fromString(buf, offset):
    """Decode a length-prefixed ASCII string from *buf* at *offset*.

    The wire format matches ``toString``: a 16-bit little-endian length
    followed by that many ASCII bytes.

    Fix: the local variable previously shadowed the builtin ``bytes``.
    """
    length = from16BitUnsigned(buf, offset)
    data = buf[offset + 2:offset + 2 + length]
    return data.decode('ascii')
| 25.525
| 70
| 0.750245
| 136
| 1,021
| 5.580882
| 0.242647
| 0.106719
| 0.193676
| 0.171278
| 0.703557
| 0.598155
| 0.408432
| 0.408432
| 0.408432
| 0.408432
| 0
| 0.028698
| 0.112635
| 1,021
| 39
| 71
| 26.179487
| 0.809051
| 0
| 0
| 0.173913
| 0
| 0
| 0.05093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.391304
| false
| 0
| 0
| 0.217391
| 0.782609
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d0e14c8c716c0230bfd4c58f0ea5d42233ed71ac
| 155
|
py
|
Python
|
CrawlingTest/requests_library.py
|
KrGil/Python-Practice
|
4842974fc7c36db035796504734c426cd6426bd2
|
[
"MIT"
] | null | null | null |
CrawlingTest/requests_library.py
|
KrGil/Python-Practice
|
4842974fc7c36db035796504734c426cd6426bd2
|
[
"MIT"
] | null | null | null |
CrawlingTest/requests_library.py
|
KrGil/Python-Practice
|
4842974fc7c36db035796504734c426cd6426bd2
|
[
"MIT"
] | null | null | null |
import requests

# Fetch a page (Korean national law portal; the previously used
# https://www.naver.com target is kept below for reference).
# response = requests.get("https://www.naver.com")
response = requests.get("https://www.law.go.kr/법령/민법")

page_html = response.text
print(page_html)
| 25.833333
| 54
| 0.722581
| 24
| 155
| 4.666667
| 0.666667
| 0.285714
| 0.339286
| 0.428571
| 0.482143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083871
| 155
| 6
| 55
| 25.833333
| 0.788732
| 0.309677
| 0
| 0
| 0
| 0
| 0.254717
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d0f9ac6e3fe1a6e74cf39fcc168eee1735d43163
| 59
|
py
|
Python
|
exercise_01/exercise_code/networks/__init__.py
|
Sihifu/i2dl
|
5beafccc85d29ebc62c37b42674df209b7995c39
|
[
"RSA-MD"
] | null | null | null |
exercise_01/exercise_code/networks/__init__.py
|
Sihifu/i2dl
|
5beafccc85d29ebc62c37b42674df209b7995c39
|
[
"RSA-MD"
] | null | null | null |
exercise_01/exercise_code/networks/__init__.py
|
Sihifu/i2dl
|
5beafccc85d29ebc62c37b42674df209b7995c39
|
[
"RSA-MD"
] | null | null | null |
"""Definition of all datasets"""
from .dummy import Dummy
| 14.75
| 32
| 0.728814
| 8
| 59
| 5.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 59
| 3
| 33
| 19.666667
| 0.86
| 0.440678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ef71caa17ca76c7b471101c4010166e23358e5f0
| 6,458
|
py
|
Python
|
standard-test-functions/StandardTestFunctions.py
|
Clymsw/xHVI-2020
|
548162f517d4b052c1f62224dabba566a1290a2e
|
[
"MIT"
] | null | null | null |
standard-test-functions/StandardTestFunctions.py
|
Clymsw/xHVI-2020
|
548162f517d4b052c1f62224dabba566a1290a2e
|
[
"MIT"
] | null | null | null |
standard-test-functions/StandardTestFunctions.py
|
Clymsw/xHVI-2020
|
548162f517d4b052c1f62224dabba566a1290a2e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 11 11:56:23 2020
@author: Clym Stock-Williams
"""
from deap import benchmarks
import numpy as np
import os
def get_function_definition(function_name: str, num_input_dims: int, num_objectives: int):
    """Look up a standard multi-objective test function by name.

    Returns a tuple ``(domain, fitnessfunc, d1x_opt, num_input_dims,
    num_objectives)`` where *domain* is a GPyOpt-style list of continuous
    variable descriptors, *fitnessfunc* evaluates one individual, and
    *d1x_opt* is a (1000, num_input_dims) sample of Pareto-optimal inputs
    (first dimension swept over [0, 1], the rest held at the optimal
    fill value). For unknown names everything stays empty/None.

    Bug fix: the "POLONI" branch previously evaluated
    ``benchmarks.kursawe`` (copy-paste error); it now uses
    ``benchmarks.poloni``.
    """
    domain = []
    fitnessfunc = None
    d1x_opt = []

    def _unit_box(start=0):
        # Append [0, 1] bounds for dimensions start..num_input_dims-1.
        # NOTE(review): the stray ']' in the name ("x_1]") is preserved
        # from the original — confirm whether it is intentional.
        for i in range(start, num_input_dims):
            domain.append({'name': "x_{}]".format(i + 1), 'type': 'continuous', 'domain': (0.0, 1.0)})

    def _front_samples(fill_value):
        # 1000 Pareto-optimal inputs: x1 swept over [0, 1], all other
        # dimensions fixed at fill_value.
        rest = np.repeat(fill_value, num_input_dims - 1).reshape([1, -1])
        rest = np.repeat(rest, 1000, 0)
        return np.hstack((np.linspace(0.0, 1.0, 1000).reshape([-1, 1]), rest))

    zdt_funcs = {"ZDT1": benchmarks.zdt1, "ZDT2": benchmarks.zdt2,
                 "ZDT3": benchmarks.zdt3, "ZDT4": benchmarks.zdt4,
                 "ZDT6": benchmarks.zdt6}
    dtlz_funcs = {"DTLZ1": benchmarks.dtlz1, "DTLZ2": benchmarks.dtlz2,
                  "DTLZ3": benchmarks.dtlz3, "DTLZ4": benchmarks.dtlz4,
                  "DTLZ5": benchmarks.dtlz5, "DTLZ6": benchmarks.dtlz6,
                  "DTLZ7": benchmarks.dtlz7}

    if function_name in zdt_funcs:
        func = zdt_funcs[function_name]
        fitnessfunc = lambda ind: func(ind)
        num_objectives = 2  # all ZDT problems are bi-objective
        if function_name == "ZDT4":
            # ZDT4: x1 in [0, 1], remaining dimensions in [-5, 5].
            domain.append({'name': "x_{}]".format(1), 'type': 'continuous', 'domain': (0.0, 1.0)})
            for i in range(1, num_input_dims):
                domain.append({'name': "x_{}]".format(i + 1), 'type': 'continuous', 'domain': (-5.0, 5.0)})
        else:
            _unit_box()
        d1x_opt = _front_samples(0.0)
    elif function_name == "KURSAWE":
        fitnessfunc = lambda ind: benchmarks.kursawe(ind)
    elif function_name == "POLONI":
        # Fixed: was benchmarks.kursawe (copy-paste error).
        fitnessfunc = lambda ind: benchmarks.poloni(ind)
    elif function_name in dtlz_funcs:
        func = dtlz_funcs[function_name]
        if function_name == "DTLZ4":
            # DTLZ4 takes an extra alpha parameter (100 per the reference).
            fitnessfunc = lambda ind: func(ind, num_objectives, 100)
        else:
            fitnessfunc = lambda ind: func(ind, num_objectives)
        _unit_box()
        # DTLZ7's front has the free dimensions at 0.0; the others at 0.5.
        d1x_opt = _front_samples(0.0 if function_name == "DTLZ7" else 0.5)

    return domain, fitnessfunc, d1x_opt, num_input_dims, num_objectives
def get_M2_pareto_front(function_name: str):
    """Load the pre-computed 2-objective Pareto front for *function_name*.

    Reads ``Pareto_Fronts_M2/PF_<function_name>.csv`` (comma-separated,
    no header) relative to the current working directory.
    """
    csv_path = os.path.join('Pareto_Fronts_M2', 'PF_{}.csv'.format(function_name))
    return np.genfromtxt(csv_path, delimiter=',', skip_header=0)
| 49.29771
| 101
| 0.587798
| 968
| 6,458
| 3.744835
| 0.094008
| 0.102621
| 0.079448
| 0.026483
| 0.736552
| 0.730207
| 0.730207
| 0.730207
| 0.72331
| 0.692414
| 0
| 0.08382
| 0.231496
| 6,458
| 131
| 102
| 49.29771
| 0.646585
| 0.013627
| 0
| 0.607477
| 0
| 0
| 0.073707
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018692
| false
| 0
| 0.028037
| 0
| 0.065421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
efadd5142e854761317cf954f2ea5cf90bb5ff90
| 6,522
|
py
|
Python
|
src/productbert/model/model.py
|
wbsg-uni-mannheim/productbert-intermediate
|
4d77b1f82dc57f1716649f94c80db77d080777d3
|
[
"BSD-3-Clause"
] | 7
|
2021-06-18T01:48:27.000Z
|
2022-03-23T01:46:08.000Z
|
src/productbert/model/model.py
|
Web-based-Systems-Group/productbert-intermediate
|
4d77b1f82dc57f1716649f94c80db77d080777d3
|
[
"BSD-3-Clause"
] | 5
|
2021-06-08T22:13:04.000Z
|
2022-03-12T00:44:19.000Z
|
src/productbert/model/model.py
|
Web-based-Systems-Group/productbert-intermediate
|
4d77b1f82dc57f1716649f94c80db77d080777d3
|
[
"BSD-3-Clause"
] | 2
|
2021-06-29T18:12:10.000Z
|
2021-08-18T16:42:22.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
from transformers import BertModel, BertConfig
from transformers.modeling_bert import BertOnlyMLMHead
class MnistModel(BaseModel):
    """Small two-conv CNN; forward() returns per-class log-probabilities.

    The flattened size of 320 fixes the expected input resolution
    (presumably 1x28x28 MNIST images — confirm against the data loader).
    """

    def __init__(self, num_classes=10):
        super().__init__()
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
        self.conv2_drop = nn.Dropout2d()
        self.fc1 = nn.Linear(320, 50)
        self.fc2 = nn.Linear(50, num_classes)

    def forward(self, x):
        # Two conv -> max-pool -> ReLU stages (dropout on the second conv),
        # then flatten and run the two fully connected layers.
        out = F.relu(F.max_pool2d(self.conv1(x), 2))
        out = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(out)), 2))
        out = out.view(-1, 320)
        out = F.dropout(F.relu(self.fc1(out)), training=self.training)
        return F.log_softmax(self.fc2(out), dim=1)
class BertModelPoolerLogit(BaseModel):
    """Pre-trained BERT encoder with a freshly initialised linear head
    on the pooler output."""

    def __init__(self, num_classes=1, freeze_bert=False):
        super().__init__()
        self.bert_layer = BertModel.from_pretrained('bert-base-uncased')
        if freeze_bert:
            # Keep the encoder fixed; only the head will be trained.
            for param in self.bert_layer.parameters():
                param.requires_grad = False
        # Classification head (768 = BERT-base hidden size).
        self.cls_layer = nn.Linear(768, num_classes)
        self._init_weights_bert(self.cls_layer)

    def forward(self, seq, token_ids, attn_masks):
        """Return [B, num_classes] logits.

        seq, token_ids, attn_masks: [B, T] token ids, segment ids and
        attention masks (masks zero out PAD positions).
        """
        _, pooled = self.bert_layer(seq, attention_mask=attn_masks, token_type_ids=token_ids)
        return self.cls_layer(pooled)
class BertModelSupplementaryPoolerComputers(BaseModel):
    """BERT + linear head, restored from the computers_new intermediate
    pre-training checkpoint (hard-coded path, relative to the CWD)."""

    def __init__(self, num_classes=1, freeze_bert=False):
        super().__init__()
        self.bert_layer = BertModel.from_pretrained('bert-base-uncased')
        if freeze_bert:
            # Train only the classification head.
            for param in self.bert_layer.parameters():
                param.requires_grad = False
        self.cls_layer = nn.Linear(768, num_classes)
        # computers_new checkpoint; overwrites both encoder and head weights.
        self.load_state_dict(torch.load('saved/models/BERT-computers-pretrain/0708_185939/model_with_head.bin'))

    def forward(self, seq, token_ids, attn_masks):
        """Return [B, num_classes] logits for [B, T] token-id inputs;
        attn_masks zeroes out PAD positions."""
        _, pooled = self.bert_layer(seq, attention_mask=attn_masks, token_type_ids=token_ids)
        return self.cls_layer(pooled)
class BertModelSupplementaryPooler4Cat(BaseModel):
    """BERT + linear head, restored from the 4-category intermediate
    pre-training checkpoint (hard-coded path, relative to the CWD)."""

    def __init__(self, num_classes=1, freeze_bert=False):
        super().__init__()
        self.bert_layer = BertModel.from_pretrained('bert-base-uncased')
        if freeze_bert:
            # Train only the classification head.
            for param in self.bert_layer.parameters():
                param.requires_grad = False
        self.cls_layer = nn.Linear(768, num_classes)
        # 4cat checkpoint; overwrites both encoder and head weights.
        self.load_state_dict(torch.load('saved/models/BERT-4cat-pretrain/0710_094316/model_with_head.bin'))

    def forward(self, seq, token_ids, attn_masks):
        """Return [B, num_classes] logits for [B, T] token-id inputs;
        attn_masks zeroes out PAD positions."""
        _, pooled = self.bert_layer(seq, attention_mask=attn_masks, token_type_ids=token_ids)
        return self.cls_layer(pooled)
class BertModelSupplementaryPoolerMLM(BaseModel):
    """BERT + linear head, restored from the computers_new + MLM
    intermediate pre-training checkpoint.

    The head attribute is named ``cls`` (not ``cls_layer``) because the
    checkpoint's state-dict keys depend on it.
    """

    def __init__(self, num_classes=1, freeze_bert=False):
        super().__init__()
        self.bert_layer = BertModel.from_pretrained('bert-base-uncased')
        if freeze_bert:
            # Train only the classification head.
            for param in self.bert_layer.parameters():
                param.requires_grad = False
        self.cls = nn.Linear(768, num_classes)
        # computers_new + mlm checkpoint; overwrites encoder and head weights.
        self.load_state_dict(torch.load('saved/models/BERT-computers-pretrain-mlm/0627_114018/model_with_head.bin'))

    def forward(self, seq, token_ids, attn_masks):
        """Return [B, num_classes] logits for [B, T] token-id inputs;
        attn_masks zeroes out PAD positions."""
        _, pooled = self.bert_layer(seq, attention_mask=attn_masks, token_type_ids=token_ids)
        return self.cls(pooled)
class BertModelSupplementaryMLM(BaseModel):
    """BERT with two freshly initialised heads: a masked-LM head and a
    pair-classification head, for joint supplementary pre-training."""

    def __init__(self, num_classes=1, freeze_bert=False):
        super().__init__()
        self.config = BertConfig()
        self.bert_layer = BertModel.from_pretrained('bert-base-uncased')
        self.mlm = BertOnlyMLMHead(self.config)
        self.cls = nn.Linear(768, num_classes)
        self._init_weights_bert(self.mlm)
        self._init_weights_bert(self.cls)
        if freeze_bert:
            # Keep the encoder fixed; only the two heads get gradients.
            for param in self.bert_layer.parameters():
                param.requires_grad = False

    def forward(self, seq, token_ids, attn_masks):
        """Return (seq_relationship_score, prediction_scores):
        pair logits from the pooled output and per-token MLM logits from
        the sequence output. Caller relies on this ordering."""
        encoded = self.bert_layer(seq, attention_mask=attn_masks, token_type_ids=token_ids)
        sequence_output, pooled_output = encoded[:2]
        prediction_scores = self.mlm(sequence_output)
        seq_relationship_score = self.cls(pooled_output)
        return seq_relationship_score, prediction_scores
| 36.435754
| 121
| 0.659614
| 836
| 6,522
| 4.899522
| 0.167464
| 0.036621
| 0.047607
| 0.027344
| 0.779541
| 0.762695
| 0.748291
| 0.72876
| 0.720459
| 0.708008
| 0
| 0.019085
| 0.252837
| 6,522
| 179
| 122
| 36.435754
| 0.821465
| 0.233057
| 0
| 0.536082
| 0
| 0
| 0.059677
| 0.042064
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123711
| false
| 0
| 0.061856
| 0
| 0.309278
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
efdbb6539d804cdfd4872ccf509d04baae8ac20b
| 13,319
|
py
|
Python
|
tests/native/test_evaluate.py
|
bitner/pygeofilter
|
140aee2f3197044cc18dd111c71e2fcdc516a200
|
[
"MIT"
] | 19
|
2021-03-30T18:18:10.000Z
|
2022-03-23T13:53:55.000Z
|
tests/native/test_evaluate.py
|
bitner/pygeofilter
|
140aee2f3197044cc18dd111c71e2fcdc516a200
|
[
"MIT"
] | 20
|
2021-04-25T10:32:41.000Z
|
2022-01-21T10:48:30.000Z
|
tests/native/test_evaluate.py
|
bitner/pygeofilter
|
140aee2f3197044cc18dd111c71e2fcdc516a200
|
[
"MIT"
] | 6
|
2021-06-09T01:07:17.000Z
|
2022-02-27T16:29:55.000Z
|
from datetime import date, datetime
from dataclasses import dataclass
from typing import Optional, List
import math
from shapely.geometry import Point
import pytest
from pygeofilter.parsers.ecql import parse
from pygeofilter.backends.native.evaluate import NativeEvaluator
from pygeofilter import ast
@dataclass
class Nested:
    """Inner object used to exercise nested-attribute lookups ("nested_attr.str_attr")."""
    # Single string field compared against in test_nested.
    str_attr: str
@dataclass
class Record:
    """One test record; field types cover every filter feature under test."""
    str_attr: str                    # string comparisons / LIKE
    maybe_str_attr: Optional[str]    # IS NULL / IS NOT NULL
    int_attr: int                    # numeric comparisons, IN
    float_attr: float                # BETWEEN, arithmetic
    date_attr: date                  # temporal BEFORE/AFTER
    datetime_attr: datetime
    point_attr: Point                # spatial predicates
    array_attr: List[int]            # array predicates
    nested_attr: Nested              # nested attribute access
@pytest.fixture
def data():
    """Two Record instances; the first additionally carries a dynamic extra_attr."""
    first = Record(
        'this is a test',
        None,
        5,
        5.5,
        date(2010, 1, 1),
        datetime(2010, 1, 1),
        Point(1, 1),
        [2, 3],
        Nested('this is a test'),
    )
    # Only the first record gets the dynamic attribute (EXISTS tests rely on this).
    first.extra_attr = 123
    second = Record(
        'this is another test',
        'not null',
        8,
        8.5,
        date(2010, 1, 10),
        datetime(2010, 1, 10),
        Point(2, 2),
        [1, 2, 3, 4, 5],
        Nested('this is another test'),
    )
    return [first, second]
def filter_(ast, data):
    """Evaluate `ast` with the native evaluator (math functions in scope,
    nested attribute access enabled) and return the matching records."""
    predicate = NativeEvaluator(
        math.__dict__,
        allow_nested_attributes=True,
    ).evaluate(ast)
    return list(filter(predicate, data))
@pytest.fixture
def data_json():
    """GeoJSON-style feature dicts mirroring the `data` fixture."""
    def feature(coords, properties):
        # Minimal GeoJSON Feature wrapper around a Point geometry.
        return {
            'type': 'Feature',
            'geometry': {'type': 'Point', 'coordinates': coords},
            'properties': properties,
        }

    return [
        feature((1, 1), {
            'str_attr': 'this is a test',
            'maybe_str_attr': None,
            'int_attr': 5,
            'float_attr': 5.5,
            'date_attr': '2010-01-01',
            'datetime_attr': '2010-01-01T00:00:00Z',
            'array_attr': [2, 3],
            'extra_attr': 123,
        }),
        feature((2, 2), {
            'str_attr': 'this is another test',
            'maybe_str_attr': 'not null',
            'int_attr': 8,
            'float_attr': 8.5,
            'date_attr': '2010-01-10',
            'datetime_attr': '2010-01-10T00:00:00Z',
            'array_attr': [1, 2, 3, 4, 5],
        }),
    ]
def filter_json(ast, data):
    """Evaluate `ast` against JSON-style features: `point_attr` resolves to the
    geometry, any other attribute to `properties.<name>`."""
    attr_map = {
        'point_attr': 'geometry',
        '*': 'properties.*',
    }
    predicate = NativeEvaluator(
        math.__dict__, attr_map, use_getattr=False
    ).evaluate(ast)
    return list(filter(predicate, data))
def test_comparison(data):
    """Each comparison operator selects exactly one of the two records."""
    for query, idx in [
        ('int_attr = 5', 0),
        ('int_attr < 6', 0),
        ('int_attr > 6', 1),
        ('int_attr <= 5', 0),
        ('int_attr >= 8', 1),
        ('int_attr <> 5', 1),
    ]:
        result = filter_(parse(query), data)
        assert len(result) == 1 and result[0] is data[idx]
def test_comparison_json(data_json):
    """Comparison operators behave identically on JSON-style features."""
    for query, idx in [
        ('int_attr = 5', 0),
        ('int_attr < 6', 0),
        ('int_attr > 6', 1),
        ('int_attr <= 5', 0),
        ('int_attr >= 8', 1),
        ('int_attr <> 5', 1),
    ]:
        result = filter_json(parse(query), data_json)
        assert len(result) == 1 and result[0] is data_json[idx]
def test_combination(data):
    """AND-combined predicates match only the first record."""
    result = filter_(parse('int_attr = 5 AND float_attr < 6.0'), data)
    assert len(result) == 1 and result[0] is data[0]
    # NOTE(review): this repeats the identical query and assertion — possibly a
    # copy-paste where an OR / different combination was intended; confirm.
    result = filter_(parse('int_attr = 5 AND float_attr < 6.0'), data)
    assert len(result) == 1 and result[0] is data[0]
def test_combination_json(data_json):
    """AND-combined predicates match only the first JSON feature."""
    result = filter_json(parse('int_attr = 5 AND float_attr < 6.0'), data_json)
    assert len(result) == 1 and result[0] is data_json[0]
    # NOTE(review): identical query repeated — likely a copy-paste; an OR /
    # different combination may have been intended. Confirm with the author.
    result = filter_json(parse('int_attr = 5 AND float_attr < 6.0'), data_json)
    assert len(result) == 1 and result[0] is data_json[0]
def test_between(data):
    """BETWEEN and NOT BETWEEN partition the records by value range."""
    for query, idx in [
        ('float_attr BETWEEN 4 AND 6', 0),
        ('int_attr NOT BETWEEN 4 AND 6', 1),
    ]:
        matched = filter_(parse(query), data)
        assert len(matched) == 1 and matched[0] is data[idx]
def test_between_json(data_json):
    """BETWEEN / NOT BETWEEN on JSON-style features."""
    for query, idx in [
        ('float_attr BETWEEN 4 AND 6', 0),
        ('int_attr NOT BETWEEN 4 AND 6', 1),
    ]:
        matched = filter_json(parse(query), data_json)
        assert len(matched) == 1 and matched[0] is data_json[idx]
def test_like(data):
    """LIKE/ILIKE with `.` and `%` wildcards, including negation.

    An expected index of None means the pattern matches both records.
    """
    cases = [
        ("str_attr LIKE 'this is . test'", 0),
        ("str_attr LIKE 'this is % test'", None),
        ("str_attr NOT LIKE '% another test'", 0),
        ("str_attr NOT LIKE 'this is . test'", 1),
        ("str_attr ILIKE 'THIS IS . TEST'", 0),
        ("str_attr ILIKE 'THIS IS % TEST'", None),
    ]
    for query, idx in cases:
        result = filter_(parse(query), data)
        if idx is None:
            assert len(result) == 2
        else:
            assert len(result) == 1 and result[0] is data[idx]
def test_like_json(data_json):
    """LIKE/ILIKE on JSON-style features; None expected index means both match."""
    cases = [
        ("str_attr LIKE 'this is . test'", 0),
        ("str_attr LIKE 'this is % test'", None),
        ("str_attr NOT LIKE '% another test'", 0),
        ("str_attr NOT LIKE 'this is . test'", 1),
        ("str_attr ILIKE 'THIS IS . TEST'", 0),
        ("str_attr ILIKE 'THIS IS % TEST'", None),
    ]
    for query, idx in cases:
        result = filter_json(parse(query), data_json)
        if idx is None:
            assert len(result) == 2
        else:
            assert len(result) == 1 and result[0] is data_json[idx]
def test_in(data):
    """IN / NOT IN membership tests split the two records."""
    for query, idx in [
        ('int_attr IN ( 1, 2, 3, 4, 5 )', 0),
        ('int_attr NOT IN ( 1, 2, 3, 4, 5 )', 1),
    ]:
        matched = filter_(parse(query), data)
        assert len(matched) == 1 and matched[0] is data[idx]
def test_in_json(data_json):
    """IN / NOT IN on JSON-style features."""
    for query, idx in [
        ('int_attr IN ( 1, 2, 3, 4, 5 )', 0),
        ('int_attr NOT IN ( 1, 2, 3, 4, 5 )', 1),
    ]:
        matched = filter_json(parse(query), data_json)
        assert len(matched) == 1 and matched[0] is data_json[idx]
def test_null(data):
    """IS NULL matches the record with maybe_str_attr=None; IS NOT NULL the other."""
    for query, idx in [
        ('maybe_str_attr IS NULL', 0),
        ('maybe_str_attr IS NOT NULL', 1),
    ]:
        matched = filter_(parse(query), data)
        assert len(matched) == 1 and matched[0] is data[idx]
def test_null_json(data_json):
    """IS NULL / IS NOT NULL on JSON-style features."""
    for query, idx in [
        ('maybe_str_attr IS NULL', 0),
        ('maybe_str_attr IS NOT NULL', 1),
    ]:
        matched = filter_json(parse(query), data_json)
        assert len(matched) == 1 and matched[0] is data_json[idx]
def test_has_attr(data):
    """EXISTS / DOES-NOT-EXIST keys off the dynamic extra_attr on record 0."""
    for query, idx in [
        ('extra_attr EXISTS', 0),
        ('extra_attr DOES-NOT-EXIST', 1),
    ]:
        matched = filter_(parse(query), data)
        assert len(matched) == 1 and matched[0] is data[idx]
def test_has_attr_json(data_json):
    """EXISTS / DOES-NOT-EXIST on JSON-style features."""
    for query, idx in [
        ('extra_attr EXISTS', 0),
        ('extra_attr DOES-NOT-EXIST', 1),
    ]:
        matched = filter_json(parse(query), data_json)
        assert len(matched) == 1 and matched[0] is data_json[idx]
def test_temporal(data):
    """BEFORE/AFTER comparisons against ISO timestamps (Z and +01:00 offsets)."""
    for query, idx in [
        ('date_attr BEFORE 2010-01-08T00:00:00.00Z', 0),
        ('date_attr AFTER 2010-01-08T00:00:00.00+01:00', 1),
    ]:
        result = filter_(parse(query), data)
        assert len(result) == 1 and result[0] is data[idx]
def test_temporal_json(data_json):
    """Temporal predicates on JSON-style features (dates stored as strings)."""
    for query, idx in [
        ('date_attr BEFORE 2010-01-08T00:00:00.00Z', 0),
        ('date_attr AFTER 2010-01-08T00:00:00.00+01:00', 1),
    ]:
        result = filter_json(parse(query), data_json)
        assert len(result) == 1 and result[0] is data_json[idx]
def test_array(data):
    """Array predicates (equals / contains / contained-by / overlaps) on array_attr."""
    cases = [
        (ast.ArrayEquals, [2, 3], 0),
        (ast.ArrayContains, [1, 2, 3, 4], 1),
        (ast.ArrayContainedBy, [1, 2, 3, 4], 0),
        (ast.ArrayOverlaps, [5, 6, 7], 1),
    ]
    for node_cls, values, idx in cases:
        result = filter_(
            node_cls(
                ast.Attribute('array_attr'),
                values,
            ),
            data,
        )
        assert len(result) == 1 and result[0] is data[idx]
def test_array_json(data_json):
    """Array predicates on JSON-style features."""
    cases = [
        (ast.ArrayEquals, [2, 3], 0),
        (ast.ArrayContains, [1, 2, 3, 4], 1),
        (ast.ArrayContainedBy, [1, 2, 3, 4], 0),
        (ast.ArrayOverlaps, [5, 6, 7], 1),
    ]
    for node_cls, values, idx in cases:
        result = filter_json(
            node_cls(
                ast.Attribute('array_attr'),
                values,
            ),
            data_json,
        )
        assert len(result) == 1 and result[0] is data_json[idx]
def test_spatial(data):
    """Spatial predicates (INTERSECTS / EQUALS / BBOX) against point_attr."""
    for query, idx in [
        ('INTERSECTS(point_attr, ENVELOPE (0 1 0 1))', 0),
        ('EQUALS(point_attr, POINT(2 2))', 1),
        ('BBOX(point_attr, 0.5, 0.5, 1.5, 1.5)', 0),
    ]:
        result = filter_(parse(query), data)
        assert len(result) == 1 and result[0] is data[idx]
def test_spatial_json(data_json):
    """Spatial predicates on JSON features; point_attr maps to the geometry."""
    for query, idx in [
        ('INTERSECTS(point_attr, ENVELOPE (0 1 0 1))', 0),
        ('EQUALS(point_attr, POINT(2 2))', 1),
        ('BBOX(point_attr, 0.5, 0.5, 1.5, 1.5)', 0),
    ]:
        result = filter_json(parse(query), data_json)
        assert len(result) == 1 and result[0] is data_json[idx]
def test_arithmetic(data):
    """Arithmetic inside predicates; int_attr = float_attr - 0.5 holds for both rows."""
    both = filter_(
        parse('int_attr = float_attr - 0.5'),
        data,
    )
    assert len(both) == 2
    single = filter_(
        parse('int_attr = 5 + 20 / 2 - 10'),
        data,
    )
    assert len(single) == 1 and single[0] is data[0]
def test_arithmetic_json(data_json):
    """Arithmetic inside predicates on JSON-style features."""
    both = filter_json(
        parse('int_attr = float_attr - 0.5'),
        data_json,
    )
    assert len(both) == 2
    single = filter_json(
        parse('int_attr = 5 + 20 / 2 - 10'),
        data_json,
    )
    assert len(single) == 1 and single[0] is data_json[0]
def test_function(data):
    """Functions from math (sin) are callable inside filter expressions."""
    matched = filter_(
        parse('sin(float_attr) BETWEEN -0.75 AND -0.70'),
        data,
    )
    assert len(matched) == 1 and matched[0] is data[0]
def test_function_json(data_json):
    """math functions are available when filtering JSON-style features too."""
    matched = filter_json(
        parse('sin(float_attr) BETWEEN -0.75 AND -0.70'),
        data_json,
    )
    assert len(matched) == 1 and matched[0] is data_json[0]
def test_nested(data):
    """Double-quoted attribute paths drill into nested objects."""
    matched = filter_(
        parse('"nested_attr.str_attr" = \'this is a test\''),
        data,
    )
    assert len(matched) == 1 and matched[0] is data[0]
| 26.907071
| 79
| 0.57647
| 1,907
| 13,319
| 3.861038
| 0.059255
| 0.084748
| 0.140568
| 0.136901
| 0.829146
| 0.801847
| 0.773869
| 0.739916
| 0.699307
| 0.679071
| 0
| 0.054054
| 0.288836
| 13,319
| 494
| 80
| 26.961538
| 0.72329
| 0
| 0
| 0.54712
| 0
| 0
| 0.152339
| 0.012614
| 0
| 0
| 0
| 0
| 0.180628
| 1
| 0.075916
| false
| 0
| 0.02356
| 0
| 0.141361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5602f9231245540b7cca0ac1cf03b8c68d0445b5
| 377
|
py
|
Python
|
tomes_darcmail/lib/eaxs/MessageIdType.py
|
StateArchivesOfNorthCarolina/docker_dmc
|
c387a4bcd91346bedf3a4c4f242fb7f35d5a81c1
|
[
"MIT"
] | null | null | null |
tomes_darcmail/lib/eaxs/MessageIdType.py
|
StateArchivesOfNorthCarolina/docker_dmc
|
c387a4bcd91346bedf3a4c4f242fb7f35d5a81c1
|
[
"MIT"
] | 1
|
2018-11-23T17:04:52.000Z
|
2018-11-23T17:04:52.000Z
|
tomes_darcmail/lib/eaxs/MessageIdType.py
|
StateArchivesOfNorthCarolina/docker_dmc
|
c387a4bcd91346bedf3a4c4f242fb7f35d5a81c1
|
[
"MIT"
] | null | null | null |
#############################################################
# 2016-09-26: MessageIdType.py
# Author: Jeremy M. Gibson (State Archives of North Carolina)
#
# Description: Implementation of message-id-type
##############################################################
class MessageId:
    """EAXS message-id-type element (currently a stateless stub).

    Previously carried an empty docstring; documented now for clarity.
    """

    def __init__(self):
        """Constructor for MessageId; the type carries no state yet."""
        pass
| 25.133333
| 62
| 0.440318
| 29
| 377
| 5.586207
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.151194
| 377
| 14
| 63
| 26.928571
| 0.48125
| 0.429708
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
ef05d2aa740bdec1ebb7b20fa40918561cb67d13
| 20
|
py
|
Python
|
checkov/version.py
|
honza1a/checkov
|
3ce607e1cb36dd877eaccb11c351529858c85831
|
[
"Apache-2.0"
] | 1
|
2021-01-26T12:46:32.000Z
|
2021-01-26T12:46:32.000Z
|
checkov/version.py
|
honza1a/checkov
|
3ce607e1cb36dd877eaccb11c351529858c85831
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
honza1a/checkov
|
3ce607e1cb36dd877eaccb11c351529858c85831
|
[
"Apache-2.0"
] | null | null | null |
# Package version string, bumped on every release.
version = "1.0.303"
| 10
| 19
| 0.6
| 4
| 20
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.15
| 20
| 1
| 20
| 20
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ef17362ad4d3532e07bc9a1952fac5063a669b18
| 337
|
py
|
Python
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_02_03CrackerBox_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 33
|
2021-12-15T07:11:47.000Z
|
2022-03-29T08:58:32.000Z
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_02_03CrackerBox_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 3
|
2021-12-15T11:39:54.000Z
|
2022-03-29T07:24:23.000Z
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_02_03CrackerBox_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | null | null | null |
_base_ = (
"./FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_01_02MasterChefCan_bop_test.py"
)
OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/02_03CrackerBox"
DATASETS = dict(TRAIN=("ycbv_003_cracker_box_train_pbr",))
| 56.166667
| 143
| 0.878338
| 45
| 337
| 5.866667
| 0.711111
| 0.083333
| 0.19697
| 0.280303
| 0.515152
| 0.515152
| 0.515152
| 0.515152
| 0.515152
| 0.515152
| 0
| 0.096273
| 0.04451
| 337
| 5
| 144
| 67.4
| 0.723602
| 0
| 0
| 0
| 0
| 0
| 0.807122
| 0.807122
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ef2e8047d01d664419a4c155b9d5c8cbaa1c2239
| 81,172
|
py
|
Python
|
flink-ai-flow/ai_flow/test/rest_endpoint/test_client.py
|
MarvinMiao/flink-ai-extended
|
e45eecf2deea6976ba3d7ba821ffb8d9ce0a17f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2021-06-03T05:37:21.000Z
|
2021-06-03T05:37:21.000Z
|
flink-ai-flow/ai_flow/test/rest_endpoint/test_client.py
|
sentimentist/flink-ai-extended
|
689d000f2db8919fd80e0725a1609918ca4a26f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2021-01-30T11:28:37.000Z
|
2021-01-30T11:28:37.000Z
|
flink-ai-flow/ai_flow/test/rest_endpoint/test_client.py
|
sentimentist/flink-ai-extended
|
689d000f2db8919fd80e0725a1609918ca4a26f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import time
import unittest
from typing import List
from unittest import TestCase
from ai_flow.project.project_config import ProjectConfig
from notification_service.base_notification import EventWatcher
from ai_flow.common.properties import Properties
from ai_flow.common.status import Status
from ai_flow.meta.example_meta import ExampleMeta, DataType, Schema, ExampleSupportType
from ai_flow.meta.job_meta import State
from ai_flow.meta.metric_meta import MetricType, MetricMeta, MetricSummary
from ai_flow.meta.model_meta import ModelType
from ai_flow.model_center.entity.model_version_stage import ModelVersionStage
from ai_flow.rest_endpoint.protobuf.message_pb2 import RESOURCE_ALREADY_EXISTS
from ai_flow.rest_endpoint.service.client.aiflow_client import AIFlowClient
from ai_flow.rest_endpoint.service.exception import AIFlowException
from ai_flow.rest_endpoint.service.server import AIFlowServer, HighAvailableAIFlowServer
from ai_flow.store.db.base_model import base
from ai_flow.test.store.test_sqlalchemy_store import _get_store
_SQLITE_DB_FILE = 'aiflow.db'
_SQLITE_DB_URI = '%s%s' % ('sqlite:///', _SQLITE_DB_FILE)
_PORT = '50051'
client = None
client1 = None
client2 = None
class AIFlowClientTestCases(object):
"""test example"""
def test_save_example_get_example_by_id_and_name(self):
    """Register an example, then fetch it by name; an unknown id (2) returns None."""
    example = client.register_example(name='example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                      data_type='pandas', data_format='csv', description='it is mq data',
                                      stream_uri='mysql://', batch_uri='mysql://', create_time=None,
                                      update_time=1000,
                                      properties=Properties({'a': 'b'}), name_list=['a'],
                                      type_list=[DataType.INT32])
    example_id = client.get_example_by_id(2)
    self.assertIsNone(example_id)
    example_name = client.get_example_by_name('example')
    self.assertEqual('example', example.name)
    self.assertEqual('example', example_name.name)
    self.assertEqual('pandas', example.data_type)
    self.assertEqual('pandas', example_name.data_type)
def test_save_example_with_catalog_by_id_and_name(self):
    """Register an example bound to a hive catalog and verify every catalog field round-trips."""
    client.register_example_with_catalog(name='example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                         catalog_name='my_hive', catalog_connection_uri='/path/to/conf',
                                         catalog_type='hive', catalog_database='my_db', catalog_table='my_table',
                                         catalog_version='2.3.4')
    example_id = client.get_example_by_id(2)
    self.assertIsNone(example_id)
    example_name = client.get_example_by_name('example')
    self.assertEqual('my_hive', example_name.catalog_name)
    self.assertEqual('hive', example_name.catalog_type)
    self.assertEqual('my_db', example_name.catalog_database)
    self.assertEqual('my_table', example_name.catalog_table)
    self.assertEqual('/path/to/conf', example_name.catalog_connection_uri)
    self.assertEqual('2.3.4', example_name.catalog_version)
def test_double_register_example(self):
    """Re-registering identical metadata is idempotent (same uuid/schema);
    registering the same name with different metadata raises AIFlowException."""
    example_1 = client.register_example(name='example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                        data_type='pandas', data_format='csv', description='it is mq data',
                                        stream_uri='mysql://', properties=Properties({'a': 'b'}), name_list=['a'],
                                        type_list=[DataType.INT32])
    example_2 = client.register_example(name='example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                        data_type='pandas', data_format='csv', description='it is mq data',
                                        stream_uri='mysql://', properties=Properties({'a': 'b'}), name_list=['a'],
                                        type_list=[DataType.INT32])
    self.assertEqual(example_1.uuid, example_2.uuid)
    self.assertEqual(example_1.schema.to_json_dict(), example_2.schema.to_json_dict())
    self.assertRaises(AIFlowException, client.register_example, name='example',
                      support_type=ExampleSupportType.EXAMPLE_BATCH, data_format='csv',
                      description='it is mq data', stream_uri='mysql://', create_time=round(time.time()),
                      properties=Properties({'a': 'b'}), name_list=['a'], type_list=[DataType.INT32])
def test_list_examples(self):
    """Two registered examples come back from list_example in registration order."""
    client.register_example(name='example_1', support_type=ExampleSupportType.EXAMPLE_BATCH,
                            data_type='pandas', data_format='csv', description='it is mq data',
                            stream_uri='mysql://', properties=Properties({'a': 'b'}), name_list=['a'],
                            type_list=[DataType.INT32])
    client.register_example(name='example_2', support_type=ExampleSupportType.EXAMPLE_STREAM,
                            data_type='numpy', data_format='npz', description='it is',
                            stream_uri='mysql://', properties=Properties({'a': 'b'}), name_list=['a'],
                            type_list=[DataType.INT32])
    response_list = client.list_example(5, 0)
    self.assertEqual(len(response_list), 2)
    self.assertEqual('example_1', response_list[0].name)
    self.assertEqual('example_2', response_list[1].name)
def test_save_examples_list_example(self):
    """Batch-register two ExampleMeta objects; uuids are assigned sequentially
    and list_example returns them in insertion order."""
    example_1 = ExampleMeta(name='example1', support_type=ExampleSupportType.EXAMPLE_BATCH,
                            data_format='csv',
                            create_time=None, update_time=1000,
                            properties=Properties({'a': 'b'}))
    schema = Schema(name_list=['a', 'b'],
                    type_list=[DataType.STRING, DataType.INT32])
    example_2 = ExampleMeta(name='example2', support_type=ExampleSupportType.EXAMPLE_BATCH,
                            data_format='csv',
                            create_time=None, update_time=1000,
                            properties=Properties({'a': 'b'}), schema=schema)
    response = client.register_examples([example_1, example_2])
    self.assertEqual(len(response), 2)
    self.assertEqual(1, response[0].uuid)
    self.assertEqual(2, response[1].uuid)
    response_list = client.list_example(2, 0)
    self.assertEqual(2, len(response_list))
    self.assertEqual('example1', response_list[0].name)
    self.assertEqual('example2', response_list[1].name)
def test_delete_example(self):
    """Deleting an example by name removes it from both get- and list-queries."""
    example = client.register_example(name='example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                                      data_format='csv',
                                      description='it is mq data',
                                      stream_uri='mysql://',
                                      batch_uri='mysql://',
                                      create_time=None, update_time=1000,
                                      properties=Properties({'a': 'b'}), name_list=['a'],
                                      type_list=[DataType.INT32])
    self.assertEqual(Status.OK, client.delete_example_by_name(example.name))
    self.assertIsNone(client.get_example_by_name(example.name))
    self.assertIsNone(client.list_example(1, 0))
def test_update_example(self):
    """update_example overwrites data/schema fields first, then catalog fields
    in a second update; unspecified fields are preserved."""
    client.register_example(name='example', support_type=ExampleSupportType.EXAMPLE_BATCH,
                            data_type='pandas', data_format='csv', description='it is mq data',
                            stream_uri='mysql://', batch_uri='mysql://', create_time=None, update_time=1000,
                            properties=Properties({'a': 'b'}), name_list=['a'], type_list=[DataType.INT32])
    update_example = client.update_example(example_name='example', data_type='numpy', data_format='npz',
                                           properties=Properties({'kafka': 'localhost:9092'}),
                                           name_list=['b'], type_list=[DataType.STRING])
    example = client.get_example_by_name('example')
    self.assertEqual(example.support_type, update_example.support_type)
    self.assertEqual(example.schema.name_list, update_example.schema.name_list)
    self.assertEqual(example.schema.type_list, update_example.schema.type_list)
    self.assertEqual(example.data_type, 'numpy')
    update_example_1 = client.update_example(example_name='example', catalog_type='hive', catalog_name='my_hive',
                                             catalog_database='my_db', catalog_table='my_table')
    self.assertEqual(update_example_1.catalog_type, 'hive')
    self.assertEqual(update_example_1.catalog_name, 'my_hive')
    self.assertEqual(update_example_1.catalog_database, 'my_db')
    self.assertEqual(update_example_1.catalog_table, 'my_table')
"""test project"""
def test_save_project_get_project_by_id_and_name(self):
    """A registered project is retrievable both by generated id and by name."""
    response = client.register_project(name='project', uri='www.code.com', project_type='GIT')
    project_id = client.get_project_by_id(response.uuid)
    project_name = client.get_project_by_name('project')
    self.assertEqual(project_id.name, 'project')
    self.assertEqual(project_name.name, 'project')
    print(project_id)
def test_double_register_project(self):
    """Identical duplicate registration is tolerated; changing the project type
    or adding a user on the same name raises AIFlowException."""
    client.register_project(name='project', uri='www.code.com', project_type='GIT')
    client.register_project(name='project', uri='www.code.com', project_type='GIT')
    self.assertRaises(AIFlowException, client.register_project, name='project',
                      uri='www.code.com', project_type='NOTEBOOK')
    self.assertRaises(AIFlowException, client.register_project, name='project',
                      user='tom', uri='www.code.com', project_type='GIT')
def test_list_project(self):
    """list_project returns both projects, in registration order, starting
    from the offset just before the first project's uuid."""
    response = client.register_project(name='project', uri='www.code.com',
                                       project_type='GIT')
    client.register_project(name='project1', uri='www.code.com',
                            project_type='GIT')
    project_list = client.list_project(2, response.uuid - 1)
    self.assertEqual(2, len(project_list))
    self.assertEqual('project', project_list[0].name)
    self.assertEqual('project1', project_list[1].name)
def test_delete_project_by_id(self):
    """Deleting a project by id cascades: its model relations, workflow
    executions, model versions and jobs all become unretrievable."""
    project = client.register_project(name='project', uri='www.code.com',
                                      project_type='GIT')
    model = client.register_model_relation(name='model', project_id=project.uuid)
    work_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
                                                        execution_state=State.INIT,
                                                        workflow_json='workflow.yaml',
                                                        signature='hdfs://')
    job = client.register_job(name='job', workflow_execution_id=work_execution.uuid, job_state=State.STARTING,
                              properties=Properties({'a': 'b'}), signature='offset1')
    client.register_model_version_relation(version='1', model_id=model.uuid,
                                           workflow_execution_id=work_execution.uuid)
    # Sanity: everything exists before the delete.
    self.assertEqual(client.get_project_by_id(project.uuid).name, 'project')
    self.assertEqual(client.get_model_relation_by_id(model.uuid).name, 'model')
    self.assertEqual(client.get_workflow_execution_by_id(work_execution.uuid).name, 'execution')
    self.assertEqual(client.get_model_version_relation_by_version('1', 1).version, '1')
    self.assertEqual(client.get_job_by_id(job.uuid).name, 'job')
    self.assertEqual(Status.OK, client.delete_project_by_id(project.uuid))
    # Everything attached to the project is gone after the cascade.
    self.assertIsNone(client.get_project_by_id(project.uuid))
    self.assertIsNone(client.get_model_relation_by_id(model.uuid))
    self.assertIsNone(client.get_workflow_execution_by_id(work_execution.uuid))
    self.assertIsNone(client.get_model_version_relation_by_version('1', model.uuid))
    self.assertIsNone(client.get_job_by_id(job.uuid))
    self.assertIsNone(client.list_project(1, 0))
    self.assertIsNone(client.list_model_relation(1, 0))
    self.assertIsNone(client.list_workflow_execution(1, 0))
    self.assertIsNone(client.list_model_version_relation(1, 1, 0))
    self.assertIsNone(client.list_job(1, 0))
def test_delete_project_by_name(self):
    """Same cascade checks as test_delete_project_by_id, but the post-delete
    project lookup goes through get_project_by_name."""
    project = client.register_project(name='project', uri='www.code.com',
                                      project_type='GIT')
    model = client.register_model_relation(name='model', project_id=project.uuid)
    work_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
                                                        execution_state=State.INIT,
                                                        workflow_json='workflow.yaml',
                                                        signature='hdfs://')
    job = client.register_job(name='job', workflow_execution_id=work_execution.uuid, job_state=State.STARTING,
                              properties=Properties({'a': 'b'}), signature='offset1')
    client.register_model_version_relation(version='1', model_id=model.uuid,
                                           workflow_execution_id=work_execution.uuid)
    self.assertEqual(client.get_project_by_id(project.uuid).name, 'project')
    self.assertEqual(client.get_model_relation_by_id(model.uuid).name, 'model')
    self.assertEqual(client.get_workflow_execution_by_id(work_execution.uuid).name, 'execution')
    self.assertEqual(client.get_model_version_relation_by_version('1', 1).version, '1')
    self.assertEqual(client.get_job_by_id(job.uuid).name, 'job')
    # NOTE(review): despite the test name this deletes via delete_project_by_id —
    # confirm whether a delete_project_by_name API was intended here.
    self.assertEqual(Status.OK, client.delete_project_by_id(project.uuid))
    self.assertIsNone(client.get_project_by_name('project'))
    self.assertIsNone(client.get_model_relation_by_id(model.uuid))
    self.assertIsNone(client.get_workflow_execution_by_id(work_execution.uuid))
    self.assertIsNone(client.get_model_version_relation_by_version('1', model.uuid))
    self.assertIsNone(client.get_job_by_id(job.uuid))
    self.assertIsNone(client.list_project(1, 0))
    self.assertIsNone(client.list_model_relation(1, 0))
    self.assertIsNone(client.list_workflow_execution(1, 0))
    self.assertIsNone(client.list_model_version_relation(1, 1, 0))
    self.assertIsNone(client.list_job(1, 0))
def test_update_project(self):
    """update_project overwrites the uri; the stored project reflects the change
    and the project type is left intact."""
    client.register_project(name='project', uri='www.code.com',
                            project_type='GIT')
    update_project = client.update_project(project_name='project', uri='git@alibaba.com')
    project = client.get_project_by_name('project')
    self.assertEqual(update_project.uri, project.uri)
    self.assertEqual(update_project.project_type, project.project_type)
"""test workflow execution"""
def test_save_workflow_execution_get_by_id_and_name(self):
    """A registered workflow execution is retrievable both by id and by name."""
    project = client.register_project(name='project', uri='www.code.com',
                                      project_type='GIT')
    workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
                                                            execution_state=State.INIT,
                                                            workflow_json='workflow.yaml',
                                                            signature='hdfs://')
    execution_id = client.get_workflow_execution_by_id(workflow_execution.uuid)
    execution_name = client.get_workflow_execution_by_name('execution')
    self.assertEqual('execution', execution_id.name)
    self.assertEqual('execution', execution_name.name)
    print(execution_id)
def test_list_workflow_execution(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
client.register_workflow_execution(name='execution1', project_id=1,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
execution_list = client.list_workflow_execution(2, 0)
self.assertEqual(2, len(execution_list))
self.assertEqual('execution', execution_list[0].name)
self.assertEqual('execution1', execution_list[1].name)
for execution in execution_list:
print(execution)
def test_update_workflow_execution(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
now = int(time.time() * 1000)
client.update_workflow_execution(execution_name='execution',
execution_state=State.FINISHED, end_time=now)
execution = client.get_workflow_execution_by_name('execution')
self.assertEqual(execution.execution_state, State.FINISHED)
self.assertEqual(execution.end_time, now)
client.update_workflow_execution(execution_name='execution')
execution = client.get_workflow_execution_by_name('execution')
self.assertEqual(execution.execution_state, State.FINISHED)
self.assertEqual(execution.end_time, now)
print(execution.to_json_dict())
def test_update_workflow_execution_end_time(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
now = int(time.time() * 1000)
self.assertEqual(1, client.update_workflow_execution_end_time(now, 'execution'))
self.assertEqual(now, client.get_workflow_execution_by_name('execution').end_time)
print(client.get_workflow_execution_by_name('execution'))
def test_update_workflow_execution_state(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.STARTING,
workflow_json='workflow.yaml',
signature='git://')
self.assertEqual(State.STARTING, client.get_workflow_execution_by_name('execution').execution_state)
self.assertEqual(1, client.update_workflow_execution_state(State.FINISHED, 'execution'))
self.assertEqual(State.FINISHED, client.get_workflow_execution_by_name('execution').execution_state)
print(client.get_workflow_execution_by_name('execution'))
def test_delete_workflow_execution_by_id(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
start_time=122,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
job = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid, job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
self.assertEqual(client.get_workflow_execution_by_id(workflow_execution.uuid).workflow_json, 'workflow.yaml')
self.assertEqual(client.get_job_by_id(job.uuid).name, 'job')
self.assertEqual(Status.OK, client.delete_workflow_execution_by_id(workflow_execution.uuid))
self.assertIsNone(client.get_workflow_execution_by_name('execution'))
self.assertIsNone(client.get_job_by_name('job'))
def test_delete_workflow_execution_by_name(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
client.register_job(name='job', workflow_execution_id=workflow_execution.uuid, job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
self.assertEqual(client.get_workflow_execution_by_name('execution').name, 'execution')
self.assertEqual(client.get_job_by_name('job').name, 'job')
self.assertEqual(Status.OK, client.delete_workflow_execution_by_name('execution'))
self.assertIsNone(client.get_workflow_execution_by_name('execution'))
self.assertIsNone(client.get_job_by_name('job'))
"""test job"""
def test_save_job_get_by_id_and_name(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
response = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid,
job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
job_id = client.get_job_by_id(response.uuid)
job_name = client.get_job_by_name('job')
self.assertEqual('job', job_id.name)
self.assertEqual('job', job_name.name)
print(job_id)
def test_list_job(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
response = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid,
job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
client.register_job(name='job1', workflow_execution_id=1, job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
job_list = client.list_job(2, response.uuid - 1)
self.assertEqual(2, len(job_list))
self.assertEqual('job', job_list[0].name)
self.assertEqual('job1', job_list[1].name)
for job in job_list:
print(job)
def test_update_Job(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
job = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid,
job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
client.update_job(job_name='job', job_state=State.FINISHED)
self.assertEqual(client.get_job_by_name('job').job_state, State.FINISHED)
self.assertIsNone(client.get_job_by_name('job').end_time)
now = int(time.time() * 1000)
client.update_job(job_name='job', job_state=None, end_time=now)
self.assertEqual(client.get_job_by_name('job').job_state, State.FINISHED)
self.assertEqual(client.get_job_by_name('job').end_time, now)
def test_update_job(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
job = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid, job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
job_id = client.get_job_by_id(job.uuid)
self.assertEqual(State.STARTING, job_id.job_state)
print(client.update_job_state(State.FAILED, 'job'))
job_response = client.get_job_by_name('job')
self.assertEqual(job_response.job_state, State.FAILED)
self.assertEqual(None, job_response.end_time)
print(client.update_job_end_time(1000, 'job'))
job_response = client.get_job_by_name('job')
self.assertEqual(1000, job_response.end_time)
def test_delete_job_by_id(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
job = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid, job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
self.assertEqual(client.get_job_by_id(job.uuid).name, 'job')
self.assertEqual(Status.OK, client.delete_job_by_id(job.uuid))
self.assertIsNone(client.get_job_by_id(job.uuid))
def test_delete_job_by_name(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
job = client.register_job(name='job', workflow_execution_id=workflow_execution.uuid, job_state=State.STARTING,
properties=Properties({'a': 'b'}), signature='offset1')
self.assertEqual(client.get_job_by_id(job.uuid).name, 'job')
self.assertEqual(Status.OK, client.delete_job_by_name('job'))
self.assertIsNone(client.get_job_by_id(job.uuid))
"""test model"""
def test_model_api(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model(model_name='test_register_model1', model_type=ModelType.SAVED_MODEL,
model_desc='test register model1', project_id=project.uuid)
self.assertIsNone(client.get_model_by_name('no'))
self.assertIsNone(client.get_model_by_id(2))
self.assertEqual(client.get_model_by_id(model.uuid).name, 'test_register_model1')
self.assertEqual(client.get_model_by_name('test_register_model1').name, 'test_register_model1')
self.assertEqual(client.get_model_by_name('test_register_model1').model_desc, 'test register model1')
client.register_model(model_name='test_register_model2', model_type=ModelType.SAVED_MODEL,
model_desc='test register model2', project_id=1)
self.assertEqual(len(client.list_model_relation(10, 0)), 2)
client.delete_model_by_id(model.uuid)
client.delete_model_by_name('test_register_model2')
self.assertIsNone(client.list_model_relation(10, 0))
self.assertEqual(len(client.list_registered_models()), 0)
def test_get_deployed_model_version(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model(model_name='test_register_model1', model_type=ModelType.SAVED_MODEL,
model_desc='test register model1', project_id=project.uuid)
model_version = client.register_model_version(model=model.uuid, model_path='/path/to/your/model/version')
deployed_model_version = client.get_deployed_model_version(model_name=model.name)
self.assertIsNone(deployed_model_version)
client.update_model_version(model_name=model.name, model_version=model_version.version,
current_stage=ModelVersionStage.DEPLOYED)
deployed_model_version = client.get_deployed_model_version(model_name=model.name)
self.assertEqual(deployed_model_version.version, model_version.version)
self.assertRaises(AIFlowException,
client.update_model_version, model_name=model.name, model_version=model_version.version,
current_stage=ModelVersionStage.DEPLOYED)
def test_save_model_get_id_and_name(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
response = client.register_model_relation(name='model', project_id=project.uuid)
model_id = client.get_model_relation_by_id(response.uuid)
model_name = client.get_model_relation_by_name('model')
self.assertEqual(model_id.name, model_name.name)
self.assertEqual(1, len(client.list_model_relation(2, response.uuid - 1)))
print(model_id)
def test_list_model(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
client.register_model_relation(name='model', project_id=project.uuid)
client.register_model_relation(name='model1', project_id=project.uuid)
self.assertEqual(2, len(client.list_model_relation(2, 0)))
self.assertEqual('model', client.list_model_relation(2, 0)[0].name)
self.assertEqual('model1', client.list_model_relation(2, 0)[1].name)
def test_delete_model_by_id(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model_relation = client.register_model_relation(name='model', project_id=project.uuid)
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
client.register_model_version_relation(version='1', model_id=model_relation.uuid,
workflow_execution_id=workflow_execution.uuid)
self.assertEqual(client.get_model_version_relation_by_version('1', model_relation.uuid).version, '1')
self.assertEqual(client.get_model_relation_by_name('model').name, 'model')
self.assertEqual(Status.OK, client.delete_model_relation_by_id(model_relation.uuid))
self.assertIsNone(client.get_model_version_relation_by_version('1', model_relation.uuid))
self.assertIsNone(client.get_model_relation_by_name('model'))
def test_delete_model_by_name(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model_relation = client.register_model_relation(name='model', project_id=project.uuid)
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
client.register_model_version_relation(version='1', model_id=model_relation.uuid,
workflow_execution_id=workflow_execution.uuid)
self.assertEqual(client.get_model_version_relation_by_version('1', model_relation.uuid).version, '1')
self.assertEqual(client.get_model_relation_by_name('model').name, 'model')
self.assertEqual(Status.OK, client.delete_model_relation_by_name('model'))
self.assertIsNone(client.get_model_version_relation_by_version('1', model_relation.uuid))
self.assertIsNone(client.get_model_relation_by_name('model'))
"""test model version"""
def test_model_version_api(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model(model_name='test_register_model', model_type=ModelType.SAVED_MODEL,
model_desc='test register model', project_id=project.uuid)
self.assertIsNone(client.get_model_version_by_version('1', model.uuid))
self.assertEqual(client.get_model_by_id(model.uuid).name, 'test_register_model')
self.assertEqual(client.get_model_by_name('test_register_model').name, 'test_register_model')
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
response = client.register_model_version(model=model.uuid,
workflow_execution_id=workflow_execution.uuid,
model_path='fs://source1.pkl', model_metric='http://metric1',
version_desc='test model version 1',
current_stage=ModelVersionStage.GENERATED)
self.assertEqual(response.version, '1')
model_version_meta = client.get_model_version_by_version(response.version, model.uuid)
self.assertEqual(model_version_meta.version, '1')
self.assertEqual(model_version_meta.model_path, 'fs://source1.pkl')
self.assertEqual(model_version_meta.model_metric, 'http://metric1')
self.assertIsNone(model_version_meta.model_flavor)
self.assertEqual(model_version_meta.version_desc, 'test model version 1')
response = client.update_model_version(model_name=model.name, model_version='1',
current_stage=ModelVersionStage.DEPLOYED)
self.assertEqual(response.current_stage, ModelVersionStage.DEPLOYED)
response = client.get_deployed_model_version(model.name)
self.assertEqual(response.version, '1')
response = client.register_model_version(model=model.uuid,
workflow_execution_id=workflow_execution.uuid,
model_path='fs://source2.pkl', model_metric='http://metric2',
model_flavor='{"flavor.version":2}',
version_desc='test model version 2')
self.assertEqual(response.version, '2')
self.assertEqual(len(client.list_model_version_relation(1, 10, 0)), 2)
client.delete_model_version_by_version(version='2', model_id=1)
self.assertEqual(len(client.list_model_version_relation(1, 10, 0)), 1)
# register model version with deleted model version name
response = client.register_model_version(model=model.uuid,
workflow_execution_id=workflow_execution.uuid,
model_path='fs://source1.pkl', model_metric='http://metric1',
version_desc='test model version 1')
self.assertEqual(response.version, '2')
model_version_meta = client.get_model_version_by_version(response.version, model.uuid)
self.assertEqual(model_version_meta.version, '2')
self.assertEqual(model_version_meta.model_path, 'fs://source1.pkl')
self.assertEqual(model_version_meta.model_metric, 'http://metric1')
self.assertIsNone(model_version_meta.model_flavor)
self.assertEqual(model_version_meta.version_desc, 'test model version 1')
def test_get_latest_model_version(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model(model_name='test_register_model', model_type=ModelType.SAVED_MODEL,
model_desc='test register model', project_id=project.uuid)
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
response_1 = client.register_model_version(model=model.uuid,
workflow_execution_id=workflow_execution.uuid,
model_path='fs://source1.pkl', model_metric='http://metric1',
version_desc='test model version 1',
current_stage=ModelVersionStage.GENERATED)
new_generated_model_version_1 = client.get_latest_generated_model_version(model.name)
new_validated_model_version_1 = client.get_latest_validated_model_version(model.name)
self.assertIsNone(new_validated_model_version_1)
self.assertEqual(response_1.version, new_generated_model_version_1.version)
client.update_model_version(model_name=model.name, model_version=response_1.version,
current_stage=ModelVersionStage.VALIDATED)
new_validated_model_version_2 = client.get_latest_validated_model_version(model.name)
self.assertEqual(new_validated_model_version_2.version, response_1.version)
response_2 = client.register_model_version(model=model.uuid,
workflow_execution_id=workflow_execution.uuid,
model_path='fs://source1.pkl', model_metric='http://metric1',
version_desc='test model version 1',
current_stage=ModelVersionStage.GENERATED)
new_generated_model_version_2 = client.get_latest_generated_model_version(model.name)
client.update_model_version(model_name=model.name, model_version=response_2.version,
current_stage=ModelVersionStage.VALIDATED)
new_validated_model_version_2 = client.get_latest_validated_model_version(model.name)
self.assertEqual(new_validated_model_version_2.version, response_2.version)
self.assertEqual(response_2.version, new_generated_model_version_2.version)
def test_save_model_version_get_by_version(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model_relation(name='model', project_id=project.uuid)
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
response = client.register_model_version_relation(version='1', model_id=model.uuid,
workflow_execution_id=workflow_execution.uuid)
self.assertEqual(response.version, '1')
self.assertEqual(client.get_model_version_relation_by_version(response.version, model.uuid).version, '1')
self.assertEqual(len(client.list_model_version_relation(model.uuid, 2, 0)), 1)
print(client.get_model_version_relation_by_version(response.version, model.uuid))
def test_list_model_version(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model_relation(name='model', project_id=project.uuid)
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
client.register_model_version_relation(version='1', model_id=model.uuid,
workflow_execution_id=workflow_execution.uuid)
client.register_model_version_relation(version='2', model_id=model.uuid,
workflow_execution_id=workflow_execution.uuid)
self.assertEqual(len(client.list_model_version_relation(1, 2, 0)), 2)
self.assertEqual(client.list_model_version_relation(1, 2, 0)[0].version, '1')
self.assertEqual(client.list_model_version_relation(1, 2, 0)[1].version, '2')
def test_delete_model_version_by_version(self):
project = client.register_project(name='project', uri='www.code.com',
project_type='GIT')
model = client.register_model_relation(name='model', project_id=project.uuid)
workflow_execution = client.register_workflow_execution(name='execution', project_id=project.uuid,
execution_state=State.INIT,
workflow_json='workflow.yaml',
signature='git://')
client.register_model_version_relation(version='1', model_id=model.uuid,
workflow_execution_id=workflow_execution.uuid)
self.assertEqual(client.get_model_version_relation_by_version('1', model.uuid).version, '1')
client.delete_model_version_relation_by_version('1', model.uuid)
self.assertIsNone(client.get_model_version_relation_by_version('1', model.uuid))
"""test artifact"""
def test_save_artifact_get_artifact_by_id_and_name(self):
artifact = client.register_artifact(name='artifact', data_format='json', batch_uri='./artifact.json')
artifact_id = client.get_artifact_by_id(artifact.uuid)
artifact_name = client.get_artifact_by_name(artifact.name)
self.assertEqual(artifact.data_format, artifact_id.data_format)
self.assertEqual('artifact', artifact_name.name)
def test_double_save_artifact(self):
artifact_1 = client.register_artifact(name='artifact', data_format='json', batch_uri='./artifact.json')
artifact_2 = client.register_artifact(name='artifact', data_format='json', batch_uri='./artifact.json')
self.assertEqual(artifact_1.to_json_dict(), artifact_2.to_json_dict())
self.assertRaises(AIFlowException, client.register_artifact, name='artifact', data_format='json',
batch_uri='./artifact.json', stream_uri='./artifact.json')
def test_save_artifact_list_artifact(self):
client.register_artifact(name='artifact', data_format='json', batch_uri='./artifact.json')
client.register_artifact(name='artifact_1', data_format='json', batch_uri='./artifact.json')
self.assertEqual(2, len(client.list_artifact(2, 0)))
def test_delete_artifact_by_id_and_name(self):
client.register_artifact(name='artifact', data_format='json', batch_uri='./artifact.json')
client.register_artifact(name='artifact_1', data_format='json', batch_uri='./artifact.json')
self.assertIsNotNone(client.get_artifact_by_id(1))
self.assertIsNotNone(client.get_artifact_by_name('artifact_1'))
self.assertEqual(Status.OK, client.delete_artifact_by_id(1))
self.assertEqual(Status.OK, client.delete_artifact_by_name('artifact_1'))
self.assertEqual(Status.ERROR, client.delete_artifact_by_name('no artifact'))
self.assertIsNone(client.get_artifact_by_id(1))
self.assertIsNone(client.get_artifact_by_name('artifact_1'))
def test_update_artifact(self):
client.register_artifact(name='artifact', data_format='json', batch_uri='./artifact.json')
artifact = client.update_artifact(artifact_name='artifact', data_format='csv', batch_uri='../..')
artifact_id = client.get_artifact_by_id(artifact.uuid)
self.assertEqual(artifact_id.data_format, 'csv')
self.assertEqual(artifact_id.batch_uri, '../..')
def test_create_registered_model(self):
model_name = 'test_create_registered_model'
model_type1 = ModelType.CHECKPOINT
model_desc = 'test create registered model'
response = client.create_registered_model(model_name=model_name, model_type=model_type1, model_desc=model_desc)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_type, model_type1)
self.assertEqual(response.model_desc, model_desc)
model_type2 = ModelType.SAVED_MODEL
with self.assertRaises(AIFlowException) as exception_context:
client.create_registered_model(model_name=model_name, model_type=model_type2)
assert exception_context.exception.error_code == str(RESOURCE_ALREADY_EXISTS)
def test_double_register_model(self):
model_name = 'test_create_registered_model'
model_type = ModelType.CHECKPOINT
model_desc = 'test create registered model'
client.create_registered_model(model_name=model_name, model_type=model_type, model_desc=model_desc)
client.create_registered_model(model_name=model_name, model_type=model_type, model_desc=model_desc)
self.assertRaises(AIFlowException, client.create_registered_model, model_name=model_name,
model_type=model_type,
model_desc='')
project = client.register_project(name='project')
client.register_model(model_name=model_name, project_id=project.uuid, model_type=model_type,
model_desc=model_desc)
client.register_model(model_name=model_name, project_id=project.uuid, model_type=model_type,
model_desc=model_desc)
self.assertRaises(AIFlowException, client.register_model, model_name=model_name,
project_id=project.uuid, model_type=model_type,
model_desc='')
def test_update_registered_model(self):
model_name1 = 'test_update_registered_model1'
model_type1 = ModelType.CHECKPOINT
model_desc1 = 'test update registered model1'
response = client.create_registered_model(model_name=model_name1, model_type=model_type1,
model_desc=model_desc1)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name1)
self.assertEqual(response.model_type, model_type1)
model_name2 = 'test_update_registered_model2'
model_type2 = ModelType.SAVED_MODEL
model_desc2 = 'test update registered model2'
response = client.update_registered_model(model_name=model_name1, new_name=model_name2, model_type=model_type2,
model_desc=model_desc2)
self.assertEqual(response.model_name, model_name2)
self.assertEqual(response.model_type, model_type2)
self.assertEqual(response.model_desc, model_desc2)
def test_delete_registered_model(self):
model_name = 'test_delete_registered_model'
model_type = ModelType.CHECKPOINT
model_desc = 'test delete registered model'
response = client.create_registered_model(model_name=model_name, model_type=model_type, model_desc=model_desc)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
client.delete_registered_model(model_name=model_name)
response = client.get_registered_model_detail(model_name=model_name)
self.assertIsNone(response)
def test_list_registered_model(self):
model_name1 = 'test_list_registered_model1'
model_type1 = ModelType.CHECKPOINT
model_desc1 = 'test list registered model1'
response = client.create_registered_model(model_name=model_name1, model_type=model_type1,
model_desc=model_desc1)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name1)
self.assertEqual(response.model_type, model_type1)
model_name2 = 'test_list_registered_model2'
model_type2 = ModelType.SAVED_MODEL
model_desc2 = 'test list registered model2'
response = client.create_registered_model(model_name=model_name2, model_type=model_type2,
model_desc=model_desc2)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name2)
self.assertEqual(response.model_type, model_type2)
response = client.list_registered_models()
self.assertEqual(len(response), 2)
self.assertEqual(response[0].model_name, model_name1)
self.assertEqual(response[0].model_type, model_type1)
self.assertEqual(response[1].model_name, model_name2)
self.assertEqual(response[1].model_type, model_type2)
def test_get_registered_model_detail(self):
model_name = 'test_get_registered_model_detail'
model_type = ModelType.CHECKPOINT
model_desc = 'test get registered model detail'
response = client.create_registered_model(model_name=model_name, model_type=model_type, model_desc=model_desc)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
response = client.get_registered_model_detail(model_name=model_name)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_type, model_type)
self.assertEqual(response.model_desc, model_desc)
model_path1 = 'fs://source1.pkl'
model_metric1 = 'http://metric1'
model_flavor1 = '{"flavor.version":1}'
version_desc1 = 'test get registered model detail1'
response = client.create_model_version(model_name=model_name, model_path=model_path1,
model_metric=model_metric1, model_flavor=model_flavor1,
version_desc=version_desc1)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_version, '1')
self.assertEqual(response.model_path, model_path1)
self.assertEqual(response.model_metric, model_metric1)
self.assertEqual(response.model_flavor, model_flavor1)
self.assertEqual(response.version_desc, version_desc1)
response = client.get_registered_model_detail(model_name=model_name)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_type, model_type)
self.assertEqual(response.model_desc, model_desc)
model_version = response.model_version
self.assertEqual(model_version.model_version, '1')
self.assertEqual(model_version.model_path, model_path1)
self.assertEqual(model_version.model_metric, model_metric1)
self.assertEqual(model_version.model_flavor, model_flavor1)
self.assertEqual(model_version.version_desc, version_desc1)
model_path2 = 'fs://source2.pkl'
model_metric2 = 'http://metric2'
model_flavor2 = '{"flavor.version":2}'
version_desc2 = 'test get registered model detail2'
response = client.create_model_version(model_name=model_name, model_path=model_path2,
model_metric=model_metric2, model_flavor=model_flavor2,
version_desc=version_desc2)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_version, '2')
self.assertEqual(response.model_path, model_path2)
self.assertEqual(response.model_metric, model_metric2)
self.assertEqual(response.model_flavor, model_flavor2)
self.assertEqual(response.version_desc, version_desc2)
response = client.get_registered_model_detail(model_name=model_name)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_type, model_type)
self.assertEqual(response.model_desc, model_desc)
model_version = response.model_version
self.assertEqual(model_version.model_version, '2')
self.assertEqual(model_version.model_path, model_path2)
self.assertEqual(model_version.model_metric, model_metric2)
self.assertEqual(model_version.model_flavor, model_flavor2)
self.assertEqual(model_version.version_desc, version_desc2)
def test_create_model_version(self):
model_name = 'test_create_model_version'
model_type = ModelType.CHECKPOINT
model_desc = 'test create model version'
response = client.create_registered_model(model_name=model_name, model_type=model_type, model_desc=model_desc)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
model_path1 = 'fs://source1.pkl'
model_metric1 = 'http://metric1'
model_flavor1 = '{"flavor.version":1}'
version_desc1 = 'test create model version1'
response = client.create_model_version(model_name=model_name, model_path=model_path1,
model_metric=model_metric1, model_flavor=model_flavor1,
version_desc=version_desc1)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_version, '1')
self.assertEqual(response.model_path, model_path1)
self.assertEqual(response.model_metric, model_metric1)
self.assertEqual(response.model_flavor, model_flavor1)
self.assertEqual(response.version_desc, version_desc1)
model_path2 = 'fs://source2.pkl'
model_metric2 = 'http://metric2'
model_flavor2 = '{"flavor.version":2}'
version_desc2 = 'test create model version2'
response = client.create_model_version(model_name=model_name, model_path=model_path2,
model_metric=model_metric2, model_flavor=model_flavor2,
version_desc=version_desc2)
self.assertIsNotNone(response)
self.assertEqual(response.model_name, model_name)
self.assertEqual(response.model_version, '2')
self.assertEqual(response.model_path, model_path2)
self.assertEqual(response.model_metric, model_metric2)
self.assertEqual(response.model_flavor, model_flavor2)
self.assertEqual(response.version_desc, version_desc2)
def test_update_model_version(self):
    """Create a model version, update every field, then bump only the stage."""
    model_name = 'test_update_model_version'
    response = client.create_registered_model(model_name=model_name,
                                              model_type=ModelType.CHECKPOINT,
                                              model_desc='test update model version')
    self.assertIsNotNone(response)
    self.assertEqual(response.model_name, model_name)

    def check(resp, path, metric, flavor, desc, stage):
        # Field-by-field verification shared by the create and update responses.
        self.assertIsNotNone(resp)
        self.assertEqual(resp.model_name, model_name)
        self.assertEqual(resp.model_version, '1')
        self.assertEqual(resp.model_path, path)
        self.assertEqual(resp.model_metric, metric)
        self.assertEqual(resp.model_flavor, flavor)
        self.assertEqual(resp.version_desc, desc)
        self.assertEqual(resp.current_stage, stage)

    response = client.create_model_version(model_name=model_name, model_path='fs://source1.pkl',
                                           model_metric='http://metric1',
                                           model_flavor='{"flavor.version":1}',
                                           version_desc='test update model version1',
                                           current_stage=ModelVersionStage.GENERATED)
    check(response, 'fs://source1.pkl', 'http://metric1', '{"flavor.version":1}',
          'test update model version1', ModelVersionStage.GENERATED)
    response = client.update_model_version(model_name=model_name, model_version='1',
                                           model_path='fs://source2.pkl',
                                           model_metric='http://metric2',
                                           model_flavor='{"flavor.version":2}',
                                           version_desc='test update model version2',
                                           current_stage=ModelVersionStage.VALIDATED)
    check(response, 'fs://source2.pkl', 'http://metric2', '{"flavor.version":2}',
          'test update model version2', ModelVersionStage.VALIDATED)
    # A partial update may change the stage alone.
    response = client.update_model_version(model_name=model_name, model_version='1',
                                           current_stage=ModelVersionStage.DEPLOYED)
    self.assertEqual(response.current_stage, ModelVersionStage.DEPLOYED)
def test_delete_model_version(self):
    """Verify a deleted model version can no longer be fetched."""
    name = 'test_delete_model_version'
    created = client.create_registered_model(model_name=name,
                                             model_type=ModelType.CHECKPOINT,
                                             model_desc='test delete model version')
    self.assertIsNotNone(created)
    self.assertEqual(created.model_name, name)
    fields = {'model_path': 'fs://source.pkl',
              'model_metric': 'http://metric',
              'model_flavor': '{"flavor.version":1}',
              'version_desc': 'test delete model version'}
    version = client.create_model_version(model_name=name, **fields)
    self.assertIsNotNone(version)
    self.assertEqual(version.model_name, name)
    self.assertEqual(version.model_version, '1')
    for attr, expected in fields.items():
        self.assertEqual(getattr(version, attr), expected)
    client.delete_model_version(name, '1')
    # After deletion the detail lookup must return nothing.
    self.assertIsNone(client.get_model_version_detail(name, '1'))
def test_get_model_version_detail(self):
    """Check that a created model version is returned intact by the detail query."""
    name = 'test_get_model_version_detail'
    created = client.create_registered_model(model_name=name,
                                             model_type=ModelType.CHECKPOINT,
                                             model_desc='test get model version detail')
    self.assertIsNotNone(created)
    self.assertEqual(created.model_name, name)
    expected = {'model_path': 'fs://source.pkl',
                'model_metric': 'http://metric',
                'model_flavor': '{"flavor.version":1}',
                'version_desc': 'test get model version detail'}
    # Both the create response and the subsequent detail lookup must echo the fields.
    for resp in (client.create_model_version(model_name=name, **expected),
                 client.get_model_version_detail(name, '1')):
        self.assertIsNotNone(resp)
        self.assertEqual(resp.model_name, name)
        self.assertEqual(resp.model_version, '1')
        for attr, value in expected.items():
            self.assertEqual(getattr(resp, attr), value)
def test_update_and_list_notification(self):
    """Publish events under one key and verify list ordering plus version filtering."""
    key = 'test_publish_event_key'
    value1 = 'test_publish_event_value1'
    response = client.publish_event(key=key, value=value1)
    self.assertIsNotNone(response)
    self.assertEqual(response.key, key)
    self.assertEqual(response.value, value1)
    # The server assigns a strictly positive version number.
    self.assertTrue(response.version > 0)
    notifications = client.list_events(key=key)
    self.assertEqual(len(notifications), 1)
    self.assertEqual(notifications[0].key, key)
    self.assertEqual(notifications[0].value, value1)
    self.assertEqual(notifications[0].version, response.version)
    # version=0 returns the full history (everything newer than version 0).
    notifications = client.list_events(key=key, version=0)
    self.assertEqual(len(notifications), 1)
    self.assertEqual(notifications[0].key, key)
    self.assertEqual(notifications[0].value, value1)
    value2 = 'test_publish_event_value2'
    old_response = response
    response = client.publish_event(key=key, value=value2)
    self.assertIsNotNone(response)
    # Each publish bumps the version by exactly one.
    self.assertEqual(response.version, old_response.version + 1)
    notifications = client.list_events(key=key)
    self.assertEqual(len(notifications), 2)
    self.assertEqual(notifications[1].key, key)
    self.assertEqual(notifications[1].value, value2)
    self.assertEqual(notifications[1].version, old_response.version + 1)
    # Filtering by the previous version returns only the newer event.
    notifications = client.list_events(key=key, version=old_response.version)
    self.assertEqual(len(notifications), 1)
    self.assertEqual(notifications[0].key, key)
    self.assertEqual(notifications[0].value, value2)
    old_response = response
    # Publishing a duplicate value still bumps the version.
    response = client.publish_event(key=key, value=value2)
    self.assertIsNotNone(response)
    self.assertEqual(response.version, old_response.version + 1)
    notifications = client.list_events(key=key)
    self.assertEqual(len(notifications), 3)
    self.assertEqual(notifications[2].key, key)
    self.assertEqual(notifications[2].value, value2)
    self.assertEqual(notifications[2].version, old_response.version + 1)
    notifications = client.list_events(key=key, version=old_response.version)
    self.assertEqual(len(notifications), 1)
    self.assertEqual(notifications[0].key, key)
    self.assertEqual(notifications[0].value, value2)
def test_listen_notification(self):
    """Exercise start/stop of event listeners across several clients and keys.

    The watchers only print what they receive; correctness here is mostly
    "no exception and callbacks fire" rather than asserted content.
    """
    class TestWatcher(EventWatcher):
        # Minimal watcher: asserts each callback delivers at least one notification.
        def __init__(self, event_type, test_case: TestCase):
            super(TestWatcher, self).__init__()
            self.event_type = event_type
            self.test_case = test_case

        def process(self, notifications):
            self.test_case.assertNotEqual(len(notifications), 0)
            for notification in notifications:
                print(notification)

    event_type1 = 'test_listen_notification1'
    key1 = 'test_listen_notification_key1'
    # Two watchers on the same key from one client, plus one watcher per extra client.
    client.start_listen_event(key=key1,
                              watcher=TestWatcher(event_type1, self))
    client.start_listen_event(key=key1,
                              watcher=TestWatcher(event_type1, self))
    client1.start_listen_event(key=key1,
                               watcher=TestWatcher(event_type1, self))
    client2.start_listen_event(key=key1,
                               watcher=TestWatcher(event_type1, self))
    value1 = 'test_listen_notification_value1'
    client.publish_event(key=key1, value=value1)
    value2 = 'test_listen_notification_value2'
    client.publish_event(key=key1, value=value2)
    # Give the background listeners time to deliver the first batch.
    time.sleep(10)
    value3 = 'test_listen_notification_value3'
    client.publish_event(key=key1, value=value3)
    time.sleep(1)
    client.stop_listen_event(key1)
    client1.stop_listen_event(key1)
    client2.stop_listen_event(key1)
    # NOTE(review): events below are published BEFORE listening starts — presumably
    # they are still delivered once listeners attach; confirm against server semantics.
    key2 = 'test_listen_notification_key2'
    client.publish_event(key=key2, value=value1)
    client.publish_event(key=key2, value=value2)
    event_type2 = 'test_listen_notification2'
    client.start_listen_event(key=key2,
                              watcher=TestWatcher(event_type2, self))
    client1.start_listen_event(key=key2,
                               watcher=TestWatcher(event_type2, self))
    client2.start_listen_event(key=key2,
                               watcher=TestWatcher(event_type2, self))
    time.sleep(10)
    client.publish_event(key=key2, value=value3)
    time.sleep(1)
    client.stop_listen_event(key2)
    client1.stop_listen_event(key2)
    client2.stop_listen_event(key2)
# def test_submit_workflow(self):
#
# def create_job(index) -> BaseJob:
# job: BaseJob = LocalCMDJob(exec_cmd='echo "hello {}" && sleep 1'.format(str(index)),
# job_context=JobContext(),
# job_config=BaseJobConfig(engine="cmd_line", platform="local"))
# job.instance_id = str(index)
# return job
#
# def create_workflow() -> Workflow:
# ex_workflow = Workflow()
# for i in range(3):
# job = create_job(i)
# ex_workflow.add_job(job)
# deps = [JobControlEdge(target_node_id='0', source_node_id='2',
# signal_config=SignalConfig(signal_key=generate_job_status_key('0'),
# signal_value=State.FINISHED.value)),
# JobControlEdge(target_node_id='1', source_node_id='2',
# signal_config=SignalConfig(signal_key=generate_job_status_key('1'),
# signal_value=State.FINISHED.value))]
# ex_workflow.add_edges("2", deps)
# workflow_meta = client.register_workflow_execution(name=generate_time_str(),
# project_id=None,
# execution_state=State.INIT,
# workflow_json=dumps(ex_workflow))
# ex_workflow.workflow_id = workflow_meta.uuid
# return ex_workflow
#
# workflow = create_workflow()
# res = client.submit_workflow(json_utils.dumps(workflow))
# self.assertEqual(0, res[0])
# workflow_id = res[1]
# res = client.stop_workflow(workflow_id=workflow_id)
# self.assertEqual(0, res[0])
# while client.is_alive_workflow(workflow_id)[1]:
# time.sleep(1)
# self.assertEqual(1, res[0])
# execution_meta = client.get_workflow_execution_by_id(workflow_id)
# self.assertEqual(State.FINISHED, execution_meta.execution_state)
def test_dataset_metric_meta(self):
    """Register dataset metric metadata, update it, and list metas for one dataset."""
    project, work_execution, job = self.register_workflow_job()
    start = round(time.time())
    end = start + 1
    # res[2] holds the payload object (the first two slots appear to be
    # status/message — TODO confirm against the client API).
    res = client.register_metric_meta(name='a', dataset_id=1, model_name=None, model_version=None, job_id=job.uuid,
                                      start_time=start, end_time=end, uri='/tmp/metric_1',
                                      metric_type=MetricType.DATASET,
                                      tags='', metric_description='', properties=Properties({'a': 'a'}))
    client.update_metric_meta(uuid=res[2].uuid, job_id=5)
    metric_meta_result = client.get_dataset_metric_meta(dataset_id=1)
    self.assertTrue(isinstance(metric_meta_result[2], MetricMeta))
    # The update above must have changed the job id.
    self.assertEqual(5, metric_meta_result[2].job_id)
    res = client.register_metric_meta(name='b', dataset_id=1, model_name=None, model_version=None, job_id=job.uuid,
                                      start_time=start, end_time=end, uri='/tmp/metric_2',
                                      metric_type=MetricType.DATASET,
                                      tags='flink', metric_description='', properties=Properties({'b': 'b'}))
    metric_meta_result = client.get_dataset_metric_meta(dataset_id=1)
    get_metric_meta = client.get_metric_meta(name=res[2].name)
    self.assertEqual(res[2].tags, get_metric_meta[2].tags)
    # With two metas registered the payload becomes a list instead of one MetricMeta.
    self.assertTrue(isinstance(metric_meta_result[2], List))
    self.assertEqual(2, len(metric_meta_result[2]))
@staticmethod
def register_workflow_job():
    """Register a project, a workflow execution under it, and a job; return all three."""
    project = client.register_project(name='project')
    execution = client.register_workflow_execution(name='execution',
                                                   project_id=project.uuid,
                                                   execution_state=State.INIT,
                                                   workflow_json='workflow.yaml',
                                                   signature='hdfs://')
    job = client.register_job(name='job',
                              workflow_execution_id=execution.uuid,
                              job_state=State.STARTING,
                              properties=Properties({'a': 'b'}),
                              signature='offset1')
    return project, execution, job
@staticmethod
def register_model_and_version(project, work_execution):
    """Register a checkpoint model under *project* plus one version tied to *work_execution*."""
    model = client.register_model(model_name='test_create_registered_model',
                                  project_id=project.uuid,
                                  model_type=ModelType.CHECKPOINT,
                                  model_desc='test create registered model')
    version = client.register_model_version(model=model.uuid,
                                            model_path="/tmp",
                                            workflow_execution_id=work_execution.uuid)
    return model, version
def test_model_metric_meta(self):
    """Register model metric metadata and verify get/list/delete behaviour."""
    project, work_execution, job = self.register_workflow_job()
    model, version = self.register_model_and_version(project, work_execution)
    start = round(time.time())
    end = start + 1
    client.register_metric_meta(name='a', dataset_id=1, model_name=model.name,
                                model_version=version.version, job_id=job.uuid,
                                start_time=start, end_time=end, uri='/tmp/metric_1',
                                metric_type=MetricType.MODEL,
                                tags='', metric_description='', properties=Properties({'a': 'a'}))
    metric_meta_result = client.get_model_metric_meta(model_name=model.name, model_version=version.version)
    # A single registered meta comes back as one MetricMeta object (payload slot 2).
    self.assertTrue(isinstance(metric_meta_result[2], MetricMeta))
    client.register_metric_meta(name='b', dataset_id=2, model_name=model.name,
                                model_version=version.version, job_id=3,
                                start_time=start, end_time=end, uri='/tmp/metric_2',
                                metric_type=MetricType.MODEL,
                                tags='', metric_description='', properties=Properties({'b': 'b'}))
    metric_meta_result = client.get_model_metric_meta(model_name=model.name, model_version=version.version)
    # Two metas -> the payload becomes a list.
    self.assertTrue(isinstance(metric_meta_result[2], List))
    self.assertEqual(2, len(metric_meta_result[2]))
    client.delete_metric_meta(metric_meta_result[2][0].uuid)
    metric_meta_result = client.get_model_metric_meta(model_name=model.name, model_version=version.version)
    # After deleting one, the remaining meta is returned as a single object again.
    self.assertTrue(isinstance(metric_meta_result[2], MetricMeta))
def test_metric_summary(self):
    """Register, update, list and delete metric summaries for one metric id."""
    metric_summary_result = client.register_metric_summary(metric_id=1, metric_key='a', metric_value='1.0')
    # Payload slot 2 carries the created MetricSummary.
    self.assertTrue(isinstance(metric_summary_result[2], MetricSummary))
    client.update_metric_summary(uuid=metric_summary_result[2].uuid, metric_value='5.0')
    metric_summary_result = client.get_metric_summary(metric_id=1)
    # Listing always yields a list; the updated value must be visible.
    self.assertTrue(isinstance(metric_summary_result[2], List))
    self.assertEqual('5.0', metric_summary_result[2][0].metric_value)
    client.register_metric_summary(metric_id=1, metric_key='b', metric_value='2.0')
    metric_summary_result = client.get_metric_summary(metric_id=1)
    self.assertEqual(2, len(metric_summary_result[2]))
    client.delete_metric_summary(metric_summary_result[2][0].uuid)
    metric_summary_result = client.get_metric_summary(metric_id=1)
    self.assertEqual(1, len(metric_summary_result[2]))
class TestAIFlowClientSqlite(AIFlowClientTestCases, unittest.TestCase):
    """Run the shared AIFlowClient test cases against a SQLite-backed AIFlowServer."""

    @classmethod
    def setUpClass(cls) -> None:
        # Rebind the module-level clients that all shared test cases use.
        global client, client1, client2
        print("TestAIFlowClientSqlite setUpClass")
        # Start from a clean database file.
        if os.path.exists(_SQLITE_DB_FILE):
            os.remove(_SQLITE_DB_FILE)
        cls.server = AIFlowServer(store_uri=_SQLITE_DB_URI, port=_PORT)
        cls.server.run()
        # NOTE(review): string concatenation implies _PORT is a str — confirm.
        client = AIFlowClient(server_uri='localhost:' + _PORT)
        client1 = AIFlowClient(server_uri='localhost:' + _PORT)
        client2 = AIFlowClient(server_uri='localhost:' + _PORT)

    @classmethod
    def tearDownClass(cls) -> None:
        # Stop any listeners left running before shutting the server down.
        client.stop_listen_event()
        client1.stop_listen_event()
        client2.stop_listen_event()
        cls.server.stop()
        os.remove(_SQLITE_DB_FILE)

    def setUp(self) -> None:
        # Make sure a store is created/opened before each test.
        _get_store(_SQLITE_DB_URI)

    def tearDown(self) -> None:
        # Drop all tables so tests stay independent of each other.
        store = _get_store(_SQLITE_DB_URI)
        base.metadata.drop_all(store.db_engine)
class TestAIFlowClientSqliteWithSingleHighAvailableServer(
        AIFlowClientTestCases, unittest.TestCase):
    """
    Used to ensure the high available server has the same functionality with normal server.
    """

    @classmethod
    def setUpClass(cls) -> None:
        # Rebind the module-level clients used by the shared test cases.
        global client, client1, client2
        print("TestAIFlowClientSqlite setUpClass")
        # Start from a clean database file.
        if os.path.exists(_SQLITE_DB_FILE):
            os.remove(_SQLITE_DB_FILE)
        cls.server = HighAvailableAIFlowServer(store_uri=_SQLITE_DB_URI, port=_PORT,
                                               server_uri='localhost:' + _PORT)
        cls.server.run()
        # Clients must opt in to HA mode to talk to the HA server.
        config = ProjectConfig()
        config.set_enable_ha(True)
        client = AIFlowClient(server_uri='localhost:' + _PORT, project_config=config)
        client1 = AIFlowClient(server_uri='localhost:' + _PORT, project_config=config)
        client2 = AIFlowClient(server_uri='localhost:' + _PORT, project_config=config)

    @classmethod
    def tearDownClass(cls) -> None:
        # Stop listeners and disable HA on every client before stopping the server.
        client.stop_listen_event()
        client.disable_high_availability()
        client1.stop_listen_event()
        client1.disable_high_availability()
        client2.stop_listen_event()
        client2.disable_high_availability()
        cls.server.stop()
        os.remove(_SQLITE_DB_FILE)

    def setUp(self) -> None:
        # Make sure a store is created/opened before each test.
        _get_store(_SQLITE_DB_URI)

    def tearDown(self) -> None:
        # Drop all tables so tests stay independent of each other.
        store = _get_store(_SQLITE_DB_URI)
        base.metadata.drop_all(store.db_engine)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 60.350929
| 119
| 0.62917
| 8,798
| 81,172
| 5.494885
| 0.041941
| 0.085016
| 0.030697
| 0.041701
| 0.821157
| 0.784151
| 0.734445
| 0.701493
| 0.674127
| 0.636149
| 0
| 0.01211
| 0.274664
| 81,172
| 1,344
| 120
| 60.395833
| 0.808992
| 0.036109
| 0
| 0.583699
| 0
| 0
| 0.073575
| 0.009021
| 0
| 0
| 0
| 0
| 0.318142
| 1
| 0.060473
| false
| 0
| 0.017528
| 0
| 0.08326
| 0.013146
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ef3518326d775a8a01c6a8e6950c8a4b3dbbcab2
| 302
|
py
|
Python
|
bin/rdbox/rdbox_node_report.py
|
rdbox-intec/rdbox-middleware
|
e7469c68ba745187d1db5514494663a3593164df
|
[
"MIT"
] | 4
|
2019-04-25T06:30:18.000Z
|
2020-03-21T05:46:38.000Z
|
bin/rdbox/rdbox_node_report.py
|
fukuta-tatsuya-intec/rdbox-middleware
|
fc1612c655438aaebe5ec289764854b5fa887e09
|
[
"MIT"
] | 21
|
2020-01-10T02:22:55.000Z
|
2021-09-27T07:30:46.000Z
|
bin/rdbox/rdbox_node_report.py
|
rdbox-intec/rdbox-middleware
|
e7469c68ba745187d1db5514494663a3593164df
|
[
"MIT"
] | 5
|
2019-06-21T06:18:05.000Z
|
2020-09-18T02:28:25.000Z
|
#!/usr/bin/env python3
# coding: utf-8
class RdboxNodeReport(object):
    """Render a report for a list of rdbox nodes via a pluggable formatter."""

    def __init__(self, rdbox_node_list, formatter):
        """Keep the node list and the formatter object that will render it."""
        self.rdbox_node_list, self.formatter = rdbox_node_list, formatter

    def output_report(self):
        """Delegate rendering of the stored node list to the formatter."""
        return self.formatter.output_report(self.rdbox_node_list)
| 25.166667
| 65
| 0.721854
| 40
| 302
| 5.1
| 0.5
| 0.176471
| 0.254902
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.18543
| 302
| 11
| 66
| 27.454545
| 0.821138
| 0.115894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ef65a9bf4c1e969fa347350934e1b39795f08e07
| 3,480
|
py
|
Python
|
helpers/gen_bc6h_parser.py
|
darkuranium/tclib
|
eb934ec93887eabe5c8a2847d7b86a9511dde260
|
[
"CC0-1.0"
] | 42
|
2016-09-11T22:05:35.000Z
|
2022-01-01T14:00:56.000Z
|
helpers/gen_bc6h_parser.py
|
darkuranium/tclib
|
eb934ec93887eabe5c8a2847d7b86a9511dde260
|
[
"CC0-1.0"
] | 15
|
2016-09-08T13:53:52.000Z
|
2016-09-11T12:31:52.000Z
|
helpers/gen_bc6h_parser.py
|
darkuranium/tclib
|
eb934ec93887eabe5c8a2847d7b86a9511dde260
|
[
"CC0-1.0"
] | 1
|
2018-05-12T13:45:55.000Z
|
2018-05-12T13:45:55.000Z
|
# source: https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_texture_compression_bptc.txt
MODEREF = '''
0 m[1:0],g2[4],b2[4],b3[4],r0[9:0],g0[9:0],b0[9:0],r1[4:0],g3[4],g2[3:0],g1[4:0],b3[0],g3[3:0],b1[4:0],b3[1],b2[3:0],r2[4:0],b3[2],r3[4:0],b3[3]
1 m[1:0],g2[5],g3[4],g3[5],r0[6:0],b3[0],b3[1],b2[4],g0[6:0],b2[5],b3[2],g2[4],b0[6:0],b3[3],b3[5],b3[4],r1[5:0],g2[3:0],g1[5:0],g3[3:0],b1[5:0],b2[3:0],r2[5:0],r3[5:0]
2 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[4:0],r0[10],g2[3:0],g1[3:0],g0[10],b3[0],g3[3:0],b1[3:0],b0[10],b3[1],b2[3:0],r2[4:0],b3[2],r3[4:0],b3[3]
6 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[3:0],r0[10],g3[4],g2[3:0],g1[4:0],g0[10],g3[3:0],b1[3:0],b0[10],b3[1],b2[3:0],r2[3:0],b3[0],b3[2],r3[3:0],g2[4],b3[3]
10 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[3:0],r0[10],b2[4],g2[3:0],g1[3:0],g0[10],b3[0],g3[3:0],b1[4:0],b0[10],b2[3:0],r2[3:0],b3[1],b3[2],r3[3:0],b3[4],b3[3]
14 m[4:0],r0[8:0],b2[4],g0[8:0],g2[4],b0[8:0],b3[4],r1[4:0],g3[4],g2[3:0],g1[4:0],b3[0],g3[3:0],b1[4:0],b3[1],b2[3:0],r2[4:0],b3[2],r3[4:0],b3[3]
18 m[4:0],r0[7:0],g3[4],b2[4],g0[7:0],b3[2],g2[4],b0[7:0],b3[3],b3[4],r1[5:0],g2[3:0],g1[4:0],b3[0],g3[3:0],b1[4:0],b3[1],b2[3:0],r2[5:0],r3[5:0]
22 m[4:0],r0[7:0],b3[0],b2[4],g0[7:0],g2[5],g2[4],b0[7:0],g3[5],b3[4],r1[4:0],g3[4],g2[3:0],g1[5:0],g3[3:0],b1[4:0],b3[1],b2[3:0],r2[4:0],b3[2],r3[4:0],b3[3]
26 m[4:0],r0[7:0],b3[1],b2[4],g0[7:0],b2[5],g2[4],b0[7:0],b3[5],b3[4],r1[4:0],g3[4],g2[3:0],g1[4:0],b3[0],g3[3:0],b1[5:0],b2[3:0],r2[4:0],b3[2],r3[4:0],b3[3]
30 m[4:0],r0[5:0],g3[4],b3[0],b3[1],b2[4],g0[5:0],g2[5],b2[5],b3[2],g2[4],b0[5:0],g3[5],b3[3],b3[5],b3[4],r1[5:0],g2[3:0],g1[5:0],g3[3:0],b1[5:0],b2[3:0],r2[5:0],r3[5:0]
3 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[9:0],g1[9:0],b1[9:0]
7 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[8:0],r0[10],g1[8:0],g0[10],b1[8:0],b0[10]
11 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[7:0],r0[10:11],g1[7:0],g0[10:11],b1[7:0],b0[10:11]
15 m[4:0],r0[9:0],g0[9:0],b0[9:0],r1[3:0],r0[10:15],g1[3:0],g0[10:15],b1[3:0],b0[10:15]
'''
# Indentation prefix for each generated C statement inside a case.
INDENT = ' '
# Template for one generated bit-extraction statement.
FORMAT = 'colors[{endpoint}].c.{channel} |= TCTEX_H_GETBITS(bdata, {offset}, {numbits}) << {firstbit};'

import re

# Emit a C switch over the BC6H block mode; one case per MODEREF line.
print('switch(mode)')
print('{')
for line in MODEREF.splitlines():
    line = line.strip()
    if not line:
        continue
    # Each line is "<mode> <comma-separated field assignments>".
    mode, assigns = line.split()
    mode = int(mode)
    assigns = assigns.split(',')
    print('case %u:' % mode)
    offset = 0
    for assign in assigns:
        # e.g. "r0[9:0]" -> channel 'r', endpoint 0, bit range 9..0.
        m = re.match(r'(\w)(\d?)\[(\d+)(?::(\d+))?\]', assign)
        assert m
        data = {}
        data['channel'] = m.group(1)
        data['offset'] = offset
        head = int(m.group(3))
        # A bare "[n]" means a single bit: head == tail.
        tail = int(m.group(4)) if m.group(4) else head
        # Descending range (head >= tail) is a normal read; ascending means bit-reversed.
        if head >= tail:
            reverse = False
            head, tail = tail, head
        else:
            reverse = True
        numbits = tail - head + 1
        if m.group(1) == 'm':
            # Mode bits are consumed from the stream but produce no color assignment.
            offset += numbits
            continue
        data['endpoint'] = int(m.group(2))
        data['numbits'] = numbits
        data['firstbit'] = head
        if reverse:
            # Emit one statement per bit, reading the bits in reverse order.
            data['numbits'] = 1
            print(INDENT + '/* begin reverse read */')
            for i in range(tail - head + 1):
                data['offset'] = offset + i
                data['firstbit'] = tail - i
                print(INDENT + FORMAT.format(**data))
            print(INDENT + '/* end reverse read */')
        else:
            print(INDENT + FORMAT.format(**data))
        offset += numbits
    print(INDENT + 'break;')
print('}')
| 48.333333
| 169
| 0.504885
| 853
| 3,480
| 2.053927
| 0.112544
| 0.049087
| 0.041096
| 0.034247
| 0.450913
| 0.391553
| 0.368721
| 0.325342
| 0.317922
| 0.310502
| 0
| 0.238683
| 0.162069
| 3,480
| 71
| 170
| 49.014085
| 0.36214
| 0.027299
| 0
| 0.126984
| 0
| 0.238095
| 0.626367
| 0.557198
| 0
| 0
| 0
| 0
| 0.015873
| 1
| 0
| false
| 0
| 0.015873
| 0
| 0.015873
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
322c2469fb2374edc92dc36b2ab96b03b241279d
| 247
|
py
|
Python
|
ambra_sdk/service/entrypoints/case.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 9
|
2020-04-20T23:45:44.000Z
|
2021-04-18T11:22:17.000Z
|
ambra_sdk/service/entrypoints/case.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 13
|
2020-02-08T16:15:05.000Z
|
2021-09-13T22:55:28.000Z
|
ambra_sdk/service/entrypoints/case.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 6
|
2020-03-25T17:47:45.000Z
|
2021-04-18T11:22:19.000Z
|
from ambra_sdk.service.entrypoints.generated.case import \
AsyncCase as GAsyncCase
from ambra_sdk.service.entrypoints.generated.case import Case as GCase
class Case(GCase):
    """Synchronous Case entrypoint; thin public alias over the generated GCase."""
class AsyncCase(GAsyncCase):
    """Asynchronous Case entrypoint; thin public alias over the generated GAsyncCase."""
| 20.583333
| 70
| 0.740891
| 30
| 247
| 6.033333
| 0.433333
| 0.099448
| 0.132597
| 0.209945
| 0.541436
| 0.541436
| 0.541436
| 0.541436
| 0
| 0
| 0
| 0
| 0.1417
| 247
| 11
| 71
| 22.454545
| 0.853774
| 0.064777
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
326020739936657e23d816035d7843ef864e8ceb
| 116
|
py
|
Python
|
py/protocols/util/server.py
|
marin-jovanovic/power-grid-simulator
|
e618588a3315c9d15b66290ffb86def6daa1dca4
|
[
"Apache-2.0"
] | null | null | null |
py/protocols/util/server.py
|
marin-jovanovic/power-grid-simulator
|
e618588a3315c9d15b66290ffb86def6daa1dca4
|
[
"Apache-2.0"
] | null | null | null |
py/protocols/util/server.py
|
marin-jovanovic/power-grid-simulator
|
e618588a3315c9d15b66290ffb86def6daa1dca4
|
[
"Apache-2.0"
] | null | null | null |
class Server:
    """Plain value object holding a server's network endpoint."""

    def __init__(self, host_name, port):
        """Record the host name and port this server is addressed by."""
        self.host_name, self.port = host_name, port
| 19.333333
| 40
| 0.62931
| 16
| 116
| 4.125
| 0.5
| 0.363636
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.284483
| 116
| 5
| 41
| 23.2
| 0.795181
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
328da3e2a01a3c77391364f9474e6577528f3cdc
| 120
|
py
|
Python
|
graphgallery/nn/layers/pytorch/conv/__init__.py
|
kisekizzz/GraphGallery
|
fd4a1f474c244f774397460ae95935638ef48f5b
|
[
"MIT"
] | 1
|
2020-11-22T10:14:58.000Z
|
2020-11-22T10:14:58.000Z
|
graphgallery/nn/layers/th_layers/__init__.py
|
mengliu1998/GraphGallery
|
025ac09e883f3e1e1b02000e086830c935884a6e
|
[
"MIT"
] | null | null | null |
graphgallery/nn/layers/th_layers/__init__.py
|
mengliu1998/GraphGallery
|
025ac09e883f3e1e1b02000e086830c935884a6e
|
[
"MIT"
] | 1
|
2020-11-22T10:14:59.000Z
|
2020-11-22T10:14:59.000Z
|
from .gcn import GraphConvolution
from .gat import GraphAttention, SparseGraphAttention
from .sgc import SGConvolution
| 24
| 53
| 0.85
| 13
| 120
| 7.846154
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 120
| 4
| 54
| 30
| 0.962264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
32bed033a8e47e38c50ef282d82842765cb112a5
| 266
|
py
|
Python
|
tests/test_no_config.py
|
intel/oneContainer-Cloud-Tool
|
843fd4f6aa2e168ac7a7b544e05b92a7495e1121
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_no_config.py
|
intel/oneContainer-Cloud-Tool
|
843fd4f6aa2e168ac7a7b544e05b92a7495e1121
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_no_config.py
|
intel/oneContainer-Cloud-Tool
|
843fd4f6aa2e168ac7a7b544e05b92a7495e1121
|
[
"BSD-3-Clause"
] | 3
|
2021-01-04T10:29:16.000Z
|
2022-02-19T00:07:15.000Z
|
"""test no config file."""
import os
import onecontainer_cloud_tool.config as config
def test_no_config():
    """Assert the config file is absent: `init` is what creates CONFIG_FILE,
    so before it runs this path must not exist."""
    file_present = os.path.isfile(config.CONFIG_FILE)
    assert not file_present
| 24.181818
| 80
| 0.733083
| 42
| 266
| 4.5
| 0.595238
| 0.21164
| 0.126984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172932
| 266
| 10
| 81
| 26.6
| 0.859091
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
08661490a7fbb99a4db92bed366d13d03ddd8ed4
| 38
|
py
|
Python
|
Code/lecture 6/print.py
|
capacitybuilding/Introduction-to-Computng
|
77639e53ed53896581c4a8431f32198237ac70dc
|
[
"MIT"
] | null | null | null |
Code/lecture 6/print.py
|
capacitybuilding/Introduction-to-Computng
|
77639e53ed53896581c4a8431f32198237ac70dc
|
[
"MIT"
] | null | null | null |
Code/lecture 6/print.py
|
capacitybuilding/Introduction-to-Computng
|
77639e53ed53896581c4a8431f32198237ac70dc
|
[
"MIT"
] | null | null | null |
# NOTE: Python 2 print statements (no parentheses) — this file will not run under Python 3.
print 'Hello world'
print 5
print 4.0
| 9.5
| 19
| 0.736842
| 8
| 38
| 3.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0.184211
| 38
| 3
| 20
| 12.666667
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0.289474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
0899b4458687cdca80f8a4b25965261dadb86056
| 195
|
py
|
Python
|
DeepLearning/Deep-Learning-with-PyTorch-Tutorials/lesson46-时间序列表示/main.py
|
Shadowalker1995/Tutorial-Resource
|
71fe3d521cf9971f708fa9978e9c685c0dda6ba6
|
[
"Apache-2.0"
] | null | null | null |
DeepLearning/Deep-Learning-with-PyTorch-Tutorials/lesson46-时间序列表示/main.py
|
Shadowalker1995/Tutorial-Resource
|
71fe3d521cf9971f708fa9978e9c685c0dda6ba6
|
[
"Apache-2.0"
] | null | null | null |
DeepLearning/Deep-Learning-with-PyTorch-Tutorials/lesson46-时间序列表示/main.py
|
Shadowalker1995/Tutorial-Resource
|
71fe3d521cf9971f708fa9978e9c685c0dda6ba6
|
[
"Apache-2.0"
] | 1
|
2021-01-08T20:22:26.000Z
|
2021-01-08T20:22:26.000Z
|
import torch
import torchnlp
from torchnlp import word_to_vector
def main():
    # vec = word_to_vector.GloVe()
    # Load BPEmb subword embeddings via torchnlp's word_to_vector helpers
    # (presumably downloads on first use — confirm against torchnlp docs).
    vec = word_to_vector.BPEmb()
if __name__ == '__main__':
    main()
| 10.263158
| 35
| 0.666667
| 26
| 195
| 4.461538
| 0.538462
| 0.155172
| 0.310345
| 0.258621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235897
| 195
| 19
| 36
| 10.263158
| 0.778523
| 0.14359
| 0
| 0
| 0
| 0
| 0.048193
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.428571
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
08c1fefe323dc769710a79bf86acce51495f3081
| 1,242
|
py
|
Python
|
open_vsdcli/vsdcli.py
|
maxiterr/openvsd
|
5b0b50c67863498cb7972670ea6cdcc79c12624f
|
[
"Apache-2.0"
] | 9
|
2015-10-15T14:11:03.000Z
|
2016-11-24T20:35:30.000Z
|
open_vsdcli/vsdcli.py
|
maxiterr/openvsd
|
5b0b50c67863498cb7972670ea6cdcc79c12624f
|
[
"Apache-2.0"
] | 24
|
2015-10-15T14:37:15.000Z
|
2020-02-28T10:23:11.000Z
|
open_vsdcli/vsdcli.py
|
maxiterr/openvsd
|
5b0b50c67863498cb7972670ea6cdcc79c12624f
|
[
"Apache-2.0"
] | 6
|
2015-10-25T21:11:17.000Z
|
2020-03-05T16:16:05.000Z
|
# Copyright 2015 Maxime Terras <maxime.terras@numergy.com>
# Copyright 2015 Pierre Padrixe <pierre.padrixe@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from open_vsdcli.vsd_license import *
from open_vsdcli.vsd_enterprise import *
from open_vsdcli.vsd_domain import *
from open_vsdcli.vsd_subnet import *
from open_vsdcli.vsd_user import *
from open_vsdcli.vsd_gateway import *
from open_vsdcli.vsd_vsp import *
from open_vsdcli.vsd_vm import *
from open_vsdcli.vsd_vport import *
from open_vsdcli.vsd_policy import *
from open_vsdcli.vsd_dhcp import *
from open_vsdcli.vsd_metadata import *
from open_vsdcli.vsd_route import *
def main():
    # Invoke the vsdcli command group (brought in via the vsd_* star imports)
    # with a fresh, empty context object.
    vsdcli(obj={})
if __name__ == '__main__':
    main()
| 33.567568
| 78
| 0.761675
| 188
| 1,242
| 4.851064
| 0.478723
| 0.114035
| 0.199561
| 0.242325
| 0.302632
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011594
| 0.166667
| 1,242
| 36
| 79
| 34.5
| 0.869565
| 0.533816
| 0
| 0
| 0
| 0
| 0.014184
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| true
| 0
| 0.764706
| 0
| 0.823529
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ecde0b9662df9cf4c0f7b9c6b5d2671e099c452
| 174
|
py
|
Python
|
fastmot/__init__.py
|
hejinIzhanghao/FastMOT
|
1428a1994c7c3fb041ed04d1cc6051a4a4fae8b0
|
[
"MIT"
] | null | null | null |
fastmot/__init__.py
|
hejinIzhanghao/FastMOT
|
1428a1994c7c3fb041ed04d1cc6051a4a4fae8b0
|
[
"MIT"
] | null | null | null |
fastmot/__init__.py
|
hejinIzhanghao/FastMOT
|
1428a1994c7c3fb041ed04d1cc6051a4a4fae8b0
|
[
"MIT"
] | null | null | null |
from .videoio import VideoIO
from .mot_cv import MOT
from .tracker import MultiTracker
from .kalman_filter import KalmanFilter
from .flow import Flow
from .track import Track
| 29
| 39
| 0.833333
| 26
| 174
| 5.5
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132184
| 174
| 6
| 40
| 29
| 0.94702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4117b353f111ba28cb42914c51ee38f1d228b972
| 120
|
py
|
Python
|
pic-to-ascii.py
|
TomJamesGray/pic-to-ascii
|
9bed680050919263c1cc418c4827847a461215e5
|
[
"MIT"
] | null | null | null |
pic-to-ascii.py
|
TomJamesGray/pic-to-ascii
|
9bed680050919263c1cc418c4827847a461215e5
|
[
"MIT"
] | null | null | null |
pic-to-ascii.py
|
TomJamesGray/pic-to-ascii
|
9bed680050919263c1cc418c4827847a461215e5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
from pic_to_ascii import main
if __name__ == "__main__":
main.main(sys.argv[1:])
| 17.142857
| 29
| 0.708333
| 20
| 120
| 3.75
| 0.75
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.15
| 120
| 6
| 30
| 20
| 0.715686
| 0.175
| 0
| 0
| 0
| 0
| 0.081633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
412be2de29843dbe8d33c7f65ccfe2520c9a0c79
| 127
|
py
|
Python
|
test_project/member/models.py
|
Yemsheng/goldencage
|
835f71c7ab882eccb87f42d2bc138d26b6432860
|
[
"BSD-3-Clause"
] | null | null | null |
test_project/member/models.py
|
Yemsheng/goldencage
|
835f71c7ab882eccb87f42d2bc138d26b6432860
|
[
"BSD-3-Clause"
] | null | null | null |
test_project/member/models.py
|
Yemsheng/goldencage
|
835f71c7ab882eccb87f42d2bc138d26b6432860
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
from django.contrib.auth.tests.custom_user import CustomUser
class MyUser(CustomUser):
    """Concrete user model for the test project.

    Subclasses Django's auth-test ``CustomUser`` without adding any
    fields or behavior.
    """
    pass
| 18.142857
| 60
| 0.80315
| 18
| 127
| 5.611111
| 0.777778
| 0.19802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133858
| 127
| 6
| 61
| 21.166667
| 0.918182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
f5ad5d3330c9b3335cd61e6c196e777dedcbade3
| 21,483
|
py
|
Python
|
third_party/spider/baselines/seq2seq_attention_copy/seq2seq/decoders/schema_attention_decoder.py
|
chenyangh/tensor2struct-public
|
d3257cba6d76d3c658a58a78f687d986bdc755cf
|
[
"MIT"
] | 69
|
2021-04-14T06:35:07.000Z
|
2022-03-31T18:35:05.000Z
|
third_party/spider/baselines/seq2seq_attention_copy/seq2seq/decoders/schema_attention_decoder.py
|
chenyangh/tensor2struct-public
|
d3257cba6d76d3c658a58a78f687d986bdc755cf
|
[
"MIT"
] | 19
|
2018-12-17T20:42:11.000Z
|
2020-02-12T21:29:51.000Z
|
third_party/spider/baselines/seq2seq_attention_copy/seq2seq/decoders/schema_attention_decoder.py
|
chenyangh/tensor2struct-public
|
d3257cba6d76d3c658a58a78f687d986bdc755cf
|
[
"MIT"
] | 22
|
2019-03-16T05:57:27.000Z
|
2020-10-25T04:34:54.000Z
|
# Modified from attention_decoder.py
"""
A sequence decoder with attention to schema that performs
a softmax based on the RNN state.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.ops import variable_scope
from seq2seq.decoders.rnn_decoder import RNNDecoder
from seq2seq.contrib.seq2seq.helper import CustomHelper
class SchemaAttentionDecoderOutput(
    namedtuple(
        "DecoderOutput",
        [
            "logits",
            "predicted_ids",
            "cell_output",
            "attention_scores",
            "attention_context",
            "schema_attention_scores",
            "schema_attention_context",
        ])):
  """Decoder output augmented with input-sequence attention scores/context
  and schema attention scores/context."""
class SchemaCopyingAttentionDecoderOutput(
    namedtuple(
        "DecoderOutput",
        [
            "logits",
            "predicted_ids",
            "cell_output",
            "attention_scores",
            "attention_context",
            "schema_attention_scores",
            "schema_attention_context",
            "schema_attention_copy_vals",
        ])):
  """Decoder output augmented with attention scores/contexts and the
  per-schema-field copy values."""
class SchemaMapAttentionDecoderOutput(
    namedtuple(
        "DecoderOutput",
        [
            "logits",
            "predicted_ids",
            "cell_output",
            "attention_scores",
            "attention_context",
            "schema_attention_scores",
            "schema_attention_context",
            "schema_map_attention_scores",
            "schema_map_attention_context",
        ])):
  """Decoder output augmented with attention scores/contexts for the input
  sequence, the schema, and the schema map."""
class SchemaAttentionDecoder(RNNDecoder):
  """An RNN Decoder that uses attention over an input sequence and a schema.

  Args:
    cell: An instance of ` tf.contrib.rnn.RNNCell`
    helper: An instance of `tf.contrib.seq2seq.Helper` to assist decoding
    initial_state: A tensor or tuple of tensors used as the initial cell
      state.
    vocab_size: Output vocabulary size, i.e. number of units
      in the softmax layer
    attention_keys: The sequence used to calculate attention scores.
      A tensor of shape `[B, T, ...]`.
    attention_values: The sequence to attend over.
      A tensor of shape `[B, T, input_dim]`.
    attention_values_length: Sequence length of the attention values.
      An int32 Tensor of shape `[B]`.
    attention_fn: The attention function to use. This function map from
      `(state, inputs)` to `(attention_scores, attention_context)`.
      For an example, see `seq2seq.decoder.attention.AttentionLayer`.
    reverse_scores: Optional, an array of sequence length. If set,
      reverse the attention scores in the output. This is used for when
      a reversed source sequence is fed as an input but you want to
      return the scores in non-reversed order.
  """

  # the definition of schema_attention_function is in models/schema_attention_seq2seq.py
  def __init__(self,
               params,
               mode,
               vocab_size,
               attention_keys,
               attention_values,
               attention_values_length,
               attention_fn,
               # 4 extra values
               reverse_scores_lengths=None,
               schema_attention_keys=None,
               schema_attention_values=None,
               schema_attention_values_length=None,
               schema_attention_fn=None,
               name="schema_attention_decoder"):
    super(SchemaAttentionDecoder, self).__init__(params, mode, name)
    self.vocab_size = vocab_size
    self.attention_keys = attention_keys
    self.attention_values = attention_values
    self.attention_values_length = attention_values_length
    self.attention_fn = attention_fn
    self.reverse_scores_lengths = reverse_scores_lengths
    self.schema_attention_keys = schema_attention_keys
    self.schema_attention_values = schema_attention_values
    self.schema_attention_values_length = schema_attention_values_length
    # If no dedicated schema attention function was supplied, fall back to
    # the input-sequence attention function.
    if schema_attention_fn:
      self.schema_attention_fn = schema_attention_fn
    else:
      self.schema_attention_fn = attention_fn

  @property
  def output_size(self):
    # Per-field sizes mirroring SchemaAttentionDecoderOutput.
    return SchemaAttentionDecoderOutput(
        logits=self.vocab_size,
        predicted_ids=tf.TensorShape([]),
        cell_output=self.cell.output_size,
        attention_scores=tf.shape(self.attention_values)[1:-1],
        attention_context=self.attention_values.get_shape()[-1],
        schema_attention_scores=tf.shape(self.schema_attention_values)[1:-1],
        schema_attention_context=self.schema_attention_values.get_shape()[-1])

  @property
  def output_dtype(self):
    # Per-field dtypes mirroring SchemaAttentionDecoderOutput.
    return SchemaAttentionDecoderOutput(
        logits=tf.float32,
        predicted_ids=tf.int32,
        cell_output=tf.float32,
        attention_scores=tf.float32,
        attention_context=tf.float32,
        schema_attention_scores=tf.float32,
        schema_attention_context=tf.float32)

  def initialize(self, name=None):
    """Returns (finished, first_inputs, initial_state) for decoding.

    Zero-valued attention contexts (input and schema) are concatenated to
    the first inputs so the first step's input has the same width as the
    inputs produced by `att_next_inputs` in `_setup`.
    """
    finished, first_inputs = self.helper.initialize()
    # Concat empty attention context
    attention_context = tf.zeros([
        tf.shape(first_inputs)[0],
        self.attention_values.get_shape().as_list()[-1]
    ])
    schema_attention_context = tf.zeros([
        tf.shape(first_inputs)[0],
        self.schema_attention_values.get_shape().as_list()[-1]
    ])
    first_inputs = tf.concat([first_inputs, attention_context, schema_attention_context], 1)
    return finished, first_inputs, self.initial_state

  def compute_output(self, cell_output, calculate_softmax=True):
    """Computes the decoder outputs.

    Returns (softmax_input, logits, att_scores, attention_context,
    schema_att_scores, schema_attention_context); the first two are None
    when calculate_softmax is False (subclasses use that to append extra
    contexts before projecting).
    """
    # Compute attention over the input sequence.
    att_scores, attention_context = self.attention_fn(
        query=cell_output,
        keys=self.attention_keys,
        values=self.attention_values,
        values_length=self.attention_values_length)
    # there is a key and a schema attention value
    # which is key? where to find the schema attention function?
    # (schema_attention_fn defaults to attention_fn in __init__; a custom
    # one may be passed by the model — see the note above __init__.)
    schema_att_scores, schema_attention_context = self.schema_attention_fn(
        query=cell_output,
        keys=self.schema_attention_keys,
        values=self.schema_attention_values,
        values_length=self.schema_attention_values_length)
    softmax_input = None
    logits = None
    if calculate_softmax:
      softmax_input, logits = self._calculate_softmax(
          [cell_output, attention_context, schema_attention_context])
    return softmax_input, logits, att_scores, attention_context, schema_att_scores, schema_attention_context

  def _calculate_softmax(self, list_of_contexts):
    """Mixes the given contexts and projects them to vocabulary logits."""
    softmax_input = tf.contrib.layers.fully_connected(
        inputs=tf.concat(list_of_contexts, 1),
        num_outputs=self.cell.output_size,
        activation_fn=tf.nn.tanh,
        scope="attention_mix")
    # Softmax computation
    logits = tf.contrib.layers.fully_connected(
        inputs=softmax_input,
        num_outputs=self.vocab_size,
        activation_fn=None,
        scope="logits")
    return softmax_input, logits

  def _setup(self, initial_state, helper):
    # Wrap the provided helper so every step's next inputs carry both
    # attention contexts, matching the width set up in initialize().
    self.initial_state = initial_state

    def att_next_inputs(time, outputs, state, sample_ids, name=None):
      """Wraps the original decoder helper function to append the attention
      context.
      """
      finished, next_inputs, next_state = helper.next_inputs(
          time=time,
          outputs=outputs,
          state=state,
          sample_ids=sample_ids,
          name=name)
      next_inputs = tf.concat([next_inputs, outputs.attention_context, outputs.schema_attention_context], 1)
      return (finished, next_inputs, next_state)

    self.helper = CustomHelper(
        initialize_fn=helper.initialize,
        sample_fn=helper.sample,
        next_inputs_fn=att_next_inputs)

  def step(self, time_, inputs, state, name=None):
    """One decode step: run the cell, compute attention-augmented outputs,
    sample ids, and build the next inputs via the wrapped helper."""
    cell_output, cell_state = self.cell(inputs, state)
    (cell_output_new, logits, attention_scores, attention_context,
     schema_attention_scores, schema_attention_context) = \
        self.compute_output(cell_output)
    if self.reverse_scores_lengths is not None:
      # Undo the input reversal so scores line up with the original order.
      attention_scores = tf.reverse_sequence(
          input=attention_scores,
          seq_lengths=self.reverse_scores_lengths,
          seq_dim=1,
          batch_dim=0)
    sample_ids = self.helper.sample(
        time=time_, outputs=logits, state=cell_state)
    outputs = SchemaAttentionDecoderOutput(
        logits=logits,
        predicted_ids=sample_ids,
        cell_output=cell_output_new,
        attention_scores=attention_scores,
        attention_context=attention_context,
        schema_attention_scores=schema_attention_scores,
        schema_attention_context=schema_attention_context)
    finished, next_inputs, next_state = self.helper.next_inputs(
        time=time_, outputs=outputs, state=cell_state, sample_ids=sample_ids)
    return (outputs, next_state, next_inputs, finished)
class SchemaAttentionCopyingDecoder(SchemaAttentionDecoder):
  """
  The version of SchemaAttentionCopyingDecoder that uses
  F(score_n, rowembedding_n, h, c, W) to generate a score for the
  n-th field in the schema.
  """

  def __init__(self,
               params,
               mode,
               vocab_size,
               attention_keys,
               attention_values,
               attention_values_length,
               attention_fn,
               reverse_scores_lengths=None,
               schema_attention_keys=None,
               schema_attention_values=None,
               schema_attention_values_length=None,
               schema_attention_fn=None,
               name="schema_attention_copying_decoder"):
    super(SchemaAttentionCopyingDecoder, self).__init__(
        params, mode, vocab_size, attention_keys, attention_values,
        attention_values_length, attention_fn, reverse_scores_lengths,
        schema_attention_keys, schema_attention_values,
        schema_attention_values_length, schema_attention_fn, name)
    # Raw schema embeddings ("rowembedding_n" in the class docstring);
    # used below to compute the per-field copy scores.
    self.schema_embs = schema_attention_values

  @property
  def output_size(self):
    # Same as the parent, plus one extra field for the copy values (one
    # per schema item, so it shares the schema attention score shape).
    return SchemaCopyingAttentionDecoderOutput(
        logits=self.vocab_size,
        predicted_ids=tf.TensorShape([]),
        cell_output=self.cell.output_size,
        attention_scores=tf.shape(self.attention_values)[1:-1],
        attention_context=self.attention_values.get_shape()[-1],
        schema_attention_scores=tf.shape(self.schema_attention_values)[1:-1],
        schema_attention_context=self.schema_attention_values.get_shape()[-1],
        schema_attention_copy_vals=tf.shape(self.schema_attention_values)[1:-1])

  @property
  def output_dtype(self):
    return SchemaCopyingAttentionDecoderOutput(
        logits=tf.float32,
        predicted_ids=tf.int32,
        cell_output=tf.float32,
        attention_scores=tf.float32,
        attention_context=tf.float32,
        schema_attention_scores=tf.float32,
        schema_attention_context=tf.float32,
        schema_attention_copy_vals=tf.float32)

  def compute_output(self, cell_output):
    """Computes the parent's outputs plus per-schema-field copy values."""
    (softmax_input, logits, att_scores,
     attention_context, schema_att_scores,
     schema_attention_context) = super(
         SchemaAttentionCopyingDecoder, self).compute_output(cell_output)
    # NOTE(review): this first assignment is overwritten below.
    schema_attention_copy_vals = schema_att_scores
    # Project each schema embedding to the width of [cell_output;
    # attention_context] so they can be multiplied together.
    weighted_schema_embs_size = self.cell.output_size + \
        self.attention_values.get_shape().as_list()[-1]
    weighted_schema_embs = tf.contrib.layers.fully_connected(
        inputs=self.schema_embs,
        num_outputs=weighted_schema_embs_size,
        activation_fn=None,
        scope="weighted_schema_embs")
    concatenated = tf.expand_dims(
        tf.concat([cell_output, attention_context], 1), axis=2)
    # Copy value for field n: attention score * <W * emb_n, [h; c]>.
    schema_attention_copy_vals = schema_att_scores * tf.squeeze(tf.matmul(
        weighted_schema_embs, concatenated), axis=2)
    return softmax_input, logits, att_scores, attention_context, schema_att_scores, schema_attention_context, schema_attention_copy_vals

  def _setup(self, initial_state, helper):
    #TODO: Take advantage of inheritance rather than copy-paste
    self.initial_state = initial_state

    def att_next_inputs(time, outputs, state, sample_ids, name=None):
      """Wraps the original decoder helper function to append the attention
      context.
      """
      finished, next_inputs, next_state = helper.next_inputs(
          time=time,
          outputs=outputs,
          state=state,
          sample_ids=sample_ids,
          name=name)
      next_inputs = tf.concat([next_inputs, outputs.attention_context, outputs.schema_attention_context], 1)
      return (finished, next_inputs, next_state)

    self.helper = CustomHelper(
        initialize_fn=helper.initialize,
        sample_fn=helper.sample,
        next_inputs_fn=att_next_inputs)

  def step(self, time_, inputs, state, name=None):
    """One decode step; identical to the parent's except the outputs also
    carry schema_attention_copy_vals."""
    cell_output, cell_state = self.cell(inputs, state)
    (cell_output_new, logits, attention_scores, attention_context,
     schema_attention_scores, schema_attention_context,
     schema_attention_copy_vals) = \
        self.compute_output(cell_output)
    if self.reverse_scores_lengths is not None:
      attention_scores = tf.reverse_sequence(
          input=attention_scores,
          seq_lengths=self.reverse_scores_lengths,
          seq_dim=1,
          batch_dim=0)
    sample_ids = self.helper.sample(
        time=time_, outputs=logits, state=cell_state)
    outputs = SchemaCopyingAttentionDecoderOutput(
        logits=logits,
        predicted_ids=sample_ids,
        cell_output=cell_output_new,
        attention_scores=attention_scores,
        attention_context=attention_context,
        schema_attention_scores=schema_attention_scores,
        schema_attention_context=schema_attention_context,
        schema_attention_copy_vals=schema_attention_copy_vals)
    finished, next_inputs, next_state = self.helper.next_inputs(
        time=time_, outputs=outputs, state=cell_state, sample_ids=sample_ids)
    return (outputs, next_state, next_inputs, finished)
class SchemaMapAttentionDecoder(SchemaAttentionDecoder):
  """An RNN Decoder that uses attention over an input sequence and a schema
  and a schema map.

  Args:
    cell: An instance of ` tf.contrib.rnn.RNNCell`
    helper: An instance of `tf.contrib.seq2seq.Helper` to assist decoding
    initial_state: A tensor or tuple of tensors used as the initial cell
      state.
    vocab_size: Output vocabulary size, i.e. number of units
      in the softmax layer
    attention_keys: The sequence used to calculate attention scores.
      A tensor of shape `[B, T, ...]`.
    attention_values: The sequence to attend over.
      A tensor of shape `[B, T, input_dim]`.
    attention_values_length: Sequence length of the attention values.
      An int32 Tensor of shape `[B]`.
    attention_fn: The attention function to use. This function map from
      `(state, inputs)` to `(attention_scores, attention_context)`.
      For an example, see `seq2seq.decoder.attention.AttentionLayer`.
    reverse_scores: Optional, an array of sequence length. If set,
      reverse the attention scores in the output. This is used for when
      a reversed source sequence is fed as an input but you want to
      return the scores in non-reversed order.
  """

  def __init__(self,
               params,
               mode,
               vocab_size,
               attention_keys,
               attention_values,
               attention_values_length,
               attention_fn,
               reverse_scores_lengths=None,
               schema_attention_keys=None,
               schema_attention_values=None,
               schema_attention_values_length=None,
               schema_attention_fn=None,
               schema_map_attention_keys=None,
               schema_map_attention_values=None,
               schema_map_attention_values_length=None,
               schema_map_attention_fn=None,
               name="schema_map_attention_decoder"):
    super(SchemaMapAttentionDecoder, self).__init__(
        params, mode, vocab_size, attention_keys, attention_values,
        attention_values_length, attention_fn, reverse_scores_lengths,
        schema_attention_keys, schema_attention_values,
        schema_attention_values_length, schema_attention_fn, name)
    # BUG FIX: the original code assigned the schema_* arguments here,
    # silently ignoring the schema_map_* constructor arguments. Use the
    # map-specific arguments when given, and fall back to the schema ones
    # when they are omitted so existing callers keep the old behavior.
    if schema_map_attention_keys is None:
      schema_map_attention_keys = schema_attention_keys
    if schema_map_attention_values is None:
      schema_map_attention_values = schema_attention_values
    if schema_map_attention_values_length is None:
      schema_map_attention_values_length = schema_attention_values_length
    self.schema_map_attention_keys = schema_map_attention_keys
    self.schema_map_attention_values = schema_map_attention_values
    self.schema_map_attention_values_length = schema_map_attention_values_length
    # Like schema_attention_fn, default to the input attention function.
    if schema_map_attention_fn:
      self.schema_map_attention_fn = schema_map_attention_fn
    else:
      self.schema_map_attention_fn = attention_fn

  @property
  def output_size(self):
    # Parent's sizes plus the schema-map attention score/context sizes.
    return SchemaMapAttentionDecoderOutput(
        logits=self.vocab_size,
        predicted_ids=tf.TensorShape([]),
        cell_output=self.cell.output_size,
        attention_scores=tf.shape(self.attention_values)[1:-1],
        attention_context=self.attention_values.get_shape()[-1],
        schema_attention_scores=tf.shape(self.schema_attention_values)[1:-1],
        schema_attention_context=self.schema_attention_values.get_shape()[-1],
        schema_map_attention_scores=tf.shape(self.schema_map_attention_values)[1:-1],
        schema_map_attention_context=self.schema_map_attention_values.get_shape()[-1])

  @property
  def output_dtype(self):
    return SchemaMapAttentionDecoderOutput(
        logits=tf.float32,
        predicted_ids=tf.int32,
        cell_output=tf.float32,
        attention_scores=tf.float32,
        attention_context=tf.float32,
        schema_attention_scores=tf.float32,
        schema_attention_context=tf.float32,
        schema_map_attention_scores=tf.float32,
        schema_map_attention_context=tf.float32)

  def initialize(self, name=None):
    """Extends the parent's first inputs with an empty map context."""
    (finished, first_inputs,
     initial_state) = super(
         SchemaMapAttentionDecoder, self).initialize(name=name)
    # Concat empty schema map attention context.
    # BUG FIX: size the zero context from the schema_map values (the
    # original used self.schema_attention_values, which only coincided
    # when the map values defaulted to the schema values).
    schema_map_attention_context = tf.zeros([
        tf.shape(first_inputs)[0],
        self.schema_map_attention_values.get_shape().as_list()[-1]
    ])
    first_inputs = tf.concat([first_inputs, schema_map_attention_context], 1)
    return finished, first_inputs, initial_state

  def compute_output(self, cell_output, calculate_softmax=True):
    """Computes the parent's outputs plus schema-map attention, mixing all
    three contexts into the softmax when calculate_softmax is True."""
    (softmax_input, logits, att_scores, attention_context,
     schema_att_scores, schema_attention_context) = super(
         SchemaMapAttentionDecoder, self).compute_output(cell_output,
                                                         calculate_softmax=False)
    # A separate variable scope keeps the map attention's weights distinct
    # from the schema attention's.
    with tf.variable_scope("schema_map_att"):
      (schema_map_att_scores,
       schema_map_attention_context) = self.schema_map_attention_fn(
          query=cell_output,
          keys=self.schema_map_attention_keys,
          values=self.schema_map_attention_values,
          values_length=self.schema_map_attention_values_length)
    if calculate_softmax:
      softmax_input, logits = self._calculate_softmax(
          [cell_output, attention_context, schema_attention_context,
           schema_map_attention_context])
    return (softmax_input, logits, att_scores, attention_context,
            schema_att_scores, schema_attention_context,
            schema_map_att_scores, schema_map_attention_context)

  # TODO: Can we use inheritance to make this simpler?
  def _setup(self, initial_state, helper):
    self.initial_state = initial_state

    def att_next_inputs(time, outputs, state, sample_ids, name=None):
      """Wraps the original decoder helper function to append the attention
      context.
      """
      finished, next_inputs, next_state = helper.next_inputs(
          time=time,
          outputs=outputs,
          state=state,
          sample_ids=sample_ids,
          name=name)
      # All three contexts are fed back in, matching initialize().
      next_inputs = tf.concat([next_inputs, outputs.attention_context, outputs.schema_attention_context, outputs.schema_map_attention_context], 1)
      return (finished, next_inputs, next_state)

    self.helper = CustomHelper(
        initialize_fn=helper.initialize,
        sample_fn=helper.sample,
        next_inputs_fn=att_next_inputs)

  def step(self, time_, inputs, state, name=None):
    """One decode step; the outputs also carry schema-map attention."""
    cell_output, cell_state = self.cell(inputs, state)
    (cell_output_new, logits, attention_scores, attention_context,
     schema_attention_scores, schema_attention_context,
     schema_map_attention_scores, schema_map_attention_context) = \
        self.compute_output(cell_output)
    if self.reverse_scores_lengths is not None:
      attention_scores = tf.reverse_sequence(
          input=attention_scores,
          seq_lengths=self.reverse_scores_lengths,
          seq_dim=1,
          batch_dim=0)
    sample_ids = self.helper.sample(
        time=time_, outputs=logits, state=cell_state)
    outputs = SchemaMapAttentionDecoderOutput(
        logits=logits,
        predicted_ids=sample_ids,
        cell_output=cell_output_new,
        attention_scores=attention_scores,
        attention_context=attention_context,
        schema_attention_scores=schema_attention_scores,
        schema_attention_context=schema_attention_context,
        schema_map_attention_scores=schema_map_attention_scores,
        schema_map_attention_context=schema_map_attention_context)
    finished, next_inputs, next_state = self.helper.next_inputs(
        time=time_, outputs=outputs, state=cell_state, sample_ids=sample_ids)
    return (outputs, next_state, next_inputs, finished)
| 39.931227
| 146
| 0.716939
| 2,573
| 21,483
| 5.642829
| 0.085115
| 0.117777
| 0.04835
| 0.040568
| 0.813417
| 0.779117
| 0.749019
| 0.7183
| 0.687857
| 0.676286
| 0
| 0.006226
| 0.207513
| 21,483
| 537
| 147
| 40.005587
| 0.846579
| 0.168319
| 0
| 0.659794
| 0
| 0
| 0.033256
| 0.017336
| 0
| 0
| 0
| 0.003724
| 0
| 1
| 0.061856
| false
| 0.007732
| 0.023196
| 0.015464
| 0.146907
| 0.002577
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f5cb5f3190c199b0116ae2b82b945f54eb010a1d
| 82
|
py
|
Python
|
privex/steem/__main__.py
|
brianoflondon/steem-async
|
ece69cb11e44b73a9a9bd8989e3742677c390b19
|
[
"X11"
] | 4
|
2019-09-14T19:21:07.000Z
|
2021-11-28T01:44:02.000Z
|
privex/steem/__main__.py
|
brianoflondon/steem-async
|
ece69cb11e44b73a9a9bd8989e3742677c390b19
|
[
"X11"
] | null | null | null |
privex/steem/__main__.py
|
brianoflondon/steem-async
|
ece69cb11e44b73a9a9bd8989e3742677c390b19
|
[
"X11"
] | 2
|
2021-06-30T08:23:12.000Z
|
2021-10-01T23:27:50.000Z
|
from privex.steem.cli import cli_main
# Allow running the package with `python -m`: delegate to the CLI entry point.
if __name__ == '__main__':
    cli_main()
| 13.666667
| 37
| 0.707317
| 12
| 82
| 4
| 0.666667
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182927
| 82
| 5
| 38
| 16.4
| 0.716418
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
eb33993c0a48c121dc3f54b2cbe7a82e999e3ca3
| 657
|
py
|
Python
|
2017/Day05.py
|
hrmorley34/AdventofCode
|
74590422717fb5c6b80ef3fca226359d354c4aec
|
[
"MIT"
] | null | null | null |
2017/Day05.py
|
hrmorley34/AdventofCode
|
74590422717fb5c6b80ef3fca226359d354c4aec
|
[
"MIT"
] | null | null | null |
2017/Day05.py
|
hrmorley34/AdventofCode
|
74590422717fb5c6b80ef3fca226359d354c4aec
|
[
"MIT"
] | null | null | null |
# Part 1
def jump1(jumps):
j = jumps.split("\n")
l = []
for item in j:
l += [int(item)]
length = len(l)
loc = 0
counter = 0
while 0 <= loc and loc < length:
counter += 1
jump = l[loc]
l[loc] += 1
loc += jump
return(counter)
# Part 2
def jump2(jumps):
j = jumps.split("\n")
l = []
for item in j:
l += [int(item)]
length = len(l)
loc = 0
counter = 0
while 0 <= loc and loc < length:
counter += 1
jump = l[loc]
if jump >= 3:
l[loc] -= 1
else:
l[loc] += 1
loc += jump
return(counter)
| 18.771429
| 36
| 0.429224
| 91
| 657
| 3.098901
| 0.296703
| 0.099291
| 0.053191
| 0.113475
| 0.851064
| 0.851064
| 0.851064
| 0.673759
| 0.673759
| 0.673759
| 0
| 0.042553
| 0.427702
| 657
| 34
| 37
| 19.323529
| 0.707447
| 0.019787
| 0
| 0.83871
| 0
| 0
| 0.00624
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
eb3c3f0a4eeec8c0f59896334820de61936aaac3
| 46
|
py
|
Python
|
python/testData/paramInfo/ReassignedLambda.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/paramInfo/ReassignedLambda.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/paramInfo/ReassignedLambda.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
z = lambda x,y: x+y
y = z
y(<arg1>1, <arg2>2)
| 11.5
| 19
| 0.521739
| 13
| 46
| 1.846154
| 0.615385
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0.217391
| 46
| 3
| 20
| 15.333333
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
eb4619d81ca6a2926f65533e5eb61f9a65095e5b
| 8,024
|
py
|
Python
|
apps/links/tests/test_usage_api.py
|
dstl/lighthouse
|
b810742d9f4cbfac02bf99096542499d25c88b58
|
[
"MIT"
] | 5
|
2016-05-12T13:47:38.000Z
|
2020-06-22T07:33:35.000Z
|
apps/links/tests/test_usage_api.py
|
dstl/lighthouse
|
b810742d9f4cbfac02bf99096542499d25c88b58
|
[
"MIT"
] | 7
|
2016-10-24T12:41:09.000Z
|
2016-12-08T21:58:18.000Z
|
apps/links/tests/test_usage_api.py
|
dstl/lighthouse
|
b810742d9f4cbfac02bf99096542499d25c88b58
|
[
"MIT"
] | 4
|
2016-05-12T21:53:21.000Z
|
2021-04-10T22:02:26.000Z
|
# (c) Crown Owned Copyright, 2016. Dstl.
from datetime import datetime
from os import getenv
from unittest import mock, skipIf
import requests
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
from django.utils.timezone import make_aware
from ..models import Link
from testing.common import make_user
class LinkUsageAPITest(LiveServerTestCase):
def setUp(self):
self.user = make_user()
self.link = Link.objects.create(
name='Link Linkerly',
destination='link.com',
owner=self.user,
is_external=False,
)
def test_get_usage(self):
with mock.patch('django.utils.timezone.now') as mock_now:
# register usage on a specific day
mock_now.return_value = make_aware(datetime(2016, 3, 1, 10, 0, 0))
self.link.register_usage(self.user)
mock_now.return_value = make_aware(datetime(2016, 3, 2, 10, 0, 0))
self.link.register_usage(self.user)
mock_now.return_value = make_aware(datetime(2016, 3, 2, 10, 15, 0))
self.link.register_usage(self.user)
expected_response = [
{
'date': '2016-03-01T10:00:00Z',
'user': self.user.userid,
'duration': 0,
},
{
'date': '2016-03-02T10:00:00Z',
'user': self.user.userid,
'duration': 900,
}
]
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.get(link_api_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), expected_response)
def test_cannot_get_invalid_link(self):
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk + 1000}),
)
response = requests.get(link_api_url)
self.assertEqual(response.status_code, 404)
def test_cannot_update_usage_without_user(self):
self.assertEquals(self.link.usage_total(), 0)
expected_response = {'error': 'user required'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(link_api_url)
self.assertEqual(response.status_code, 400)
self.assertEquals(self.link.usage_total(), 0)
self.assertEquals(response.json(), expected_response)
def test_cannot_update_usage_with_invalid_user(self):
self.assertEquals(self.link.usage_total(), 0)
expected_response = {'error': 'no such user'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(link_api_url, data={'user': 'easter-bunny'})
self.assertEqual(response.status_code, 400)
self.assertEquals(self.link.usage_total(), 0)
self.assertEquals(response.json(), expected_response)
def test_update_usage(self):
self.assertEquals(self.link.usage_total(), 0)
expected_response = {'status': 'ok'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(link_api_url, data={'user': self.user.slug})
self.assertEqual(response.status_code, 201)
self.assertEquals(self.link.usage_total(), 1)
self.assertEquals(response.json(), expected_response)
def test_update_usage_extends_duration(self):
self.assertEquals(self.link.usage_total(), 0)
with mock.patch('django.utils.timezone.now') as mock_now:
# register usage on a specific day
mock_now.return_value = make_aware(datetime(2016, 3, 1, 10, 0, 0))
expected_response = {'status': 'ok'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(
link_api_url, data={'user': self.user.slug})
self.assertEqual(response.status_code, 201)
self.assertEquals(self.link.usage_total(), 1)
self.assertEquals(response.json(), expected_response)
# register usage shortly after
mock_now.return_value = make_aware(datetime(2016, 3, 1, 10, 15, 0))
expected_response = {'status': 'ok'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(
link_api_url, data={'user': self.user.slug})
self.assertEqual(response.status_code, 201)
self.assertEquals(self.link.usage_total(), 1)
self.assertEquals(response.json(), expected_response)
def test_update_usage_creates_new_usage(self):
self.assertEquals(self.link.usage_total(), 0)
with mock.patch('django.utils.timezone.now') as mock_now:
# register usage on a specific day
mock_now.return_value = make_aware(datetime(2016, 3, 1, 10, 0, 0))
expected_response = {'status': 'ok'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(
link_api_url, data={'user': self.user.slug})
self.assertEqual(response.status_code, 201)
self.assertEquals(self.link.usage_total(), 1)
self.assertEquals(response.json(), expected_response)
# register usage after one hour, triggers new usage stat
mock_now.return_value = make_aware(datetime(2016, 3, 1, 11, 15, 0))
expected_response = {'status': 'ok'}
link_api_url = '%s%s' % (
self.live_server_url,
reverse('api-link-usage', kwargs={'pk': self.link.pk}),
)
response = requests.post(
link_api_url, data={'user': self.user.slug})
self.assertEqual(response.status_code, 201)
self.assertEquals(self.link.usage_total(), 2)
self.assertEquals(response.json(), expected_response)
# TODO - fix in later v of django
# this test is commented out because of a bug in django
# https://code.djangoproject.com/ticket/25251
# which means tests will fail because the Link object doesn't exist
# after the first TransactionTestCase has happened
@skipIf(
    getenv('TEST_API_USAGE', None) is None,
    'Skipping tests that has to be run in isolation because of django bug'
)
def test_update_usage_creates_api_usage(self):
    """Hitting the usage endpoint also bumps the API link's own counter.

    Only runs when ``TEST_API_USAGE`` is set, because of the Django
    fixture bug referenced above.  Uses ``assertEqual`` — the
    ``assertEquals`` alias is deprecated and removed in Python 3.12.
    """
    # pk=2 is assumed to be the API's own Link fixture — TODO confirm
    # against the test fixtures.
    api = Link.objects.get(pk=2)
    self.assertEqual(self.link.usage_total(), 0)
    self.assertEqual(api.usage_total(), 0)
    link_api_url = '%s%s' % (
        self.live_server_url,
        reverse('api-link-usage', kwargs={'pk': self.link.pk}),
    )
    expected_response = {'status': 'ok'}
    response = requests.post(
        link_api_url, data={'user': self.user.slug})
    self.assertEqual(response.status_code, 201)
    self.assertEqual(self.link.usage_total(), 1)
    self.assertEqual(response.json(), expected_response)
    # Registering usage on a regular link also counts one usage
    # against the API link itself.
    self.assertEqual(api.usage_total(), 1)
def test_cannot_update_usage_on_nonexistent_link(self):
    """POSTing usage for a primary key that does not exist returns 404."""
    missing_pk = self.link.pk + 1000
    link_api_url = '%s%s' % (
        self.live_server_url,
        reverse('api-link-usage', kwargs={'pk': missing_pk}),
    )
    # NOTE(review): sibling tests send ``self.user.slug`` as the user
    # field — confirm whether ``userid`` here is intentional.
    response = requests.post(
        link_api_url, data={'user': self.user.userid})
    self.assertEqual(response.status_code, 404)
| 37.148148
| 79
| 0.599202
| 977
| 8,024
| 4.733879
| 0.157625
| 0.053622
| 0.047568
| 0.072649
| 0.780541
| 0.751784
| 0.733838
| 0.703784
| 0.703784
| 0.691676
| 0
| 0.02966
| 0.277293
| 8,024
| 215
| 80
| 37.32093
| 0.767891
| 0.058076
| 0
| 0.567901
| 0
| 0
| 0.079115
| 0.009939
| 0
| 0
| 0
| 0.004651
| 0.222222
| 1
| 0.061728
| false
| 0
| 0.055556
| 0
| 0.123457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
de2ad897350c8980ffcc843bc66b4d7a96ace1ec
| 136
|
py
|
Python
|
core/__init__.py
|
JiaMingLin/dlcv_adda
|
e292eb0e61fc79eb15a16bcf786e834436b09089
|
[
"Apache-2.0"
] | null | null | null |
core/__init__.py
|
JiaMingLin/dlcv_adda
|
e292eb0e61fc79eb15a16bcf786e834436b09089
|
[
"Apache-2.0"
] | null | null | null |
core/__init__.py
|
JiaMingLin/dlcv_adda
|
e292eb0e61fc79eb15a16bcf786e834436b09089
|
[
"Apache-2.0"
] | null | null | null |
"""Package entry points: source pre-training, target adaptation, evaluation."""
from .adapt import train_tgt
from .pretrain import train_src
from .test import evaluation

# ``__all__`` must contain *name strings*; the original listed the function
# objects themselves, which makes ``from core import *`` raise
# ``TypeError: Item in __all__ must be str``.
__all__ = ['train_src', 'train_tgt', 'evaluation']
| 22.666667
| 44
| 0.808824
| 20
| 136
| 5.1
| 0.5
| 0.215686
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132353
| 136
| 5
| 45
| 27.2
| 0.864407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
de5cf7cb4e6ceb1c3b0417d84addfff1a1ab537a
| 44
|
py
|
Python
|
shapes/__init__.py
|
haudren/shapes
|
c00581424396b8e9c187381c72b2efd1dae2eb6b
|
[
"MIT"
] | 4
|
2018-01-16T15:12:47.000Z
|
2021-10-06T14:17:58.000Z
|
shapes/__init__.py
|
haudren/shapes
|
c00581424396b8e9c187381c72b2efd1dae2eb6b
|
[
"MIT"
] | null | null | null |
shapes/__init__.py
|
haudren/shapes
|
c00581424396b8e9c187381c72b2efd1dae2eb6b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from shapes import *
| 11
| 21
| 0.704545
| 7
| 44
| 4.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 3
| 22
| 14.666667
| 0.837838
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
de5eff9a4b6d5405a6d5c54738713cbf72ac222b
| 114
|
py
|
Python
|
Python/max/01.py
|
zSucrilhos/programming
|
aa0076a4a7084a6064e1e5df258ba0c90cf8ceeb
|
[
"MIT"
] | null | null | null |
Python/max/01.py
|
zSucrilhos/programming
|
aa0076a4a7084a6064e1e5df258ba0c90cf8ceeb
|
[
"MIT"
] | 4
|
2020-07-18T03:27:03.000Z
|
2020-07-18T03:28:37.000Z
|
Python/max/01.py
|
zSucrilhos/programming
|
aa0076a4a7084a6064e1e5df258ba0c90cf8ceeb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Demonstrate the built-in ``max`` on a hand-picked list of integers.
a = [
    1, 2, 3, 4, 5, 6, 7, 8,
    48, 876, 4568, 124, 355, 87, 98,
    68769, 98879879, 9999999999,
]
print(max(a))
| 28.5
| 75
| 0.614035
| 25
| 114
| 2.8
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.495146
| 0.096491
| 114
| 4
| 76
| 28.5
| 0.184466
| 0.184211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
de9547ec1a98f2ef2c1eac559a183f10667ae824
| 2,879
|
py
|
Python
|
makingVideo.py
|
schnipdip/MASCALOGICS-Automatic-Reddit-text-to-Video-Generator-and-youtube-uploader
|
f87676f0e5fca3b720fff42f816e166cb59d8653
|
[
"MIT"
] | 33
|
2020-08-18T18:48:31.000Z
|
2022-03-13T15:40:13.000Z
|
makingVideo.py
|
schnipdip/MASCALOGICS-Automatic-Reddit-text-to-Video-Generator-and-youtube-uploader
|
f87676f0e5fca3b720fff42f816e166cb59d8653
|
[
"MIT"
] | 5
|
2020-08-27T00:56:22.000Z
|
2022-02-23T10:47:13.000Z
|
makingVideo.py
|
schnipdip/MASCALOGICS-Automatic-Reddit-text-to-Video-Generator-and-youtube-uploader
|
f87676f0e5fca3b720fff42f816e166cb59d8653
|
[
"MIT"
] | 15
|
2020-08-19T13:14:21.000Z
|
2022-01-27T10:32:41.000Z
|
from moviepy.editor import *
import os
# Assemble a Reddit-post video: title cards (part98), body cards (part97),
# then one card per comment (part...), each built from a generated .jpg plus
# its matching .mp3 narration, separated by a 'flash.mp4' transition clip,
# and finally written out as final.mp4.
# NOTE(review): the original indentation was lost in extraction; the nesting
# below — in particular whether the ``part100`` section sits inside or after
# the comments loop — is reconstructed from line order only and should be
# confirmed against the upstream repository.
# NOTE(review): paths use Windows-style backslashes, so this presumably only
# runs on Windows — verify before porting.
def makingVideo(comments,post_titles,post_bodies):
# Collected clips, concatenated at the end.
videos=[]
folder_path = os.getcwd()
# Transition clip inserted after every content card.
flash = VideoFileClip('flash.mp4')
flash.fps = 30
# new_audio = r'C:\Users\rajaa\Downloads\Compressed\Reddit-video-generation-master\Reddit-video-generation-master\funny34.mp3'
# i=int(1)
print('combine function')
print(post_titles)
# One card per post title: image shown for the narration's duration,
# trimmed by 0.3s at the end, followed by the flash transition.
for i in range(len(post_titles)):
print('this is the making video :{}'.format(i))
audio = AudioFileClip(r'{}\voices\part98'.format(folder_path)+ '-' + str(i)+'.mp3')
intro = ImageClip(r'{}\generated\part98'.format(folder_path) + '-' + str(i)+'.jpg', duration = audio.duration )
intro.fps = 30
intro = intro.set_audio(audio)
intro = intro.set_end(intro.duration-.3)
videos.append(intro)
videos.append(flash)
# Same pattern for post bodies, using the part97 file prefix.
for i in range(len(post_bodies)):
print('this is the making video :{}'.format(i))
audio = AudioFileClip(r'{}\voices\part97'.format(folder_path) + '-' + str(i) + '.mp3')
intro = ImageClip(r'{}\generated\part97'.format(folder_path) + '-' + str(i) + '.jpg', duration=audio.duration)
intro.fps = 30
intro = intro.set_audio(audio)
intro = intro.set_end(intro.duration - .3)
videos.append(intro)
videos.append(flash)
# for x in range(len(comments)):
#
# for i in range(len(comments[x])):
# audio = AudioFileClip(r'{}\voices\part'.format(folder_path) + str(x) + str(i) + '.mp3')
# clip = ImageClip(r'{}\generated\part'.format(folder_path)+str(x) + '-' + str(i)+'.jpg', duration = audio.duration )
# clip.fps = 30
# clip = clip.set_audio(audio)
# clip = clip.set_end(clip.duration)
# videos.append(clip)
#
# videos.append(flash)
# One card per comment (flat indexing, not per-comment-part as the
# commented-out variant above did).
for x in range(len(comments)):
# for i in range(len(comments[x])):
audio = AudioFileClip(r'{}\voices\part'.format(folder_path) + str(x) + '.mp3')
clip = ImageClip(r'{}\generated\part'.format(folder_path) + str(x) + '.jpg',
duration=audio.duration)
clip.fps = 30
clip = clip.set_audio(audio)
clip = clip.set_end(clip.duration)
videos.append(clip)
videos.append(flash)
# part100 card — uses the comments loop variable ``x``; presumably a
# per-comment companion card inside the loop above, but possibly a single
# trailing card reusing the last index — TODO confirm nesting.
audio = AudioFileClip(r'{}\voices\part100'.format(folder_path)+'-' + str(x) + '.mp3')
clip = ImageClip(r'{}\generated\part100'.format(folder_path) + '-' + str(x) + '.jpg',
duration=audio.duration)
clip.fps = 30
clip = clip.set_audio(audio)
clip = clip.set_end(clip.duration)
videos.append(clip)
videos.append(flash)
# NOTE(review): moviepy's bare ``concatenate`` is a deprecated alias of
# ``concatenate_videoclips`` — consider updating.
final_clip = concatenate(videos)
final_clip.write_videofile("final.mp4")
| 38.905405
| 131
| 0.578326
| 349
| 2,879
| 4.684814
| 0.203438
| 0.067278
| 0.097859
| 0.116208
| 0.7737
| 0.7737
| 0.730275
| 0.730275
| 0.725382
| 0.725382
| 0
| 0.018405
| 0.263981
| 2,879
| 73
| 132
| 39.438356
| 0.753185
| 0.212226
| 0
| 0.521739
| 0
| 0
| 0.122187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.043478
| 0
| 0.065217
| 0.086957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
de9b9c6020569a528e1c1b29e16284c5e86a2825
| 476
|
py
|
Python
|
phablytics/reports/__init__.py
|
zdammel/phablytics
|
f4fceb4f2d4f5c35df9fd41a8b3e2e64d7007ac4
|
[
"MIT"
] | 3
|
2020-06-22T22:05:08.000Z
|
2020-10-27T19:01:40.000Z
|
phablytics/reports/__init__.py
|
zdammel/phablytics
|
f4fceb4f2d4f5c35df9fd41a8b3e2e64d7007ac4
|
[
"MIT"
] | 2
|
2020-12-22T22:15:31.000Z
|
2021-12-14T03:47:19.000Z
|
phablytics/reports/__init__.py
|
zdammel/phablytics
|
f4fceb4f2d4f5c35df9fd41a8b3e2e64d7007ac4
|
[
"MIT"
] | 4
|
2020-08-27T15:56:01.000Z
|
2021-05-07T18:03:11.000Z
|
# Phablytics Imports
from phablytics.reports.group_review_status import GroupReviewStatusReport
from phablytics.reports.new_project_tasks import NewProjectTasksReport
from phablytics.reports.recent_tasks import RecentTasksReport
from phablytics.reports.revision_status import RevisionStatusReport
from phablytics.reports.upcoming_tasks_due import UpcomingProjectTasksDueReport
from phablytics.reports.urgent_and_overdue_project_tasks import UrgentAndOverdueProjectTasksReport
| 59.5
| 98
| 0.915966
| 50
| 476
| 8.48
| 0.48
| 0.198113
| 0.29717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056723
| 476
| 7
| 99
| 68
| 0.944321
| 0.037815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
dec49a9f27b371b12c2eea50f7ce75cf0f9657cc
| 276
|
py
|
Python
|
tests/gamestonk_terminal/stocks/options/test_options_api.py
|
minhhoang1023/GamestonkTerminal
|
195dc19b491052df080178c0cc6a9d535a91a704
|
[
"MIT"
] | 1
|
2022-02-18T04:02:52.000Z
|
2022-02-18T04:02:52.000Z
|
tests/gamestonk_terminal/stocks/options/test_options_api.py
|
minhhoang1023/GamestonkTerminal
|
195dc19b491052df080178c0cc6a9d535a91a704
|
[
"MIT"
] | null | null | null |
tests/gamestonk_terminal/stocks/options/test_options_api.py
|
minhhoang1023/GamestonkTerminal
|
195dc19b491052df080178c0cc6a9d535a91a704
|
[
"MIT"
] | null | null | null |
# IMPORTATION STANDARD
# IMPORTATION THIRDPARTY
# IMPORTATION INTERNAL
from gamestonk_terminal.helper_classes import ModelsNamespace as _models
from gamestonk_terminal.stocks.options import options_api
def test_models():
    """The options_api module must expose a ModelsNamespace as ``models``."""
    namespace = options_api.models
    assert isinstance(namespace, _models)
| 23
| 72
| 0.836957
| 32
| 276
| 6.96875
| 0.625
| 0.116592
| 0.188341
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 276
| 11
| 73
| 25.090909
| 0.913934
| 0.231884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7201b334cb44e00248c13fda79fad7647c484557
| 72
|
py
|
Python
|
tests/test_obj.py
|
Scotchy/XPipe
|
723317ab7029725cc58c4fa74c41cf6b3e3151de
|
[
"MIT"
] | null | null | null |
tests/test_obj.py
|
Scotchy/XPipe
|
723317ab7029725cc58c4fa74c41cf6b3e3151de
|
[
"MIT"
] | 8
|
2021-11-26T20:28:30.000Z
|
2022-03-26T23:50:45.000Z
|
tests/test_obj.py
|
Scotchy/XPipe
|
723317ab7029725cc58c4fa74c41cf6b3e3151de
|
[
"MIT"
] | null | null | null |
class ObjTest:
    """Minimal test helper that wraps a configuration object."""

    def __init__(self, conf):
        # Keep a reference to the supplied configuration as-is.
        self.conf = conf
| 18
| 29
| 0.597222
| 9
| 72
| 4.333333
| 0.666667
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 72
| 3
| 30
| 24
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
724c52c0bf41c1be62e36d552075430e21bbdc59
| 133
|
py
|
Python
|
scripts/wk/setup/__init__.py
|
2Shirt/WizardK
|
82a2e7f85c80a52f892c1553e7a45ec0174e7bc6
|
[
"MIT"
] | null | null | null |
scripts/wk/setup/__init__.py
|
2Shirt/WizardK
|
82a2e7f85c80a52f892c1553e7a45ec0174e7bc6
|
[
"MIT"
] | 178
|
2017-11-17T19:14:31.000Z
|
2021-12-15T07:43:29.000Z
|
scripts/wk/setup/__init__.py
|
2Shirt/WizardK
|
82a2e7f85c80a52f892c1553e7a45ec0174e7bc6
|
[
"MIT"
] | 1
|
2017-11-17T19:32:36.000Z
|
2017-11-17T19:32:36.000Z
|
"""WizardKit: repairs module init"""
# vim: sts=2 sw=2 ts=2
import platform
if platform.system() == 'Windows':
from . import win
| 16.625
| 36
| 0.669173
| 20
| 133
| 4.45
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027273
| 0.172932
| 133
| 7
| 37
| 19
| 0.781818
| 0.390977
| 0
| 0
| 0
| 0
| 0.093333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.